From 5fed9ec88a0623531231f88214e05c8405031f89 Mon Sep 17 00:00:00 2001 From: JohnKoumarelas Date: Thu, 9 Jul 2020 16:15:25 +0200 Subject: [PATCH 001/590] Merlin: Handling permissions, auto-generating groups and mediacenters when private. --- etl/converter/spiders/merlin_spider.py | 59 +++++++++++--------------- 1 file changed, 25 insertions(+), 34 deletions(-) diff --git a/etl/converter/spiders/merlin_spider.py b/etl/converter/spiders/merlin_spider.py index ae3f1084..f5e0aa37 100644 --- a/etl/converter/spiders/merlin_spider.py +++ b/etl/converter/spiders/merlin_spider.py @@ -54,11 +54,6 @@ def parse(self, response: scrapy.http.Response): element_xml_str = etree.tostring(element, pretty_print=True, encoding='unicode') element_dict = xmltodict.parse(element_xml_str) - # Temporary solution for public-only content. - # TODO: remove this when licensed content are enabled! - if not self.is_public(element_dict["data"]): - continue - # TODO: It's probably a pointless attribute. #del element_dict["data"]["score"] @@ -174,34 +169,30 @@ def getValuespaces(self, response): valuespaces.add_value('learningResourceType', resource_types) return valuespaces - def is_public(self, element_dict) -> bool: + def getPermissions(self, response): """ - Temporary solution to check whether the content is public and only save it if this holds. + In case license information, in the form of Kreis codes, is available. This changes the permissions from + public to private and sets accordingly the groups and mediacenters. For more information regarding the available + Merlin kreis codes please consult 'http://merlin.nibis.de/index.php?action=kreise' """ - return not (element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0) - - # TODO: This code snippet will be enabled in the next PR for licensed content, after clarifications are made. - # - # def getPermissions(self, response): - # """ - # In case license information, in the form of Kreis codes, is available. This changes the permissions from - # public to private and sets accordingly the groups and mediacenters. For more information regarding the available - # Merlin kreis codes please consult 'http://merlin.nibis.de/index.php?action=kreise' - # """ - # - # permissions = LomBase.getPermissions(self, response) - # - # element_dict = response.meta["item"] - # - # if element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: # private - # kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... - # if not isinstance(kreis_ids, list): # one element - # kreis_ids = [kreis_ids] - # kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) - # kreis_ids = ["merlin_" + id for id in kreis_ids] # add prefix - # - # permissions.replace_value('public', False) - # permissions.add_value('groups', ['Lower Saxony']) - # permissions.add_value('mediacenters', kreis_ids) - # - # return permissions \ No newline at end of file + + permissions = LomBase.getPermissions(self, response) + + # Self-explained. 1 media center per Kreis-code in this case. + permissions.add_value("autoCreateGroups", True) + permissions.add_value("autoCreateMediacenters", True) + + element_dict = response.meta["item"] + + if element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: # private + kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... 
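+            # xmltodict returns a single value when <kreis_id> holds one <data> child
+            # and a list when it holds several, hence the normalisation below.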
+ if not isinstance(kreis_ids, list): # one element + kreis_ids = [kreis_ids] + kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) + kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix + + permissions.replace_value('public', False) + permissions.add_value('groups', ['Lower Saxony']) + permissions.add_value('mediacenters', kreis_ids) + + return permissions \ No newline at end of file From fb0f1b5f4f67d49e837a1fa7b7713f4d69b9fef3 Mon Sep 17 00:00:00 2001 From: JohnKoumarelas Date: Fri, 24 Jul 2020 14:52:13 +0200 Subject: [PATCH 002/590] Permission changes in Merlin, Mediothek. Minor fixes on Leifi, Serlo. --- etl/converter/spiders/leifi_spider.py | 3 +- .../spiders/mediothek_pixiothek_spider.py | 50 ++++++------ etl/converter/spiders/merlin_spider.py | 77 +++++++++++-------- etl/converter/spiders/serlo_spider.py | 2 +- 4 files changed, 70 insertions(+), 62 deletions(-) diff --git a/etl/converter/spiders/leifi_spider.py b/etl/converter/spiders/leifi_spider.py index 82ef99f6..2c8a5e2d 100644 --- a/etl/converter/spiders/leifi_spider.py +++ b/etl/converter/spiders/leifi_spider.py @@ -12,7 +12,8 @@ class LeifiSpider(scrapy.Spider, LomBase): name='leifi_spider' friendlyName = 'LEIFIphysik' url = 'https://www.leifiphysik.de/' - rssUrl = 'http://localhost/sources/leifi_feed_rss.xml' + # rssUrl = 'http://localhost/sources/leifi_feed_rss.xml' + rssUrl = 'https://www.leifiphysik.de/sites/default/files/elixier/leifi_feed_rss.xml' def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) diff --git a/etl/converter/spiders/mediothek_pixiothek_spider.py b/etl/converter/spiders/mediothek_pixiothek_spider.py index 452a78d8..67a900ba 100644 --- a/etl/converter/spiders/mediothek_pixiothek_spider.py +++ b/etl/converter/spiders/mediothek_pixiothek_spider.py @@ -46,9 +46,6 @@ def parse(self, response: scrapy.http.Response): # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. LomBase.parse(self, copyResponse) - # def _if_exists_add(self, edu_dict: dict, element_dict: dict, edu_attr: str, element_attr: str): - # if element_attr in element_dict: - # edu_dict[edu_attr] = element_dict[element_attr] def getId(self, response): # Element response as a Python dict. @@ -118,7 +115,11 @@ def getLicense(self, response): # Element response as a Python dict. element_dict = response.meta["item"] - license.replace_value('internal', Constants.LICENSE_NONPUBLIC if element_dict['oeffentlich'] == '1' else Constants.LICENSE_COPYRIGHT_LAW) + if "oeffentlich" in element_dict and element_dict["oeffentlich"] == "0": # private + license.replace_value('internal', Constants.LICENSE_NONPUBLIC) + else: + license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # public + return license def getLOMTechnical(self, response): @@ -130,27 +131,24 @@ def getLOMTechnical(self, response): return technical - def is_public(self, element_dict) -> bool: + + def getPermissions(self, response): """ - Temporary solution to check whether the content is public and only save it if this holds. + Licensing information is controlled via the 'oeffentlich' flag. When it is '1' it is available to the public, + otherwise only to Thuringia. Therefore, when the latter happens we set the public to private, and set the groups + and mediacenters accordingly. """ - return element_dict["oeffentlich"] == "1" - - # TODO: This code snippet will be enabled in the next PR for licensed content, after clarifications are made. 
- # - # def getPermissions(self, response): - # """ - # Licensing information is controlled via the 'oeffentlich' flag. When it is '1' it is available to the public, - # otherwise only to Thuringia. Therefore, when the latter happens we set the public to private, and set the groups - # and mediacenters accordingly. - # """ - # permissions = LomBase.getPermissions(self, response) - # - # element_dict = response.meta["item"] - # - # if element_dict["oeffentlich"] == "0": # private - # permissions.replace_value('public', False) - # permissions.add_value('groups', ['Thuringia']) - # permissions.add_value('mediacenters', 'mediothek') # only 1 mediacenter. - # - # return permissions + permissions = LomBase.getPermissions(self, response) + + # Self-explained. Only 1 media center in this case. + permissions.add_value("autoCreateGroups", True) + # permissions.add_value("autoCreateMediacenters", True) + + element_dict = response.meta["item"] + + if "oeffentlich" in element_dict and element_dict["oeffentlich"] == "0": # private + permissions.replace_value('public', False) + permissions.add_value('groups', ['Thuringia']) + # permissions.add_value('mediacenters', [self.name]) # only 1 mediacenter. + + return permissions diff --git a/etl/converter/spiders/merlin_spider.py b/etl/converter/spiders/merlin_spider.py index ae3f1084..6c050891 100644 --- a/etl/converter/spiders/merlin_spider.py +++ b/etl/converter/spiders/merlin_spider.py @@ -3,6 +3,8 @@ import xmltodict as xmltodict from lxml import etree from scrapy.spiders import CrawlSpider + +from converter.constants import Constants from converter.items import * from converter.spiders.lom_base import LomBase @@ -54,11 +56,6 @@ def parse(self, response: scrapy.http.Response): element_xml_str = etree.tostring(element, pretty_print=True, encoding='unicode') element_dict = xmltodict.parse(element_xml_str) - # Temporary solution for public-only content. - # TODO: remove this when licensed content are enabled! - if not self.is_public(element_dict["data"]): - continue - # TODO: It's probably a pointless attribute. #del element_dict["data"]["score"] @@ -128,6 +125,19 @@ def getUri(self, response): location = response.xpath('/data/media_url/text()').get() return "http://merlin.nibis.de" + location + def getLicense(self, response): + license = LomBase.getLicense(self, response) + + # Element response as a Python dict. + element_dict = response.meta["item"] + + if "kreis_id" in element_dict and element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: + license.replace_value('internal', Constants.LICENSE_NONPUBLIC) # private + else: + license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # public + + return license + def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) @@ -174,34 +184,33 @@ def getValuespaces(self, response): valuespaces.add_value('learningResourceType', resource_types) return valuespaces - def is_public(self, element_dict) -> bool: + def getPermissions(self, response): """ - Temporary solution to check whether the content is public and only save it if this holds. + In case license information, in the form of Kreis codes, is available. This changes the permissions from + public to private and sets accordingly the groups and mediacenters. 
For more information regarding the available + Merlin kreis codes please consult 'http://merlin.nibis.de/index.php?action=kreise' """ - return not (element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0) - - # TODO: This code snippet will be enabled in the next PR for licensed content, after clarifications are made. - # - # def getPermissions(self, response): - # """ - # In case license information, in the form of Kreis codes, is available. This changes the permissions from - # public to private and sets accordingly the groups and mediacenters. For more information regarding the available - # Merlin kreis codes please consult 'http://merlin.nibis.de/index.php?action=kreise' - # """ - # - # permissions = LomBase.getPermissions(self, response) - # - # element_dict = response.meta["item"] - # - # if element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: # private - # kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... - # if not isinstance(kreis_ids, list): # one element - # kreis_ids = [kreis_ids] - # kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) - # kreis_ids = ["merlin_" + id for id in kreis_ids] # add prefix - # - # permissions.replace_value('public', False) - # permissions.add_value('groups', ['Lower Saxony']) - # permissions.add_value('mediacenters', kreis_ids) - # - # return permissions \ No newline at end of file + + permissions = LomBase.getPermissions(self, response) + + element_dict = response.meta["item"] + + permissions.replace_value('public', True) + + # If the license is private. + if "kreis_id" in element_dict and element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: + # Self-explained. 1 media center per Kreis-code in this case. + permissions.add_value("autoCreateGroups", True) + # permissions.add_value("autoCreateMediacenters", True) + + kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... 
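+            # Hypothetical example: a single Kreis arrives as {"data": "407"} and is
+            # wrapped to ["407"]; several arrive as {"data": ["407", "241"]} and are
+            # sorted numerically to ["241", "407"].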
+ if not isinstance(kreis_ids, list): # one element + kreis_ids = [kreis_ids] + kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) + # kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix + + permissions.replace_value('public', False) + permissions.add_value('groups', ['Lower Saxony']) + # permissions.add_value('mediacenters', kreis_ids) + + return permissions diff --git a/etl/converter/spiders/serlo_spider.py b/etl/converter/spiders/serlo_spider.py index eda75b9a..328cc9c7 100644 --- a/etl/converter/spiders/serlo_spider.py +++ b/etl/converter/spiders/serlo_spider.py @@ -24,7 +24,7 @@ def __init__(self, **kwargs): def start_requests(self): url = self.url + '/entity/api/json/export/article' # current dummy fallback since the Serlo API is basically down - url = 'http://localhost/sources/serlo.json' + # url = 'http://localhost/sources/serlo.json' yield scrapy.Request(url=url, callback=self.parseList) # some fields are having xml entities (for whatever reason), we will unescape them here From 36eaf259912ddd7f84094dd99ba954a5b89793c8 Mon Sep 17 00:00:00 2001 From: Susi Date: Mon, 27 Jul 2020 11:53:30 +0200 Subject: [PATCH 003/590] adds defaultThumbnail property to use, if there is no real thumbnail --- etl/converter/items.py | 1 + etl/converter/pipelines.py | 4 ++++ etl/converter/spiders/merlin_spider.py | 5 +++++ 3 files changed, 10 insertions(+) diff --git a/etl/converter/items.py b/etl/converter/items.py index d486280f..54eeb28b 100644 --- a/etl/converter/items.py +++ b/etl/converter/items.py @@ -142,6 +142,7 @@ class BaseItem(Item): ranking = Field() fulltext = Field() thumbnail = Field() + defaultThumbnail = Field() lastModified = Field() lom = Field(serializer=LomBaseItem) valuespaces = Field(serializer=ValuespaceItem) diff --git a/etl/converter/pipelines.py b/etl/converter/pipelines.py index a835fc98..e6b8389f 100644 --- a/etl/converter/pipelines.py +++ b/etl/converter/pipelines.py @@ -161,6 +161,10 @@ def process_item(self, item, spider): if 'thumbnail' in item: url = item['thumbnail'] response = requests.get(url) + elif 'defaultThumbnail' in item: + url = item['defaultThumbnail'] + print(url) + response = requests.get(url) elif 'location' in item['lom']['technical'] and 'format' in item['lom']['technical'] and item['lom']['technical']['format'] == 'text/html': response = requests.post(settings.get('SPLASH_URL')+'/render.png', json={ 'url': item['lom']['technical']['location'], diff --git a/etl/converter/spiders/merlin_spider.py b/etl/converter/spiders/merlin_spider.py index 6c050891..14add229 100644 --- a/etl/converter/spiders/merlin_spider.py +++ b/etl/converter/spiders/merlin_spider.py @@ -17,6 +17,7 @@ class MerlinSpider(CrawlSpider, LomBase): Author: Ioannis Koumarelas, ioannis.koumarelas@hpi.de, Schul-Cloud, Content team. 
""" name = 'merlin_spider' + domain = 'https://merlin.nibis.de' url = 'https://merlin.nibis.de/index.php' # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = 'Merlin' # name as shown in the search ui version = '0.1' # the version of your crawler, used to identify if a reimport is necessary @@ -111,6 +112,10 @@ def handleEntry(self, response): def getBase(self, response): base = LomBase.getBase(self, response) base.add_value('thumbnail', response.xpath('/data/thumbnail/text()').get()) + if response.xpath('/data/srcLogoUrl/text()').get(): + base.add_value('defaultThumbnail', self.domain + response.xpath('/data/srcLogoUrl/text()').get()) + elif response.xpath('/data/logo/text()').get(): + base.add_value('defaultThumbnail', self.domain + response.xpath('/data/logo/text()').get()) return base From 1bbb6789ce2b28fb73636c0a887eecd6c20a5950 Mon Sep 17 00:00:00 2001 From: Susi Date: Mon, 27 Jul 2020 12:01:06 +0200 Subject: [PATCH 004/590] fixes Exceptionhandling --- etl/converter/pipelines.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/etl/converter/pipelines.py b/etl/converter/pipelines.py index e6b8389f..058fa743 100644 --- a/etl/converter/pipelines.py +++ b/etl/converter/pipelines.py @@ -163,7 +163,6 @@ def process_item(self, item, spider): response = requests.get(url) elif 'defaultThumbnail' in item: url = item['defaultThumbnail'] - print(url) response = requests.get(url) elif 'location' in item['lom']['technical'] and 'format' in item['lom']['technical'] and item['lom']['technical']['format'] == 'text/html': response = requests.post(settings.get('SPLASH_URL')+'/render.png', json={ @@ -198,6 +197,9 @@ def process_item(self, item, spider): if 'thumbnail' in item: del item['thumbnail'] return self.process_item(item, spider) + elif 'defaultThumbnail' in item: + del item['defaultThumbnail'] + return self.process_item(item, spider) else: #item['thumbnail']={} raise DropItem('No thumbnail provided or ressource was unavailable for fetching') From 6b848958ed4c9c183224ac1b4ac3998ae23276d2 Mon Sep 17 00:00:00 2001 From: JohnKoumarelas Date: Wed, 29 Jul 2020 12:15:47 +0200 Subject: [PATCH 005/590] Adding a delay across the LomBase.parse() executions, execution script, and .gitignore changes. --- .gitignore | 4 +- etl/converter/settings.py | 4 ++ etl/converter/spiders/lom_base.py | 7 ++++ etl/crawl_schulcloud.sh | 67 +++++++++++++++++++++++++++++++ 4 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 etl/crawl_schulcloud.sh diff --git a/.gitignore b/.gitignore index 668f8c70..889317d1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,6 @@ .idea/ __pycache__/ .venv/ -.env \ No newline at end of file +.env +nohups/ +nohup.out diff --git a/etl/converter/settings.py b/etl/converter/settings.py index 2f244887..95f3f735 100644 --- a/etl/converter/settings.py +++ b/etl/converter/settings.py @@ -56,6 +56,10 @@ # See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay # See also autothrottle settings and docs DOWNLOAD_DELAY = 0 + +# Configure a delay between the parsing executions. 
(default: 0) +PARSE_DELAY = 0 + # The download delay setting will honor only one of: # CONCURRENT_REQUESTS_PER_DOMAIN = 16 # CONCURRENT_REQUESTS_PER_IP = 16 diff --git a/etl/converter/spiders/lom_base.py b/etl/converter/spiders/lom_base.py index ca76b639..88f81da7 100644 --- a/etl/converter/spiders/lom_base.py +++ b/etl/converter/spiders/lom_base.py @@ -1,3 +1,5 @@ +import time + from converter.items import * from pprint import pprint import logging @@ -78,6 +80,11 @@ def parse(self, response): if not self.hasChanged(response): return None + # Avoid stressing the servers across calls of this method. + settings = get_project_settings() + if 'PARSE_DELAY' in settings and float(settings.get('PARSE_DELAY')) > 0: + time.sleep(float(settings.get('PARSE_DELAY'))) + main = self.getBase(response) main.add_value('lom', self.getLOM(response).load_item()) main.add_value('valuespaces', self.getValuespaces(response).load_item()) diff --git a/etl/crawl_schulcloud.sh b/etl/crawl_schulcloud.sh new file mode 100644 index 00000000..42ac3f94 --- /dev/null +++ b/etl/crawl_schulcloud.sh @@ -0,0 +1,67 @@ +#!/bin/bash + +# This script is used to execute the spiders, while storing their output to log files. + +# First we store all spiders in an array variable. +spiders=( + "br_rss" + "digitallearninglab" + "geogebra" + "irights" + "leifi" + "mediothek_pixiothek" + "memucho" + "merlin" + "oai_sodis" + "planet_schule" + "rlp" + "serlo" + "wirlernenonline" + "wirlernenonline_gsheet" + "zdf_rss" + "zoerr" + "zum" +) + +# Print the spiders that wil be executed (for debugging purposes). +#echo ${spiders[@]} + +# Make the directory "nohups" if it does not already exist. +mkdir -p nohups + +echo +' + ( + ) + ( + /\ .-"""-. /\ + //\\/ ,,, \//\\ + |/\| ,;;;;;, |/\| + //\\\;-"""-;///\\ + // \/ . \/ \\ + (| ,-_| \ | / |_-, |) + //`__\.-.-./__`\\ + // /.-(() ())-.\ \\ + (\ |) '---' (| /) + ` (| |) ` + \) (/ + ____ ________ __ _ __ + / __ \/ ____/ / / / _________ (_)___/ /__ __________ + / / / / __/ / /_/ / / ___/ __ \/ / __ / _ \/ ___/ ___/ +/ /_/ / /___/ __ / (__ ) /_/ / / /_/ / __/ / (__ ) +\____/_____/_/ /_/ /____/ .___/_/\__,_/\___/_/ /____/ + /_/ +' + +# Execute the spiders. +for spider in ${spiders[@]} +do + echo "Executing $spider spider." + + # Execute the spider and save its output to two files: "nohup_SPIDER.out" (individual log) and "nohup.out" (collective logs). + nohup scrapy crawl ${spider}_spider | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null & 2>&1 + + # Execute the spider in the background. + #scrapy crawl ${spider}_spider & +done +echo "Happy crawling! :-)" From 0671deec7ada2faffbfe6eb95aa840793c61943d Mon Sep 17 00:00:00 2001 From: JohnKoumarelas Date: Mon, 10 Aug 2020 15:42:41 +0200 Subject: [PATCH 006/590] Grouping items in Mediothek based on "mediumId". 
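
The public Mediendatei export contains one row per file, so several rows can share the
same mediumId. group_elements() now merges those rows into one representative per
mediumId, so that every crawled item corresponds to a single media page on
schulportal-thueringen.de. A rough, self-contained sketch of the grouping idea (field
names follow the export, the values are made up):

    rows = [
        {"mediumId": 1, "einzeltitel": "Teil 1", "oeffentlich": "1"},
        {"mediumId": 1, "serientitel": "Reihe A", "einzeltitel": "Teil 2"},
    ]
    groups = {}
    for row in rows:
        # first row for a mediumId creates the representative dict
        rep = groups.setdefault(row["mediumId"], {"id": row["mediumId"]})
        # a serientitel, once seen, takes precedence as the display title
        if "serientitel" in row and "serientitel" not in rep:
            rep["serientitel"] = row["serientitel"]
            rep["titel"] = row["serientitel"]
        rep.setdefault("titel", row.get("einzeltitel", ""))
    grouped = list(groups.values())  # one dict per mediumId
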
--- .../spiders/mediothek_pixiothek_spider.py | 64 +++++++++++++++++-- 1 file changed, 57 insertions(+), 7 deletions(-) diff --git a/etl/converter/spiders/mediothek_pixiothek_spider.py b/etl/converter/spiders/mediothek_pixiothek_spider.py index 67a900ba..2707fe41 100644 --- a/etl/converter/spiders/mediothek_pixiothek_spider.py +++ b/etl/converter/spiders/mediothek_pixiothek_spider.py @@ -18,7 +18,8 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): url = 'https://www.schulportal-thueringen.de/' # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = 'MediothekPixiothek' # name as shown in the search ui version = '0.1' # the version of your crawler, used to identify if a reimport is necessary - start_urls = ['https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] + # start_urls = ['https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] + start_urls = ['http://localhost:8080/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) @@ -29,7 +30,10 @@ def parse(self, response: scrapy.http.Response): data = self.getUrlData(response.url) response.meta["rendered_data"] = data elements = json.loads(response.body_as_unicode()) - for i, element in enumerate(elements): + + grouped_elements = self.group_elements(elements) + + for i, element in enumerate(grouped_elements): copyResponse = response.copy() # Passing the dictionary for easier access to attributes. @@ -46,6 +50,43 @@ def parse(self, response: scrapy.http.Response): # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. LomBase.parse(self, copyResponse) + def group_elements(self, elements): + """ + This method groups the corresponding elements based on their mediumId. This changes the logic so that every + element in the end maps to an educational element in the https://www.schulportal-thueringen.de. + """ + + medium_id_groups = {} + for idx, element in enumerate(elements): + medium_id = element["mediumId"] + + # The first element that has this mediumId creates the representative for this medium. + if medium_id not in medium_id_groups: + medium_id_groups[medium_id] = { + "id": medium_id, + "pts": self.get_or_default(element, "pts"), + "previewImageUrl": self.get_or_default(element, "previewImageUrl"), + "titel": self.get_or_default(element, "einzeltitel"), + "kurzinhalt": self.get_or_default(element, "kurzinhalt"), + "listeStichwort": self.get_or_default(element, "listeStichwort"), + "oeffentlich": self.get_or_default(element, "oeffentlich"), + "downloadUrl": "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" + str(medium_id) + } + + # The first element to have a serientitel for this mediumId will save it. The rest will just skip it. + if "serientitel" in element and "serientitel" not in medium_id_groups[medium_id]: + medium_id_groups[medium_id]["titel"] = element["serientitel"] + medium_id_groups[medium_id]["serientitel"] = element["serientitel"] + + grouped_elements = [medium_id_groups[medium_id] for medium_id in medium_id_groups] + + return grouped_elements + + def get_or_default(self, element, attribute, default_value=""): + if attribute in element: + return element[attribute] + else: + return default_value def getId(self, response): # Element response as a Python dict. 
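        # After group_elements() the item is the grouped representative, whose "id"
        # field carries the shared mediumId.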
@@ -80,7 +121,15 @@ def getBase(self, response): # TODO: "For licensing reasons, this content is only available to users registered in the Thuringian school # portal." - base.add_value('thumbnail', element_dict['previewImageUrl']) + # base.add_value('thumbnail', element_dict['previewImageUrl']) + + # TODO: Remove this. This is only for a local execution of Mediothek to check whether Edu-Sharing has issues. + thumbnail = element_dict['previewImageUrl'] + thumbnail = thumbnail.replace("https://www.schulportal-thueringen.de/", "http://localhost:8080/thumbnails/") + # Fix the encoding + from converter.offline_mode.mediothek_pixiothek_spider_offline import encode_url_for_local + thumbnail = encode_url_for_local(thumbnail) + base.add_value('thumbnail', thumbnail) return base @@ -145,10 +194,11 @@ def getPermissions(self, response): # permissions.add_value("autoCreateMediacenters", True) element_dict = response.meta["item"] - + permissions.replace_value('public', False) if "oeffentlich" in element_dict and element_dict["oeffentlich"] == "0": # private - permissions.replace_value('public', False) - permissions.add_value('groups', ['Thuringia']) + permissions.add_value('groups', ['Thuringia-private']) # permissions.add_value('mediacenters', [self.name]) # only 1 mediacenter. + else: + permissions.add_value('groups', ['Thuringia-public']) - return permissions + return permissions \ No newline at end of file From 44ca68724cff18b6393ac7f7e519679018ce82ca Mon Sep 17 00:00:00 2001 From: JohnKoumarelas Date: Tue, 11 Aug 2020 12:02:11 +0200 Subject: [PATCH 007/590] Grouping items in Mediothek based on "mediumId", keeping only the "einzeltitel". --- .../mediothek_pixiothek_spider_offline.py | 107 +++++++++++++ .../offline_mode/merlin_spider_offline.py | 144 ++++++++++++++++++ .../spiders/mediothek_pixiothek_spider.py | 9 +- 3 files changed, 258 insertions(+), 2 deletions(-) create mode 100644 etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py create mode 100644 etl/converter/offline_mode/merlin_spider_offline.py diff --git a/etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py b/etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py new file mode 100644 index 00000000..dcce13eb --- /dev/null +++ b/etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py @@ -0,0 +1,107 @@ +import json +import os +import ssl +import time +import urllib +import urllib.request +from urllib.parse import urlencode, urlparse + +import requests +import scrapy +import xmltodict +from lxml import etree +from scrapy.spiders import CrawlSpider + +# TODO: find a better solution. 
+import ssl +ssl._create_default_https_context = ssl._create_unverified_context + + +def encode_url_for_local(url): + return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) + +class MediothekPixiothekSpiderOffline(CrawlSpider): + name = 'mediothek_pixiothek_spider_offline' + url = 'https://www.schulportal-thueringen.de/' # the url which will be linked as the primary link to your source (should be the main url of your site) + friendlyName = 'MediothekPixiothek' # name as shown in the search ui + version = '0.1' # the version of your crawler, used to identify if a reimport is necessary + # start_urls = ['https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] + start_urls = ['file:///data/projects/schul_cloud/workspace/content_sources/mediothek_pixiothek/cache/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] + + + limit = 100 + page = 0 + + elements_count = 0 + + data_dir = "/data/projects/schul_cloud/workspace/content_sources/mediothek_pixiothek/cache" + + thumbnails_dir = data_dir + "/thumbnails" + + def __init__(self, *a, **kwargs): + # LomBase.__init__(self, **kwargs) + super().__init__(*a, **kwargs) + + def encode_url_for_local(self, url): + return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) + + def parse(self, response: scrapy.http.Response): + # Avoid stressing the API. + # time.sleep(0.5) + print(response.url) + + text_response = response.body + + if not os.path.exists(self.data_dir): + os.makedirs(self.data_dir) + if not os.path.exists(self.thumbnails_dir): + os.makedirs(self.thumbnails_dir) + + self.save_json_array_data(text_response) + + elements = json.loads(response.body_as_unicode()) + for i, element in enumerate(elements): + if "previewImageUrl" in element: + time.sleep(0.5) + self.store_thumbnails(element['previewImageUrl']) + + + def save_json_array_data(self, text_response): + # Save the JSON array data file. 
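+        # Mirror the remote resource path below data_dir, so the cached file can later
+        # be replayed through the file:// start URL of this spider.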
+ resource_path = "/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" + # Create the subdirectories + directories = self.data_dir + "/tip-ms/api/public_mediothek_metadatenexport" + if not os.path.exists(directories): + os.makedirs(directories) + with open(self.data_dir + resource_path, "wb") as fout: + fout.write(text_response) + + def store_thumbnails(self, thumbnail_url): + urlparse_result = urlparse(thumbnail_url) + thumbnail_path = urlparse_result.path + if urlparse_result.query != "": + thumbnail_path += "&" + urlparse_result.query + + # Create the subdirectories + directories = self.thumbnails_dir + os.path.dirname(os.path.abspath(thumbnail_path)) + if not os.path.exists(directories): + os.makedirs(directories) + + local_path = self.thumbnails_dir + thumbnail_path + + if not os.path.exists(local_path): + # urllib.request.urlretrieve(thumbnail_url, local_path) + self.download_and_save_image(thumbnail_url, local_path) + + def download_and_save_image(self, pic_url, local_path): + with open(local_path, 'wb') as handle: + response = requests.get(pic_url, stream=True, allow_redirects=True) + + if not response.ok: + print(response) + + for block in response.iter_content(1024): + if not block: + break + + handle.write(block) \ No newline at end of file diff --git a/etl/converter/offline_mode/merlin_spider_offline.py b/etl/converter/offline_mode/merlin_spider_offline.py new file mode 100644 index 00000000..b3877f26 --- /dev/null +++ b/etl/converter/offline_mode/merlin_spider_offline.py @@ -0,0 +1,144 @@ +import os +import ssl +import time +import urllib +import urllib.request +from urllib.parse import urlencode, urlparse + +import requests +import scrapy +import xmltodict +from lxml import etree +from scrapy.spiders import CrawlSpider + +# TODO: find a better solution. +import ssl +ssl._create_default_https_context = ssl._create_unverified_context + +def encode_url_for_local(url): + return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) + +class MerlinSpiderOffline(CrawlSpider): + name = 'merlin_spider_offline' + domain = 'https://merlin.nibis.de' + url = 'https://merlin.nibis.de/index.php' # the url which will be linked as the primary link to your source (should be the main url of your site) + friendlyName = 'Merlin' # name as shown in the search ui + version = '0.1' # the version of your crawler, used to identify if a reimport is necessary + apiUrl = 'https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*' # * regular expression, to represent all possible values. + + limit = 100 + page = 0 + + elements_count = 0 + + data_dir = "/data/projects/schul_cloud/workspace/content_sources/merlin/cache" + + thumbnails_dir = data_dir + "/thumbnails" + + def __init__(self, *a, **kwargs): + # LomBase.__init__(self, **kwargs) + super().__init__(*a, **kwargs) + + def encode_url_for_local(self, url): + return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) + + def start_requests(self): + response = 1 + while response is not None: + yield scrapy.Request(url=self.apiUrl.replace('%start', str(self.page * self.limit)) + .replace('%anzahl', str(self.limit)), + callback=self.parse_offline, headers={ + 'Accept': 'application/xml', + 'Content-Type': 'application/xml' + }) + + + def parse_offline(self, response: scrapy.http.Response): + # Avoid stressing the API. 
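+        # Throttling happens in the element loop below instead (0.5 s before each
+        # thumbnail download).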
+ # time.sleep(0.5) + print(response.url) + + text_response = response.body + + if not os.path.exists(self.data_dir): + os.makedirs(self.data_dir) + if not os.path.exists(self.thumbnails_dir): + os.makedirs(self.thumbnails_dir) + + + + resource_path = response.url.replace("https://merlin.nibis.de/", "") + + with open(self.data_dir + "/" + resource_path, "wb") as fout: + fout.write(text_response) + + # We would use .fromstring(response.text) if the response did not include the XML declaration: + # + root = etree.XML(response.body) + tree = etree.ElementTree(root) + + # Get the total number of possible elements + elements_total = int(tree.xpath('/root/sum')[0].text) + + # If results are returned. + elements = tree.xpath('/root/items/*') + + self.elements_count += len(elements) + + if len(elements) > 0: + for element in elements: + time.sleep(0.5) + # copyResponse = response.copy() + + element_xml_str = etree.tostring(element, pretty_print=True, encoding='unicode') + element_dict = xmltodict.parse(element_xml_str)["data"] + + if "thumbnail" in element_dict: + self.store_thumbnails(element_dict["thumbnail"]) + self.store_thumbnails(self.domain + element_dict["srcLogoUrl"]) + # self.store_thumbnails(self.domain + element_dict["logo"]) + + + # If the number of returned results is equal to the imposed limit, it means that there are more to be returned. + # if len(elements) == self.limit: + if self.elements_count < elements_total: + self.page += 1 + url = self.apiUrl.replace('%start', str(self.page * self.limit)).replace('%anzahl', str(self.limit)) + yield scrapy.Request(url=url, callback=self.parse_offline, headers={ + 'Accept': 'application/xml', + 'Content-Type': 'application/xml' + }) + + + def store_thumbnails(self, thumbnail_url): + + urlparse_result = urlparse(thumbnail_url) + thumbnail_path = urlparse_result.path + if urlparse_result.query != "": + thumbnail_path += "&" + urlparse_result.query + + # Create the subdirectories + directories = self.thumbnails_dir + os.path.dirname(os.path.abspath(thumbnail_path)) + if not os.path.exists(directories): + os.makedirs(directories) + + # local_path = self.thumbnails_dir + thumbnail_url.replace("https://thumbnails.merlin.nibis.de/", "") + local_path = self.thumbnails_dir + thumbnail_path + + if not os.path.exists(local_path): + # urllib.request.urlretrieve(thumbnail_url, local_path) + self.download_and_save_image(thumbnail_url, local_path) + + + def download_and_save_image(self, pic_url, local_path): + with open(local_path, 'wb') as handle: + response = requests.get(pic_url, stream=True) + + if not response.ok: + print(response) + + for block in response.iter_content(1024): + if not block: + break + + handle.write(block) \ No newline at end of file diff --git a/etl/converter/spiders/mediothek_pixiothek_spider.py b/etl/converter/spiders/mediothek_pixiothek_spider.py index 2707fe41..9cfd5c1c 100644 --- a/etl/converter/spiders/mediothek_pixiothek_spider.py +++ b/etl/converter/spiders/mediothek_pixiothek_spider.py @@ -4,6 +4,7 @@ from scrapy.spiders import CrawlSpider from converter.items import * +from converter.offline_mode.mediothek_pixiothek_spider_offline import encode_url_for_local from converter.spiders.lom_base import LomBase from converter.constants import *; @@ -73,10 +74,15 @@ def group_elements(self, elements): "downloadUrl": "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" + str(medium_id) } + # TODO: Discuss when it makes sense to combine "serientitel" and "einzeltitel"! 
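+            # With both fields present the grouped title currently reads
+            # "<serientitel> - <einzeltitel>".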
# The first element to have a serientitel for this mediumId will save it. The rest will just skip it. if "serientitel" in element and "serientitel" not in medium_id_groups[medium_id]: medium_id_groups[medium_id]["titel"] = element["serientitel"] medium_id_groups[medium_id]["serientitel"] = element["serientitel"] + if "einzeltitel" in element: + medium_id_groups[medium_id]["titel"] += " - " + element["einzeltitel"] + medium_id_groups[medium_id]["einzeltitel"] = element["einzeltitel"] + grouped_elements = [medium_id_groups[medium_id] for medium_id in medium_id_groups] @@ -127,7 +133,6 @@ def getBase(self, response): thumbnail = element_dict['previewImageUrl'] thumbnail = thumbnail.replace("https://www.schulportal-thueringen.de/", "http://localhost:8080/thumbnails/") # Fix the encoding - from converter.offline_mode.mediothek_pixiothek_spider_offline import encode_url_for_local thumbnail = encode_url_for_local(thumbnail) base.add_value('thumbnail', thumbnail) @@ -141,7 +146,7 @@ def getLOMGeneral(self, response): # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel - general.add_value('title', element_dict["einzeltitel"]) + general.add_value('title', element_dict["titel"]) # self._if_exists_add(general, element_dict, "description", "kurzinhalt") if "kurzinhalt" in element_dict: general.add_value('description', element_dict["kurzinhalt"]) From 997934414f30e5b7af6e04544ea4ffdfd2d2e93d Mon Sep 17 00:00:00 2001 From: JohnKoumarelas Date: Mon, 24 Aug 2020 11:57:27 +0200 Subject: [PATCH 008/590] CON-153 - Cleans code and merges latest changes from OEH --- .gitignore | 1 + docker-compose-dev.yml | 98 ++++ etl/converter/.env.example | 9 +- etl/converter/constants.py | 53 +- etl/converter/custom_log_formatter.py | 30 +- etl/converter/env.py | 45 +- etl/converter/es_connector.py | 488 ++++++++++++------ etl/converter/items.py | 78 ++- etl/converter/middlewares.py | 4 +- .../mediothek_pixiothek_spider_offline.py | 107 ---- .../offline_mode/merlin_spider_offline.py | 144 ------ etl/converter/pipelines.py | 348 +++++++++---- etl/converter/run.py | 36 ++ etl/converter/settings.py | 124 +++-- etl/converter/spiders/br_rss_spider.py | 22 +- etl/converter/spiders/csv_base.py | 126 +++-- .../spiders/digitallearninglab_spider.py | 226 ++++---- etl/converter/spiders/edu_sharing_base.py | 178 +++++++ etl/converter/spiders/geogebra_spider.py | 184 +++---- etl/converter/spiders/irights_spider.py | 64 +-- etl/converter/spiders/json_base.py | 32 +- etl/converter/spiders/leifi_spider.py | 160 +++--- etl/converter/spiders/lom_base.py | 327 ++++++------ etl/converter/spiders/lrmi_base.py | 153 +++--- .../spiders/mediothek_pixiothek_spider.py | 100 ++-- etl/converter/spiders/memucho_spider.py | 145 +++--- etl/converter/spiders/merlin_spider.py | 160 +++--- etl/converter/spiders/oai_base.py | 241 +++++---- etl/converter/spiders/oai_sodis_spider.py | 41 +- etl/converter/spiders/oeh_rss_spider.py | 42 ++ etl/converter/spiders/oeh_spider.py | 29 ++ etl/converter/spiders/planet_schule_spider.py | 126 +++-- etl/converter/spiders/rlp_spider.py | 51 +- etl/converter/spiders/rss_base.py | 69 ++- etl/converter/spiders/rss_list_base.py | 69 ++- etl/converter/spiders/sample_spider.py | 109 ++-- etl/converter/spiders/serlo_spider.py | 242 +++++---- etl/converter/spiders/tutory_spider.py | 146 +++--- .../spiders/wirlernenonline_gsheet_spider.py | 50 +- .../spiders/wirlernenonline_spider.py | 273 +++++----- 
etl/converter/spiders/youtube_spider.py | 330 ++++++++++++ etl/converter/spiders/zdf_rss_spider.py | 22 +- etl/converter/spiders/zoerr_spider.py | 25 +- etl/converter/spiders/zum_spider.py | 191 ++++--- etl/converter/valuespace_helper.py | 46 +- etl/crawl_schulcloud.sh | 4 +- etl/csv/youtube.csv | 115 +++++ etl/edu_sharing_client/api/bulk_v1_api.py | 7 +- etl/edu_sharing_client/models/job_detail.py | 58 +-- etl/edu_sharing_client/models/service.py | 326 +----------- etl/requirements.txt | 3 +- etl/valuespace_converter/app/valuespaces.py | 2 +- 52 files changed, 3625 insertions(+), 2434 deletions(-) create mode 100644 docker-compose-dev.yml delete mode 100644 etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py delete mode 100644 etl/converter/offline_mode/merlin_spider_offline.py create mode 100644 etl/converter/run.py create mode 100644 etl/converter/spiders/edu_sharing_base.py create mode 100644 etl/converter/spiders/oeh_rss_spider.py create mode 100644 etl/converter/spiders/oeh_spider.py create mode 100644 etl/converter/spiders/youtube_spider.py create mode 100644 etl/csv/youtube.csv diff --git a/.gitignore b/.gitignore index 889317d1..3b08a9e0 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ __pycache__/ .env nohups/ nohup.out +out \ No newline at end of file diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml new file mode 100644 index 00000000..f4cf6db4 --- /dev/null +++ b/docker-compose-dev.yml @@ -0,0 +1,98 @@ +version: "3.4" + +services: + elasticsearch: + image: elasticsearch:7.4.2 + environment: + - "discovery.type=single-node" + #- ELASTIC_PASSWORD=changethisinproduction + #- xpack.security.enabled=true + - http.port=9200 + - http.cors.enabled=true + - http.cors.allow-origin=* + - http.cors.allow-headers=X-Requested-With,X-Auth-Token,Content-Type,Content-Length,Authorization,Access-Control-Allow-Headers,Accept + - http.cors.allow-credentials=true + - bootstrap.memory_lock=true + - 'ES_JAVA_OPTS=-Xms2g -Xmx4g' + networks: + - elasticnet + ports: + - "127.0.0.1:9200:9200" + restart: on-failure + volumes: + - es-data:/usr/share/elasticsearch/data + kibana: + image: docker.elastic.co/kibana/kibana:7.4.2 + networks: + - elasticnet + depends_on: + - elasticsearch + ports: + - "5601:5601" # exposte to host + postgres: + build: + context: ./postgres + dockerfile: postgres.Dockerfile + environment: + - "POSTGRES_USER=search" + - "POSTGRES_PASSWORD=admin" + - "POSTGRES_DB=search" + networks: + - elasticnet + ports: + - "127.0.0.1:5432:5432" + restart: always + volumes: + - pg-data:/var/lib/postgresql/data + valuespace_converter: + build: + context: ./etl/valuespace_converter + dockerfile: valuespace_converter.Dockerfile + networks: + - elasticnet + ports: + - "5010:5010" # exposte to host + restart: on-failure + logstash: + build: + context: ./logstash + dockerfile: logstash_psql.Dockerfile + environment: + - LS_JAVA_OPTS=-Xmx4g + networks: + - elasticnet + depends_on: + - elasticsearch + - postgres + restart: on-failure + valuespaces: + image: laocoon667/oer-flask-api:dev + networks: + - elasticnet + ports: + - "127.0.0.1:5000:5000" + restart: on-failure + splash: + image: scrapinghub/splash + networks: + - elasticnet + command: --maxrss 4000 + restart: always + ports: + - "127.0.0.1:8050:8050" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8050/_ping"] + interval: 30s + timeout: 5s + retries: 3 + +networks: + elasticnet: + +volumes: + pg-data: + driver: local + es-data: + driver: local + + diff --git a/etl/converter/.env.example 
b/etl/converter/.env.example index 124706e2..a3e5d937 100644 --- a/etl/converter/.env.example +++ b/etl/converter/.env.example @@ -4,6 +4,13 @@ # Level for logs, supported DEBUG, INFO, WARNING, ERROR LOG_LEVEL = "WARNING" +# Don't upload to Edu-Sharing +DRY_RUN = False + +DISABLE_SPLASH = False + EDU_SHARING_BASE_URL = "http://localhost:8080/edu-sharing/" EDU_SHARING_USERNAME = "admin" -EDU_SHARING_PASSWORD = "admin" \ No newline at end of file +EDU_SHARING_PASSWORD = "admin" + +YOUTUBE_API_KEY = "" \ No newline at end of file diff --git a/etl/converter/constants.py b/etl/converter/constants.py index 73c33e4f..c74fff25 100644 --- a/etl/converter/constants.py +++ b/etl/converter/constants.py @@ -1,31 +1,44 @@ - class Constants: - LICENSE_CC_ZERO_10 = 'https://creativecommons.org/publicdomain/zero/1.0/' - LICENSE_CC_BY_SA_30 = 'https://creativecommons.org/licenses/by-sa/3.0/' - LICENSE_CC_BY_SA_40 = 'https://creativecommons.org/licenses/by-sa/4.0/' - LICENSE_CC_BY_40 = 'https://creativecommons.org/licenses/by/4.0/' - LICENSE_PDM = 'https://creativecommons.org/publicdomain/mark/1.0/' + LICENSE_CC_ZERO_10 = "https://creativecommons.org/publicdomain/zero/1.0/" + LICENSE_CC_BY_SA_30 = "https://creativecommons.org/licenses/by-sa/3.0/" + LICENSE_CC_BY_SA_40 = "https://creativecommons.org/licenses/by-sa/4.0/" + LICENSE_CC_BY_40 = "https://creativecommons.org/licenses/by/4.0/" + LICENSE_PDM = "https://creativecommons.org/publicdomain/mark/1.0/" + VALID_LICENSE_URLS = [ + LICENSE_CC_ZERO_10, + LICENSE_CC_BY_SA_30, + LICENSE_CC_BY_SA_40, + LICENSE_CC_BY_40, + LICENSE_PDM, + ] LICENSE_MAPPINGS = { - 'https://creativecommons.org/publicdomain/zero': 'https://creativecommons.org/publicdomain/zero/1.0/', - 'https://creativecommons.org/licenses/by': 'https://creativecommons.org/licenses/by/4.0/', - 'https://creativecommons.org/licenses/by-sa': 'https://creativecommons.org/licenses/by-sa/4.0/', + "https://creativecommons.org/publicdomain/zero/": LICENSE_CC_ZERO_10, + "https://creativecommons.org/licenses/by/": LICENSE_CC_BY_40, + "https://creativecommons.org/licenses/by-sa/": LICENSE_CC_BY_SA_40, # wrong mapping (currently from edu-sharing) - 'https://creativecommons.org/licenses/pdm': 'https://creativecommons.org/publicdomain/mark/1.0/', + "https://creativecommons.org/licenses/pdm/": LICENSE_PDM, + } + LICENSE_MAPPINGS_INTERNAL = { + "CC_0": LICENSE_CC_ZERO_10, + "CC_BY": LICENSE_CC_BY_40, + "CC_BY_SA": LICENSE_CC_BY_SA_40, + "PDM": LICENSE_PDM, } - LICENSE_COPYRIGHT_LAW = 'COPYRIGHT_LAW' - LICENSE_NONPUBLIC = 'NONPUBLIC' + LICENSE_COPYRIGHT_LAW = "COPYRIGHT_LAW" + LICENSE_NONPUBLIC = "NONPUBLIC" - TYPE_MATERIAL = 'MATERIAL' - TYPE_TOOL = 'TOOL' - TYPE_SOURCE = 'SOURCE' - TYPE_LESSONPLANNING = 'LESSONPLANNING' + TYPE_MATERIAL = "MATERIAL" + TYPE_TOOL = "TOOL" + TYPE_SOURCE = "SOURCE" + TYPE_LESSONPLANNING = "LESSONPLANNING" SOURCE_TYPE_SPIDER = 1 SOURCE_TYPE_EDITORIAL = 2 - + + class OerType: - NONE = 'NONE' - MIXED = 'MIXED' - ALL = 'ALL' + NONE = "NONE" + MIXED = "MIXED" + ALL = "ALL" diff --git a/etl/converter/custom_log_formatter.py b/etl/converter/custom_log_formatter.py index a199d19e..c5ed3b92 100644 --- a/etl/converter/custom_log_formatter.py +++ b/etl/converter/custom_log_formatter.py @@ -2,31 +2,27 @@ import logging import os + class CustomLogFormatter(LogFormatter): DROPPEDMSG = "Dropped: %(exception)s" + os.linesep + "%(item)s" ITEMERRORMSG = "Error processing %(item)s" def dropped(self, item, exception, response, spider): - """Logs a message when an item is dropped while it is passing through 
the item pipeline.""" - return { - 'level': logging.WARNING, - 'msg': self.DROPPEDMSG, - 'args': { - 'exception': exception, - 'item': item['lom'], - } - } + """Logs a message when an item is dropped while it is passing through the item pipeline.""" + return { + "level": logging.WARNING, + "msg": self.DROPPEDMSG, + "args": {"exception": exception, "item": item["lom"],}, + } def item_error(self, item, exception, response, spider): - """Logs a message when an item causes an error while it is passing + """Logs a message when an item causes an error while it is passing through the item pipeline. .. versionadded:: 2.0 """ - return { - 'level': logging.ERROR, - 'msg': self.ITEMERRORMSG, - 'args': { - 'item': item['lom'], - } - } + return { + "level": logging.ERROR, + "msg": self.ITEMERRORMSG, + "args": {"item": item["lom"],}, + } diff --git a/etl/converter/env.py b/etl/converter/env.py index 43ef01f4..0c8450e7 100644 --- a/etl/converter/env.py +++ b/etl/converter/env.py @@ -1,11 +1,46 @@ import os import sys from dotenv import load_dotenv +from typing import NoReturn + load_dotenv() -class Env: - def get(key: str, allowNull = False) -> str: - value = os.getenv(key) - if value == None and not allowNull: - sys.exit('No configuration for key ' + key + ' was found in your .env file. Please refer to the .env.example file for a sample value') + +def get(key: str, allow_null: bool = False, default: str = None) -> str: + """ + Get environment variable by key. + + Exits on undefined variable unless either `allow_null` or `default` is set. + """ + value = os.getenv(key, default) + if value != None: return value + elif allow_null: + return None + else: + _fail_on_missing_key(key) + + +def get_bool(key: str, allow_null: bool = False, default: bool = None) -> bool: + value = os.getenv(key) + if value != None: + if value.lower() in ["true", "1", "yes"]: + return True + elif value.lower() in ["false", "0", "no"]: + return False + else: + raise RuntimeError( + "Failed to parse value for boolean variable {}: {}".format(key, value) + ) + if default != None: + return default + elif allow_null: + return None + else: + _fail_on_missing_key(key) + + +def _fail_on_missing_key(key: str) -> NoReturn: + print("No configuration for key {} was found in your .env file.".format(key)) + print("Please refer to the .env.example file for a sample value.") + sys.exit(1) diff --git a/etl/converter/es_connector.py b/etl/converter/es_connector.py index c428cec3..17611cf3 100644 --- a/etl/converter/es_connector.py +++ b/etl/converter/es_connector.py @@ -1,3 +1,4 @@ +import time import uuid import requests import json @@ -7,6 +8,9 @@ from requests.auth import HTTPBasicAuth from io import BytesIO import logging + +from vobject.vcard import VCardBehavior + from converter.constants import Constants from edu_sharing_client.api_client import ApiClient from edu_sharing_client.configuration import Configuration @@ -19,16 +23,23 @@ from typing import List from enum import Enum + class EduSharingConstants: - HOME = '-home-' - GROUP_EVERYONE = 'GROUP_EVERYONE' - AUTHORITYTYPE_GROUP = 'GROUP' - AUTHORITYTYPE_EVERYONE = 'EVERYONE' - PERMISSION_CONSUMER = 'Consumer' - PERMISSION_CCPUBLISH = 'CCPublish' - GROUP_PREFIX = 'GROUP_' - MEDIACENTER_PREFIX = 'MEDIA_CENTER_' - MEDIACENTER_PROXY_PREFIX = 'MEDIA_CENTER_PROXY_' + HOME = "-home-" + GROUP_EVERYONE = "GROUP_EVERYONE" + AUTHORITYTYPE_GROUP = "GROUP" + AUTHORITYTYPE_EVERYONE = "EVERYONE" + PERMISSION_CONSUMER = "Consumer" + PERMISSION_CCPUBLISH = "CCPublish" + GROUP_PREFIX = "GROUP_" + 
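    # edu-sharing authority names are assembled from these prefixes, e.g. a mediacenter
    # group becomes GROUP_MEDIA_CENTER_<name> (see createGroupsIfNotExists below).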
MEDIACENTER_PREFIX = "MEDIA_CENTER_" + MEDIACENTER_PROXY_PREFIX = "MEDIA_CENTER_PROXY_" + LIFECYCLE_ROLES_MAPPING = { + "publisher": "ccm:lifecyclecontributer_publisher", + "author": "ccm:lifecyclecontributer_author", + "editor": "ccm:lifecyclecontributer_editor", + } + # creating the swagger client: java -jar swagger-codegen-cli-3.0.20.jar generate -l python -i http://localhost:8080/edu-sharing/rest/swagger.json -o edu_sharing_swagger -c edu-sharing-swagger.config.json class ESApiClient(ApiClient): @@ -52,12 +63,15 @@ def deserialize(self, response, response_type): except ValueError: data = response.data # workaround for es: simply return to prevent error throwing - #return self.__deserialize(data, response_type) + # return self.__deserialize(data, response_type) return data + + class EduSharing: class CreateGroupType(Enum): Regular = 1 MediaCenter = 2 + cookie: str = None resetVersion: bool = False apiClient: ESApiClient @@ -66,18 +80,41 @@ class CreateGroupType(Enum): mediacenterApi: MEDIACENTERV1Api nodeApi: NODEV1Api groupCache: List[str] + def __init__(self): self.initApiClient() - def getHeaders(self, contentType = 'application/json'): - return { 'COOKIE' : EduSharing.cookie, 'Accept' : 'application/json', 'Content-Type' : contentType} + + def getHeaders(self, contentType="application/json"): + return { + "COOKIE": EduSharing.cookie, + "Accept": "application/json", + "Content-Type": contentType, + } + def syncNode(self, spider, type, properties): - response = EduSharing.bulkApi.sync(body = properties, match = ['ccm:replicationsource', 'ccm:replicationsourceid'], type = type, group = spider.name, reset_version = EduSharing.resetVersion) - return response['node'] + groupBy = [] + if "ccm:replicationsourceorigin" in properties: + groupBy = ["ccm:replicationsourceorigin"] + response = EduSharing.bulkApi.sync( + body=properties, + match=["ccm:replicationsource", "ccm:replicationsourceid"], + type=type, + group=spider.name, + group_by=groupBy, + reset_version=EduSharing.resetVersion, + ) + return response["node"] + def setNodeText(self, uuid, item) -> bool: - if 'fulltext' in item: - response = requests.post(get_project_settings().get('EDU_SHARING_BASE_URL') + 'rest/node/v1/nodes/-home-/' + uuid + '/textContent?mimetype = text/plain', - headers = self.getHeaders(None), - data = item['fulltext'].encode('utf-8')) + if "fulltext" in item: + response = requests.post( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/textContent", + headers=self.getHeaders("multipart/form-data"), + data=item["fulltext"].encode("utf-8"), + ) return response.status_code == 200 # does currently not store data # try: @@ -89,104 +126,156 @@ def setNodeText(self, uuid, item) -> bool: def setPermissions(self, uuid, permissions) -> bool: try: - EduSharing.nodeApi.set_permission(repository = EduSharingConstants.HOME, node = uuid, body = permissions, send_mail = False, send_copy = False) + EduSharing.nodeApi.set_permission( + repository=EduSharingConstants.HOME, + node=uuid, + body=permissions, + send_mail=False, + send_copy=False, + ) return True except ApiException as e: return False + def setNodePreview(self, uuid, item) -> bool: - key = 'large' if 'large' in item['thumbnail'] else 'small' - files = {'image': base64.b64decode(item['thumbnail'][key])} - response = requests.post(get_project_settings().get('EDU_SHARING_BASE_URL') + 'rest/node/v1/nodes/-home-/' + uuid + '/preview?mimetype=' + item['thumbnail']['mimetype'], - headers = self.getHeaders(None), - files = 
files) - return response.status_code == 200 - + if "thumbnail" in item: + key = ( + "large" + if "large" in item["thumbnail"] + else "small" + if "small" in item["thumbnail"] + else None + ) + if key: + files = {"image": base64.b64decode(item["thumbnail"][key])} + response = requests.post( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/preview?mimetype=" + + item["thumbnail"]["mimetype"], + headers=self.getHeaders(None), + files=files, + ) + return response.status_code == 200 + else: + logging.warning("No thumbnail provided for " + uuid) + def mapLicense(self, spaces, license): - if 'url' in license: - if license['url'] == Constants.LICENSE_CC_BY_40: - spaces['ccm:commonlicense_key'] = 'CC_BY' - spaces['ccm:commonlicense_cc_version'] = '4.0' - if license['url'] == Constants.LICENSE_CC_BY_SA_30: - spaces['ccm:commonlicense_key'] = 'CC_BY_SA' - spaces['ccm:commonlicense_cc_version'] = '3.0' - if license['url'] == Constants.LICENSE_CC_BY_SA_40: - spaces['ccm:commonlicense_key'] = 'CC_BY_SA' - spaces['ccm:commonlicense_cc_version'] = '4.0' - if license['url'] == Constants.LICENSE_CC_ZERO_10: - spaces['ccm:commonlicense_key'] = 'CC_0' - spaces['ccm:commonlicense_cc_version'] = '1.0' - if license['url'] == Constants.LICENSE_PDM: - spaces['ccm:commonlicense_key'] = 'PDM' - if 'internal' in license: - if license['internal'] == Constants.LICENSE_COPYRIGHT_LAW: - spaces['ccm:commonlicense_key'] = 'COPYRIGHT_FREE' + if "url" in license: + if license["url"] == Constants.LICENSE_CC_BY_40: + spaces["ccm:commonlicense_key"] = "CC_BY" + spaces["ccm:commonlicense_cc_version"] = "4.0" + if license["url"] == Constants.LICENSE_CC_BY_SA_30: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "3.0" + if license["url"] == Constants.LICENSE_CC_BY_SA_40: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "4.0" + if license["url"] == Constants.LICENSE_CC_ZERO_10: + spaces["ccm:commonlicense_key"] = "CC_0" + spaces["ccm:commonlicense_cc_version"] = "1.0" + if license["url"] == Constants.LICENSE_PDM: + spaces["ccm:commonlicense_key"] = "PDM" + if "internal" in license: + if license["internal"] == Constants.LICENSE_COPYRIGHT_LAW: + spaces["ccm:commonlicense_key"] = "COPYRIGHT_FREE" + if "author" in license: + spaces["ccm:author_freetext"] = license["author"] def transformItem(self, uuid, spider, item): spaces = { - 'ccm:replicationsource' : spider.name, - 'ccm:replicationsourceid' : item['sourceId'], - 'ccm:replicationsourcehash' : item['hash'], - 'ccm:objecttype' : item['type'], - 'ccm:replicationsourceuuid' : uuid, - 'cm:name' : item['lom']['general']['title'], - 'ccm:wwwurl' : item['lom']['technical']['location'], - 'cclom:location' : item['lom']['technical']['location'], - 'cclom:title' : item['lom']['general']['title'], + "ccm:replicationsource": spider.name, + "ccm:replicationsourceid": item["sourceId"], + "ccm:replicationsourcehash": item["hash"], + "ccm:objecttype": item["type"], + "ccm:replicationsourceuuid": uuid, + "cm:name": item["lom"]["general"]["title"], + "ccm:wwwurl": item["lom"]["technical"]["location"], + "cclom:location": item["lom"]["technical"]["location"], + "cclom:title": item["lom"]["general"]["title"], } - self.mapLicense(spaces, item['license']) - if 'description' in item['lom']['general']: - spaces['cclom:general_description'] = item['lom']['general']['description'] + if "origin" in item: + spaces["ccm:replicationsourceorigin"] = item[ + "origin" + ] # TODO 
currently not mapped in edu-sharing - if 'language' in item['lom']['general']: - spaces['cclom:general_language'] = item['lom']['general']['language'] + self.mapLicense(spaces, item["license"]) + if "description" in item["lom"]["general"]: + spaces["cclom:general_description"] = item["lom"]["general"]["description"] - if 'keyword' in item['lom']['general']: - spaces['cclom:general_keyword'] = item['lom']['general']['keyword'], + if "language" in item["lom"]["general"]: + spaces["cclom:general_language"] = item["lom"]["general"]["language"] - lifecycleRolesMapping = { - 'publisher' : 'ccm:lifecyclecontributer_publisher', - 'author' : 'ccm:lifecyclecontributer_author', - 'editor' : 'ccm:lifecyclecontributer_editor', - } + if "keyword" in item["lom"]["general"]: + spaces["cclom:general_keyword"] = (item["lom"]["general"]["keyword"],) # TODO: this does currently not support multiple values per role - if 'lifecycle' in item['lom']: - for person in item['lom']['lifecycle']: - if not 'role' in person: + if "lifecycle" in item["lom"]: + for person in item["lom"]["lifecycle"]: + if not "role" in person: continue - if not person['role'].lower() in lifecycleRolesMapping: - logging.warn('The lifecycle role ' + person['role'] + ' is currently not supported by the edu-sharing connector') + if ( + not person["role"].lower() + in EduSharingConstants.LIFECYCLE_ROLES_MAPPING + ): + logging.warn( + "The lifecycle role " + + person["role"] + + " is currently not supported by the edu-sharing connector" + ) continue - mapping = lifecycleRolesMapping[person['role'].lower()] + mapping = EduSharingConstants.LIFECYCLE_ROLES_MAPPING[ + person["role"].lower() + ] # convert to a vcard string - firstName = person['firstName'] if 'firstName' in person else '' - lastName = person['lastName'] if 'lastName' in person else '' + firstName = person["firstName"] if "firstName" in person else "" + lastName = person["lastName"] if "lastName" in person else "" + organization = ( + person["organization"] if "organization" in person else "" + ) + url = person["url"] if "url" in person else "" vcard = vobject.vCard() - vcard.add('n') - vcard.n.value = vobject.vcard.Name(family = lastName, given = firstName) - vcard.add('fn').value = (firstName + ' ' + lastName).strip() + vcard.add("n").value = vobject.vcard.Name( + family=lastName, given=firstName + ) + vcard.add("fn").value = ( + organization + if organization + else (firstName + " " + lastName).strip() + ) + if organization: + vcard.add("org") + # fix a bug of splitted org values + vcard.org.behavior = VCardBehavior.defaultBehavior + vcard.org.value = organization + vcard.add("url").value = url spaces[mapping] = [vcard.serialize()] - valuespaceMapping = { - 'discipline' : 'ccm:taxonid', - 'intendedEndUserRole' : 'ccm:educationalintendedenduserrole', - 'educationalContext' : 'ccm:educationalcontext', - 'learningResourceType' : 'ccm:learningResourceType', - 'sourceContentType' : 'ccm:sourceContentType', # @TODO find suited data field + "discipline": "ccm:taxonid", + "intendedEndUserRole": "ccm:educationalintendedenduserrole", + "educationalContext": "ccm:educationalcontext", + "learningResourceType": "ccm:educationallearningresourcetype", + "sourceContentType": "ccm:sourceContentType", + "toolCategory": "ccm:toolCategory", } - for key in item['valuespaces']: - spaces[valuespaceMapping[key]] = item['valuespaces'][key] - if 'typicalagerange' in item['lom']['educational']: - spaces['ccm:educationaltypicalagerange_from'] = item['lom']['educational']['typicalagerange']['from'] - 
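
(Editorial aside, not part of the patch: the lifecycle handling a few lines above folds each contributor into a vCard string before it is stored in the matching ccm:lifecyclecontributer_* property. A minimal sketch of that conversion with vobject, using an invented person dict; the import path for VCardBehavior is an assumption, and the ORG handling simply mirrors the workaround used in the patch.)

    import vobject
    from vobject.vcard import VCardBehavior  # assumed import, not shown in the diff

    person = {"role": "author", "firstName": "Jane", "lastName": "Doe",
              "organization": "Example e.V.", "url": "https://example.org"}  # hypothetical input

    card = vobject.vCard()
    card.add("n").value = vobject.vcard.Name(family=person["lastName"], given=person["firstName"])
    # fn falls back to the person's name when no organization is given
    card.add("fn").value = person["organization"] or (person["firstName"] + " " + person["lastName"]).strip()
    org = card.add("org")
    org.behavior = VCardBehavior.defaultBehavior  # mirrors the patch's fix for split ORG values
    org.value = person["organization"]
    card.add("url").value = person["url"]
    vcard_string = card.serialize()  # this string ends up in spaces["ccm:lifecyclecontributer_author"]
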
spaces['ccm:educationaltypicalagerange_to'] = item['lom']['educational']['typicalagerange']['to'] + for key in item["valuespaces"]: + spaces[valuespaceMapping[key]] = item["valuespaces"][key] + if "typicalagerange" in item["lom"]["educational"]: + spaces["ccm:educationaltypicalagerange_from"] = item["lom"]["educational"][ + "typicalagerange" + ]["from"] + spaces["ccm:educationaltypicalagerange_to"] = item["lom"]["educational"][ + "typicalagerange" + ]["to"] # intendedEndUserRole = Field(output_processor=JoinMultivalues()) # discipline = Field(output_processor=JoinMultivalues()) # educationalContext = Field(output_processor=JoinMultivalues()) # learningResourceType = Field(output_processor=JoinMultivalues()) # sourceContentType = Field(output_processor=JoinMultivalues()) - spaces['cm:edu_metadataset'] = 'mds_oeh' - spaces['cm:edu_forcemetadataset'] = 'true' - + spaces["cm:edu_metadataset"] = "mds_oeh" + spaces["cm:edu_forcemetadataset"] = "true" + for key in spaces: if type(spaces[key]) is tuple: spaces[key] = list([x for y in spaces[key] for x in y]) @@ -194,85 +283,141 @@ def transformItem(self, uuid, spider, item): spaces[key] = [spaces[key]] return spaces + def createGroupsIfNotExists(self, groups, type: CreateGroupType): for group in groups: if type == EduSharing.CreateGroupType.MediaCenter: - uuid = EduSharingConstants.GROUP_PREFIX + EduSharingConstants.MEDIACENTER_PREFIX + group + uuid = ( + EduSharingConstants.GROUP_PREFIX + + EduSharingConstants.MEDIACENTER_PREFIX + + group + ) else: uuid = EduSharingConstants.GROUP_PREFIX + group if uuid in EduSharing.groupCache: - logging.debug('Group ' + uuid + ' is existing in cache, no need to create') + logging.debug( + "Group " + uuid + " is existing in cache, no need to create" + ) continue - logging.debug('Group ' + uuid + ' is not in cache, checking consistency...') + logging.debug("Group " + uuid + " is not in cache, checking consistency...") try: group = EduSharing.iamApi.get_group(EduSharingConstants.HOME, uuid) - logging.info('Group ' + uuid + ' was found in edu-sharing (cache inconsistency), no need to create') + logging.info( + "Group " + + uuid + + " was found in edu-sharing (cache inconsistency), no need to create" + ) EduSharing.groupCache.append(uuid) continue except ApiException as e: - logging.info('Group ' + uuid + ' was not found in edu-sharing, creating it') + logging.info( + "Group " + uuid + " was not found in edu-sharing, creating it" + ) pass if type == EduSharing.CreateGroupType.MediaCenter: - result = EduSharing.mediacenterApi.create_mediacenter(repository = EduSharingConstants.HOME, mediacenter = group, body = { - 'mediacenter': {}, - 'displayName': group - }) - EduSharing.groupCache.append(result['authorityName']) + result = EduSharing.mediacenterApi.create_mediacenter( + repository=EduSharingConstants.HOME, + mediacenter=group, + body={"mediacenter": {}, "displayName": group}, + ) + EduSharing.groupCache.append(result["authorityName"]) else: - result = EduSharing.iamApi.create_group(repository = EduSharingConstants.HOME, group = group, body = {}) - EduSharing.groupCache.append(result['authorityName']) + result = EduSharing.iamApi.create_group( + repository=EduSharingConstants.HOME, group=group, body={} + ) + EduSharing.groupCache.append(result["authorityName"]) def setNodePermissions(self, uuid, item): - if 'permissions' in item: + if "permissions" in item: permissions = { - "inherited": True, # let inherited = true to add additional permissions via edu-sharing - "permissions": [] + "inherited": True, # let 
inherited = true to add additional permissions via edu-sharing + "permissions": [], } - public = item['permissions']['public'] + public = item["permissions"]["public"] if public == True: - if 'groups' in item['permissions'] or 'mediacenters' in item['permissions']: - logging.error('Invalid state detected: Permissions public is set to true but groups or mediacenters are also set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!') + if ( + "groups" in item["permissions"] + or "mediacenters" in item["permissions"] + ): + logging.error( + "Invalid state detected: Permissions public is set to true but groups or mediacenters are also set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!" + ) return - permissions['permissions'].append({ - "authority": { - "authorityName": EduSharingConstants.GROUP_EVERYONE, - "authorityType": EduSharingConstants.AUTHORITYTYPE_EVERYONE - }, - "permissions": [ EduSharingConstants.PERMISSION_CONSUMER, EduSharingConstants.PERMISSION_CCPUBLISH ] - }) + permissions["permissions"].append( + { + "authority": { + "authorityName": EduSharingConstants.GROUP_EVERYONE, + "authorityType": EduSharingConstants.AUTHORITYTYPE_EVERYONE, + }, + "permissions": [ + EduSharingConstants.PERMISSION_CONSUMER, + EduSharingConstants.PERMISSION_CCPUBLISH, + ], + } + ) else: # Makes not much sense, may no permissions at all should be set - #if not 'groups' in item['permissions'] and not 'mediacenters' in item['permissions']: + # if not 'groups' in item['permissions'] and not 'mediacenters' in item['permissions']: # logging.error('Invalid state detected: Permissions public is set to false but neither groups or mediacenters are set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. 
No permissions will be set!') # return mergedGroups = [] - if 'groups' in item['permissions']: - if 'autoCreateGroups' in item['permissions'] and item['permissions']['autoCreateGroups'] == True: - self.createGroupsIfNotExists(item['permissions']['groups'], EduSharing.CreateGroupType.Regular) - mergedGroups = mergedGroups + list(map(lambda x: EduSharingConstants.GROUP_PREFIX + x, item['permissions']['groups'])) - if 'mediacenters' in item['permissions']: - if 'autoCreateMediacenters' in item['permissions'] and item['permissions']['autoCreateMediacenters'] == True: - self.createGroupsIfNotExists(item['permissions']['mediacenters'], EduSharing.CreateGroupType.MediaCenter) - mergedGroups = mergedGroups + list(map(lambda x: EduSharingConstants.GROUP_PREFIX + EduSharingConstants.MEDIACENTER_PROXY_PREFIX + x, item['permissions']['mediacenters'])) + if "groups" in item["permissions"]: + if ( + "autoCreateGroups" in item["permissions"] + and item["permissions"]["autoCreateGroups"] == True + ): + self.createGroupsIfNotExists( + item["permissions"]["groups"], + EduSharing.CreateGroupType.Regular, + ) + mergedGroups = mergedGroups + list( + map( + lambda x: EduSharingConstants.GROUP_PREFIX + x, + item["permissions"]["groups"], + ) + ) + if "mediacenters" in item["permissions"]: + if ( + "autoCreateMediacenters" in item["permissions"] + and item["permissions"]["autoCreateMediacenters"] == True + ): + self.createGroupsIfNotExists( + item["permissions"]["mediacenters"], + EduSharing.CreateGroupType.MediaCenter, + ) + mergedGroups = mergedGroups + list( + map( + lambda x: EduSharingConstants.GROUP_PREFIX + + EduSharingConstants.MEDIACENTER_PROXY_PREFIX + + x, + item["permissions"]["mediacenters"], + ) + ) for group in mergedGroups: - permissions['permissions'].append({ - "authority": { - "authorityName": group, - "authorityType": EduSharingConstants.AUTHORITYTYPE_GROUP - }, - "permissions": [ EduSharingConstants.PERMISSION_CONSUMER, EduSharingConstants.PERMISSION_CCPUBLISH ] - }) + permissions["permissions"].append( + { + "authority": { + "authorityName": group, + "authorityType": EduSharingConstants.AUTHORITYTYPE_GROUP, + }, + "permissions": [ + EduSharingConstants.PERMISSION_CONSUMER, + EduSharingConstants.PERMISSION_CCPUBLISH, + ], + } + ) if not self.setPermissions(uuid, permissions): - logging.error('Failed to set permissions, please check that the given groups/mediacenters are existing in the repository or set the autoCreate mode to true') - logging.error(item['permissions']) + logging.error( + "Failed to set permissions, please check that the given groups/mediacenters are existing in the repository or set the autoCreate mode to true" + ) + logging.error(item["permissions"]) def insertItem(self, spider, uuid, item): - node = self.syncNode(spider, 'ccm:io' ,self.transformItem(uuid, spider, item)) - self.setNodePermissions(node['ref']['id'], item) - self.setNodePreview(node['ref']['id'], item) - self.setNodeText(node['ref']['id'], item) - + node = self.syncNode(spider, "ccm:io", self.transformItem(uuid, spider, item)) + self.setNodePermissions(node["ref"]["id"], item) + self.setNodePreview(node["ref"]["id"], item) + self.setNodeText(node["ref"]["id"], item) def updateItem(self, spider, uuid, item): self.insertItem(spider, uuid, item) @@ -280,28 +425,45 @@ def updateItem(self, spider, uuid, item): def initApiClient(self): if EduSharing.cookie == None: settings = get_project_settings() - auth = requests.get(settings.get('EDU_SHARING_BASE_URL') + 'rest/authentication/v1/validateSession', - auth = 
HTTPBasicAuth(settings.get('EDU_SHARING_USERNAME'), settings.get('EDU_SHARING_PASSWORD')), - headers = { 'Accept' : 'application/json'} + auth = requests.get( + settings.get("EDU_SHARING_BASE_URL") + + "rest/authentication/v1/validateSession", + auth=HTTPBasicAuth( + settings.get("EDU_SHARING_USERNAME"), + settings.get("EDU_SHARING_PASSWORD"), + ), + headers={"Accept": "application/json"}, ) - isAdmin = json.loads(auth.text)['isAdmin'] + isAdmin = json.loads(auth.text)["isAdmin"] if isAdmin: - EduSharing.cookie = auth.headers['SET-COOKIE'].split(';')[0] + EduSharing.cookie = auth.headers["SET-COOKIE"].split(";")[0] configuration = Configuration() - configuration.host = settings.get('EDU_SHARING_BASE_URL') + 'rest' - EduSharing.apiClient = ESApiClient(configuration, cookie = EduSharing.cookie, header_name = 'Accept', header_value = 'application/json') + configuration.host = settings.get("EDU_SHARING_BASE_URL") + "rest" + EduSharing.apiClient = ESApiClient( + configuration, + cookie=EduSharing.cookie, + header_name="Accept", + header_value="application/json", + ) EduSharing.bulkApi = BULKV1Api(EduSharing.apiClient) EduSharing.iamApi = IAMV1Api(EduSharing.apiClient) EduSharing.mediacenterApi = MEDIACENTERV1Api(EduSharing.apiClient) EduSharing.nodeApi = NODEV1Api(EduSharing.apiClient) EduSharing.groupCache = list( - map(lambda x: x['authorityName'], - EduSharing.iamApi.search_groups(EduSharingConstants.HOME, '', max_items = 1000000)['groups'] - )) - logging.debug('Built up edu-sharing group cache', EduSharing.groupCache) + map( + lambda x: x["authorityName"], + EduSharing.iamApi.search_groups( + EduSharingConstants.HOME, "", max_items=1000000 + )["groups"], + ) + ) + logging.debug("Built up edu-sharing group cache", EduSharing.groupCache) return - raise Exception('Could not authentify as admin at edu-sharing. Please check your settings for repository ' + settings.get('EDU_SHARING_BASE_URL')) - + raise Exception( + "Could not authentify as admin at edu-sharing. 
Please check your settings for repository " + + settings.get("EDU_SHARING_BASE_URL") + ) + def buildUUID(self, url): return str(uuid.uuid5(uuid.NAMESPACE_URL, url)) @@ -310,14 +472,20 @@ def uuidExists(self, uuid): def findItem(self, id, spider): properties = { - 'ccm:replicationsource': [spider.name], - 'ccm:replicationsourceid': [id], + "ccm:replicationsource": [spider.name], + "ccm:replicationsourceid": [id], } try: response = EduSharing.bulkApi.find(properties) - properties = response['node']['properties'] - if 'ccm:replicationsourcehash' in properties and 'ccm:replicationsourceuuid' in properties: - return [properties['ccm:replicationsourceuuid'][0], properties['ccm:replicationsourcehash'][0]] + properties = response["node"]["properties"] + if ( + "ccm:replicationsourcehash" in properties + and "ccm:replicationsourceuuid" in properties + ): + return [ + properties["ccm:replicationsourceuuid"][0], + properties["ccm:replicationsourcehash"][0], + ] except ApiException as e: if e.status == 404: pass @@ -327,9 +495,9 @@ def findItem(self, id, spider): def findSource(self, spider): return True - + def createSource(self, spider): - #src = self.createNode(EduSharing.etlFolder['ref']['id'], 'ccm:map', {'cm:name' : [spider.name]}) - #EduSharing.spiderNodes[spider.name] = src - #return src + # src = self.createNode(EduSharing.etlFolder['ref']['id'], 'ccm:map', {'cm:name' : [spider.name]}) + # EduSharing.spiderNodes[spider.name] = src + # return src return None diff --git a/etl/converter/items.py b/etl/converter/items.py index 54eeb28b..e2814678 100644 --- a/etl/converter/items.py +++ b/etl/converter/items.py @@ -12,6 +12,7 @@ from w3lib.html import remove_tags, replace_escape_chars import logging + def replace_processor(value): if value is not None: return replace_escape_chars(remove_tags(value)).strip() @@ -20,8 +21,7 @@ def replace_processor(value): class JoinMultivalues(object): - - def __init__(self, separator=u' '): + def __init__(self, separator=u" "): self.separator = separator def __call__(self, values): @@ -31,6 +31,8 @@ def __call__(self, values): class MutlilangItem(Item): key = Field() de_DE = Field() + + class LomGeneralItem(Item): identifier = Field() title = Field() @@ -41,13 +43,16 @@ class LomGeneralItem(Item): aggregationLevel = Field() description = Field() + class LomLifecycleItem(Item): role = Field() firstName = Field() lastName = Field() organization = Field() + url = Field() uuid = Field() + class LomTechnicalItem(Item): format = Field() size = Field() @@ -57,10 +62,12 @@ class LomTechnicalItem(Item): otherPlatformRequirements = Field() duration = Field() + class LomAgeRangeItem(Item): fromRange = Field() toRange = Field() + class LomEducationalItem(Item): interactivityType = Field() # Please use valuespaces.learningResourceType @@ -68,7 +75,9 @@ class LomEducationalItem(Item): interactivityLevel = Field() semanticDensity = Field() # Please use valuespaces.intendedEndUserRole - intendedEndUserRole = Field(serializer=MutlilangItem, output_processor=JoinMultivalues()) + intendedEndUserRole = Field( + serializer=MutlilangItem, output_processor=JoinMultivalues() + ) # Please use valuespaces.educationalContext # context = Field() typicalAgeRange = Field(serializer=LomAgeRangeItem) @@ -77,11 +86,13 @@ class LomEducationalItem(Item): description = Field() language = Field() -#please use the seperate license data -#class LomRightsItem(Item): - #cost = Field() - #coyprightAndOtherRestrictions = Field() - #description = Field() + +# please use the seperate license data +# class 
LomRightsItem(Item): +# cost = Field() +# coyprightAndOtherRestrictions = Field() +# description = Field() + class LomClassificationItem(Item): cost = Field() @@ -90,34 +101,43 @@ class LomClassificationItem(Item): description = Field() keyword = Field() + class LomBaseItem(Item): general = Field(serializer=LomGeneralItem) lifecycle = Field(serializer=LomLifecycleItem, output_processor=JoinMultivalues()) technical = Field(serializer=LomTechnicalItem) educational = Field(serializer=LomEducationalItem) - #rights = Field(serializer=LomRightsItem) + # rights = Field(serializer=LomRightsItem) classification = Field(serializer=LomClassificationItem) + class ResponseItem(Item): status = Field() url = Field() html = Field() text = Field() headers = Field() + + class ValuespaceItem(Item): intendedEndUserRole = Field(output_processor=JoinMultivalues()) discipline = Field(output_processor=JoinMultivalues()) educationalContext = Field(output_processor=JoinMultivalues()) learningResourceType = Field(output_processor=JoinMultivalues()) sourceContentType = Field(output_processor=JoinMultivalues()) + toolCategory = Field(output_processor=JoinMultivalues()) + class LicenseItem(Item): - # url to a license description url = Field() - # a internal constants for this license + "url to a license description" internal = Field() - # a value of OerType (if empty, will be mapped via the given url or internal value) + "a internal constants for this license" oer = Field() + "a value of OerType (if empty, will be mapped via the given url or internal value)" + author = Field() + "an author freetext (basically, how the author should be named in case this is a by-license" + class PermissionItem(Item): public = Field() @@ -130,6 +150,8 @@ class PermissionItem(Item): "Should global groups be created if they don't exist" autoCreateMediacenters = Field() "Should media centers be created if they don't exist" + + class BaseItem(Item): sourceId = Field() uuid = Field() @@ -138,6 +160,8 @@ class BaseItem(Item): collection = Field(output_processor=JoinMultivalues()) "id of collections this entry should be placed into" type = Field() + origin = Field() + "in case it was fetched from a referatorium, the real origin name may be included here" response = Field(serializer=ResponseItem) ranking = Field() fulltext = Field() @@ -151,47 +175,71 @@ class BaseItem(Item): license = Field(serializer=LicenseItem) publisher = Field() + class BaseItemLoader(ItemLoader): default_item_class = BaseItem - #default_input_processor = MapCompose(replace_processor) + # default_input_processor = MapCompose(replace_processor) default_output_processor = TakeFirst() + class MutlilangItemLoader(ItemLoader): default_item_class = MutlilangItem default_output_processor = TakeFirst() + + class ValuespaceItemLoader(ItemLoader): default_item_class = ValuespaceItem default_output_processor = TakeFirst() + + class LicenseItemLoader(ItemLoader): default_item_class = LicenseItem default_output_processor = TakeFirst() + + class LomBaseItemloader(ItemLoader): default_item_class = LomBaseItem default_output_processor = TakeFirst() + + class ResponseItemLoader(ItemLoader): default_item_class = ResponseItem default_output_processor = TakeFirst() + + class LomGeneralItemloader(ItemLoader): default_item_class = LomGeneralItem default_output_processor = TakeFirst() + + class LomLifecycleItemloader(ItemLoader): default_item_class = LomLifecycleItem default_output_processor = TakeFirst() + + class LomTechnicalItemLoader(ItemLoader): default_item_class = LomTechnicalItem 
default_output_processor = TakeFirst() + + class LomAgeRangeItemLoader(ItemLoader): default_item_class = LomAgeRangeItem default_output_processor = TakeFirst() + + class LomEducationalItemLoader(ItemLoader): default_item_class = LomEducationalItem default_output_processor = TakeFirst() -#class LomRightsItemLoader(ItemLoader): + + +# class LomRightsItemLoader(ItemLoader): # default_item_class = LomRightsItem # default_output_processor = TakeFirst() class LomClassificationItemLoader(ItemLoader): default_item_class = LomClassificationItem default_output_processor = TakeFirst() + + class PermissionItemLoader(ItemLoader): default_item_class = PermissionItem - default_output_processor = TakeFirst() \ No newline at end of file + default_output_processor = TakeFirst() diff --git a/etl/converter/middlewares.py b/etl/converter/middlewares.py index 3654ba84..bbc9ef40 100644 --- a/etl/converter/middlewares.py +++ b/etl/converter/middlewares.py @@ -53,7 +53,7 @@ def process_start_requests(self, start_requests, spider): yield r def spider_opened(self, spider): - spider.logger.info('Spider opened: %s' % spider.name) + spider.logger.info("Spider opened: %s" % spider.name) class OerScrapyDownloaderMiddleware(object): @@ -100,4 +100,4 @@ def process_exception(self, request, exception, spider): pass def spider_opened(self, spider): - spider.logger.info('Spider opened: %s' % spider.name) + spider.logger.info("Spider opened: %s" % spider.name) diff --git a/etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py b/etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py deleted file mode 100644 index dcce13eb..00000000 --- a/etl/converter/offline_mode/mediothek_pixiothek_spider_offline.py +++ /dev/null @@ -1,107 +0,0 @@ -import json -import os -import ssl -import time -import urllib -import urllib.request -from urllib.parse import urlencode, urlparse - -import requests -import scrapy -import xmltodict -from lxml import etree -from scrapy.spiders import CrawlSpider - -# TODO: find a better solution. -import ssl -ssl._create_default_https_context = ssl._create_unverified_context - - -def encode_url_for_local(url): - return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) - -class MediothekPixiothekSpiderOffline(CrawlSpider): - name = 'mediothek_pixiothek_spider_offline' - url = 'https://www.schulportal-thueringen.de/' # the url which will be linked as the primary link to your source (should be the main url of your site) - friendlyName = 'MediothekPixiothek' # name as shown in the search ui - version = '0.1' # the version of your crawler, used to identify if a reimport is necessary - # start_urls = ['https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] - start_urls = ['file:///data/projects/schul_cloud/workspace/content_sources/mediothek_pixiothek/cache/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] - - - limit = 100 - page = 0 - - elements_count = 0 - - data_dir = "/data/projects/schul_cloud/workspace/content_sources/mediothek_pixiothek/cache" - - thumbnails_dir = data_dir + "/thumbnails" - - def __init__(self, *a, **kwargs): - # LomBase.__init__(self, **kwargs) - super().__init__(*a, **kwargs) - - def encode_url_for_local(self, url): - return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) - - def parse(self, response: scrapy.http.Response): - # Avoid stressing the API. 
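
(Editorial aside, not part of the patch: the offline helper spiders being deleted in this commit throttled requests with ad-hoc time.sleep() calls. For the regular crawlers the idiomatic Scrapy approach is to configure throttling centrally in settings.py; a minimal sketch of such settings follows. The values here are purely illustrative, the project's actual throttling defaults appear further down in this patch.)

    # settings.py sketch (illustrative values only)
    DOWNLOAD_DELAY = 0.5             # pause between requests to the same site
    AUTOTHROTTLE_ENABLED = True      # let Scrapy adapt the delay to observed latency
    AUTOTHROTTLE_START_DELAY = 1
    AUTOTHROTTLE_MAX_DELAY = 60
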
- # time.sleep(0.5) - print(response.url) - - text_response = response.body - - if not os.path.exists(self.data_dir): - os.makedirs(self.data_dir) - if not os.path.exists(self.thumbnails_dir): - os.makedirs(self.thumbnails_dir) - - self.save_json_array_data(text_response) - - elements = json.loads(response.body_as_unicode()) - for i, element in enumerate(elements): - if "previewImageUrl" in element: - time.sleep(0.5) - self.store_thumbnails(element['previewImageUrl']) - - - def save_json_array_data(self, text_response): - # Save the JSON array data file. - resource_path = "/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" - # Create the subdirectories - directories = self.data_dir + "/tip-ms/api/public_mediothek_metadatenexport" - if not os.path.exists(directories): - os.makedirs(directories) - with open(self.data_dir + resource_path, "wb") as fout: - fout.write(text_response) - - def store_thumbnails(self, thumbnail_url): - urlparse_result = urlparse(thumbnail_url) - thumbnail_path = urlparse_result.path - if urlparse_result.query != "": - thumbnail_path += "&" + urlparse_result.query - - # Create the subdirectories - directories = self.thumbnails_dir + os.path.dirname(os.path.abspath(thumbnail_path)) - if not os.path.exists(directories): - os.makedirs(directories) - - local_path = self.thumbnails_dir + thumbnail_path - - if not os.path.exists(local_path): - # urllib.request.urlretrieve(thumbnail_url, local_path) - self.download_and_save_image(thumbnail_url, local_path) - - def download_and_save_image(self, pic_url, local_path): - with open(local_path, 'wb') as handle: - response = requests.get(pic_url, stream=True, allow_redirects=True) - - if not response.ok: - print(response) - - for block in response.iter_content(1024): - if not block: - break - - handle.write(block) \ No newline at end of file diff --git a/etl/converter/offline_mode/merlin_spider_offline.py b/etl/converter/offline_mode/merlin_spider_offline.py deleted file mode 100644 index b3877f26..00000000 --- a/etl/converter/offline_mode/merlin_spider_offline.py +++ /dev/null @@ -1,144 +0,0 @@ -import os -import ssl -import time -import urllib -import urllib.request -from urllib.parse import urlencode, urlparse - -import requests -import scrapy -import xmltodict -from lxml import etree -from scrapy.spiders import CrawlSpider - -# TODO: find a better solution. -import ssl -ssl._create_default_https_context = ssl._create_unverified_context - -def encode_url_for_local(url): - return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) - -class MerlinSpiderOffline(CrawlSpider): - name = 'merlin_spider_offline' - domain = 'https://merlin.nibis.de' - url = 'https://merlin.nibis.de/index.php' # the url which will be linked as the primary link to your source (should be the main url of your site) - friendlyName = 'Merlin' # name as shown in the search ui - version = '0.1' # the version of your crawler, used to identify if a reimport is necessary - apiUrl = 'https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*' # * regular expression, to represent all possible values. 
- - limit = 100 - page = 0 - - elements_count = 0 - - data_dir = "/data/projects/schul_cloud/workspace/content_sources/merlin/cache" - - thumbnails_dir = data_dir + "/thumbnails" - - def __init__(self, *a, **kwargs): - # LomBase.__init__(self, **kwargs) - super().__init__(*a, **kwargs) - - def encode_url_for_local(self, url): - return url[:url.find("?action")] + urllib.parse.quote(url[url.find("?action"):]) - - def start_requests(self): - response = 1 - while response is not None: - yield scrapy.Request(url=self.apiUrl.replace('%start', str(self.page * self.limit)) - .replace('%anzahl', str(self.limit)), - callback=self.parse_offline, headers={ - 'Accept': 'application/xml', - 'Content-Type': 'application/xml' - }) - - - def parse_offline(self, response: scrapy.http.Response): - # Avoid stressing the API. - # time.sleep(0.5) - print(response.url) - - text_response = response.body - - if not os.path.exists(self.data_dir): - os.makedirs(self.data_dir) - if not os.path.exists(self.thumbnails_dir): - os.makedirs(self.thumbnails_dir) - - - - resource_path = response.url.replace("https://merlin.nibis.de/", "") - - with open(self.data_dir + "/" + resource_path, "wb") as fout: - fout.write(text_response) - - # We would use .fromstring(response.text) if the response did not include the XML declaration: - # - root = etree.XML(response.body) - tree = etree.ElementTree(root) - - # Get the total number of possible elements - elements_total = int(tree.xpath('/root/sum')[0].text) - - # If results are returned. - elements = tree.xpath('/root/items/*') - - self.elements_count += len(elements) - - if len(elements) > 0: - for element in elements: - time.sleep(0.5) - # copyResponse = response.copy() - - element_xml_str = etree.tostring(element, pretty_print=True, encoding='unicode') - element_dict = xmltodict.parse(element_xml_str)["data"] - - if "thumbnail" in element_dict: - self.store_thumbnails(element_dict["thumbnail"]) - self.store_thumbnails(self.domain + element_dict["srcLogoUrl"]) - # self.store_thumbnails(self.domain + element_dict["logo"]) - - - # If the number of returned results is equal to the imposed limit, it means that there are more to be returned. 
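
(Editorial aside, not part of the patch: the paging logic described by the comment above, requesting batches of `anzahl` items starting at `start` until the reported total is reached, is the same pattern the live merlin_spider uses. A condensed sketch, with the URL template and header values taken from this file and a hypothetical helper name:)

    import scrapy

    API_URL = ("https://merlin.nibis.de/index.php?action=resultXml"
               "&start=%start&anzahl=%anzahl&query[stichwort]=*")
    LIMIT = 100

    def next_page_request(page: int, callback) -> scrapy.Request:
        """Build the request for one result page of the Merlin XML API."""
        url = API_URL.replace("%start", str(page * LIMIT)).replace("%anzahl", str(LIMIT))
        return scrapy.Request(url=url, callback=callback,
                              headers={"Accept": "application/xml",
                                       "Content-Type": "application/xml"})

    # In parse(): after counting the returned <items>, schedule next_page_request(page + 1, ...)
    # only while elements_count < elements_total (the <sum> element of the response).
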
- # if len(elements) == self.limit: - if self.elements_count < elements_total: - self.page += 1 - url = self.apiUrl.replace('%start', str(self.page * self.limit)).replace('%anzahl', str(self.limit)) - yield scrapy.Request(url=url, callback=self.parse_offline, headers={ - 'Accept': 'application/xml', - 'Content-Type': 'application/xml' - }) - - - def store_thumbnails(self, thumbnail_url): - - urlparse_result = urlparse(thumbnail_url) - thumbnail_path = urlparse_result.path - if urlparse_result.query != "": - thumbnail_path += "&" + urlparse_result.query - - # Create the subdirectories - directories = self.thumbnails_dir + os.path.dirname(os.path.abspath(thumbnail_path)) - if not os.path.exists(directories): - os.makedirs(directories) - - # local_path = self.thumbnails_dir + thumbnail_url.replace("https://thumbnails.merlin.nibis.de/", "") - local_path = self.thumbnails_dir + thumbnail_path - - if not os.path.exists(local_path): - # urllib.request.urlretrieve(thumbnail_url, local_path) - self.download_and_save_image(thumbnail_url, local_path) - - - def download_and_save_image(self, pic_url, local_path): - with open(local_path, 'wb') as handle: - response = requests.get(pic_url, stream=True) - - if not response.ok: - print(response) - - for block in response.iter_content(1024): - if not block: - break - - handle.write(block) \ No newline at end of file diff --git a/etl/converter/pipelines.py b/etl/converter/pipelines.py index 058fa743..db594aba 100644 --- a/etl/converter/pipelines.py +++ b/etl/converter/pipelines.py @@ -33,91 +33,123 @@ # fillup missing props by "guessing" or loading them if possible class LOMFillupPipeline: def process_item(self, item, spider): - if not 'fulltext' in item and 'text' in item['response']: - item['fulltext'] = item['response']['text'] + if not "fulltext" in item and "text" in item["response"]: + item["fulltext"] = item["response"]["text"] return item + + class FilterSparsePipeline: def process_item(self, item, spider): valid = False - if not 'location' in item['lom']['technical']: - raise DropItem('Entry ' + item['lom']['general']['title'] + ' has no technical location') + if not "location" in item["lom"]["technical"]: + raise DropItem( + "Entry " + + item["lom"]["general"]["title"] + + " has no technical location" + ) # pass through explicit uuid elements - if 'uuid' in item: + if "uuid" in item: return item try: - valid = item['lom']['general']['keyword'] + valid = item["lom"]["general"]["keyword"] except: pass try: - valid = valid or item['lom']['general']['description'] + valid = valid or item["lom"]["general"]["description"] except: pass try: - valid = valid or item['valuespaces']['learningResourceType'] + valid = valid or item["valuespaces"]["learningResourceType"] except: pass if not valid: - raise DropItem('Entry ' + item['lom']['general']['title'] + ' has neither keywords nor description') + raise DropItem( + "Entry " + + item["lom"]["general"]["title"] + + " has neither keywords nor description" + ) return item - -class NormLicensePipeline(object): + + +class NormLicensePipeline: def process_item(self, item, spider): - if 'url' in item['license'] and not 'oer' in item['license']: + if "url" in item["license"]: for key in Constants.LICENSE_MAPPINGS: - if item['license']['url'].startswith(key): - item['license']['url'] = Constants.LICENSE_MAPPINGS[key] - break - if( - item['license']['url'] == Constants.LICENSE_CC_BY_40 or - item['license']['url'] == Constants.LICENSE_CC_BY_SA_30 or - item['license']['url'] == Constants.LICENSE_CC_BY_SA_40 or - 
item['license']['url'] == Constants.LICENSE_CC_ZERO_10 - ): - item['license']['oer'] = OerType.ALL - - if 'internal' in item['license'] and not 'oer' in item['license']: - internal = item['license']['internal'].lower() - if( - 'cc-by-sa' in internal or - 'cc-0' in internal + if item["license"]["url"].startswith(key): + item["license"]["url"] = Constants.LICENSE_MAPPINGS[key] + break + if "internal" in item["license"] and ( + not "url" in item["license"] + or not item["license"]["url"] in Constants.VALID_LICENSE_URLS + ): + for key in Constants.LICENSE_MAPPINGS_INTERNAL: + if item["license"]["internal"].casefold() == key.casefold(): + item["license"]["url"] = Constants.LICENSE_MAPPINGS_INTERNAL[key] + break + + if "url" in item["license"] and not "oer" in item["license"]: + if ( + item["license"]["url"] == Constants.LICENSE_CC_BY_40 + or item["license"]["url"] == Constants.LICENSE_CC_BY_SA_30 + or item["license"]["url"] == Constants.LICENSE_CC_BY_SA_40 + or item["license"]["url"] == Constants.LICENSE_CC_ZERO_10 ): - item['license']['oer'] = OerType.ALL + item["license"]["oer"] = OerType.ALL + + if "internal" in item["license"] and not "oer" in item["license"]: + internal = item["license"]["internal"].lower() + if "cc-by-sa" in internal or "cc-0" in internal or "pdm" in internal: + item["license"]["oer"] = OerType.ALL return item - - + # convert typicalLearningTime into a integer representing seconds class ConvertTimePipeline: def process_item(self, item, spider): # map lastModified - if 'lastModified' in item: + if "lastModified" in item: try: - item['lastModified'] = float(item['lastModified']) + item["lastModified"] = float(item["lastModified"]) except: try: - date = dateutil.parser.parse(item['lastModified']) - item['lastModified'] = int(date.timestamp()) + date = dateutil.parser.parse(item["lastModified"]) + item["lastModified"] = int(date.timestamp()) except: - logging.warn('Unable to parse given lastModified date ' + item['lastModified']) - del item['lastModified'] + logging.warn( + "Unable to parse given lastModified date " + + item["lastModified"] + ) + del item["lastModified"] - if 'typicalLearningTime' in item['lom']['educational']: - time = item['lom']['educational']['typicalLearningTime'] + if "typicalLearningTime" in item["lom"]["educational"]: + time = item["lom"]["educational"]["typicalLearningTime"] mapped = None - splitted = time.split(':') + splitted = time.split(":") if len(splitted) == 3: - mapped = int(splitted[0])*60*60 + int(splitted[1])*60 + int(splitted[2]) + mapped = ( + int(splitted[0]) * 60 * 60 + + int(splitted[1]) * 60 + + int(splitted[2]) + ) if mapped == None: - logging.warn('Unable to map given typicalLearningTime '+time+' to numeric value') - item['lom']['educational']['typicalLearningTime'] = mapped + logging.warn( + "Unable to map given typicalLearningTime " + + time + + " to numeric value" + ) + item["lom"]["educational"]["typicalLearningTime"] = mapped return item + + # generate de_DE / i18n strings for valuespace fields class ProcessValuespacePipeline: valuespaces = None + def __init__(self): self.valuespaces = Valuespaces() + def process_item(self, item, spider): - json = item['valuespaces'] + json = item["valuespaces"] delete = [] for key in json: # remap to new i18n layout @@ -127,12 +159,14 @@ def process_item(self, item, spider): valuespace = self.valuespaces.data[key] found = False for v in valuespace: - labels = list(v['prefLabel'].values()) - if 'altLabel' in v: - labels = labels + list([x for y in list(v['altLabel'].values()) for x in y]) + 
labels = list(v["prefLabel"].values()) + if "altLabel" in v: + labels = labels + list( + [x for y in list(v["altLabel"].values()) for x in y] + ) labels = list(map(lambda x: x.casefold(), labels)) - if v['id'].endswith(entry) or entry.casefold() in labels: - id = v['id'] + if v["id"].endswith(entry) or entry.casefold() in labels: + id = v["id"] found = True break if found and len(list(filter(lambda x: x == id, mapped))) == 0: @@ -143,113 +177,172 @@ def process_item(self, item, spider): delete.append(key) for key in delete: del json[key] - item['valuespaces'] = json + item["valuespaces"] = json return item + # generate thumbnails class ProcessThumbnailPipeline: def scaleImage(self, img, maxSize): - w=float(img.width) - h=float(img.height) - while(w*h>maxSize): - w*=0.9 - h*=0.9 - return img.resize((int(w),int(h)), Image.ANTIALIAS).convert("RGB") + w = float(img.width) + h = float(img.height) + while w * h > maxSize: + w *= 0.9 + h *= 0.9 + return img.resize((int(w), int(h)), Image.ANTIALIAS).convert("RGB") + def process_item(self, item, spider): response = None settings = get_project_settings() - if 'thumbnail' in item: - url = item['thumbnail'] + url = False + if "thumbnail" in item: + url = item["thumbnail"] response = requests.get(url) + logging.debug( + "Loading thumbnail took " + str(response.elapsed.total_seconds()) + "s" + ) elif 'defaultThumbnail' in item: url = item['defaultThumbnail'] response = requests.get(url) - elif 'location' in item['lom']['technical'] and 'format' in item['lom']['technical'] and item['lom']['technical']['format'] == 'text/html': - response = requests.post(settings.get('SPLASH_URL')+'/render.png', json={ - 'url': item['lom']['technical']['location'], - 'wait': settings.get('SPLASH_WAIT'), - 'html5_media': 1, - 'headers': settings.get('SPLASH_HEADERS') - }) + elif ( + "location" in item["lom"]["technical"] + and "format" in item["lom"]["technical"] + and item["lom"]["technical"]["format"] == "text/html" + ): + if settings.get("SPLASH_URL"): + response = requests.post( + settings.get("SPLASH_URL") + "/render.png", + json={ + "url": item["lom"]["technical"]["location"], + "wait": settings.get("SPLASH_WAIT"), + "html5_media": 1, + "headers": settings.get("SPLASH_HEADERS"), + }, + ) + else: + logging.warning( + "No thumbnail provided and SPLASH_URL was not configured for screenshots!" + ) if response == None: - logging.error('Neither thumbnail or technical.location provided! Please provie at least one of them') + logging.error( + "Neither thumbnail or technical.location provided! 
Please provide at least one of them" + ) else: try: - if response.headers['Content-Type'] == 'image/svg+xml': - if len(response.content) > settings.get('THUMBNAIL_MAX_SIZE'): - raise Exception('SVG images can\'t be converted, and the given image exceeds the maximum allowed size (' + str(len(response.content)) + ' > ' + str(settings.get('THUMBNAIL_MAX_SIZE')) + ')') - item['thumbnail']={} - item['thumbnail']['mimetype'] = response.headers['Content-Type'] - item['thumbnail']['small'] = base64.b64encode(response.content).decode() + if response.headers["Content-Type"] == "image/svg+xml": + if len(response.content) > settings.get("THUMBNAIL_MAX_SIZE"): + raise Exception( + "SVG images can't be converted, and the given image exceeds the maximum allowed size (" + + str(len(response.content)) + + " > " + + str(settings.get("THUMBNAIL_MAX_SIZE")) + + ")" + ) + item["thumbnail"] = {} + item["thumbnail"]["mimetype"] = response.headers["Content-Type"] + item["thumbnail"]["small"] = base64.b64encode( + response.content + ).decode() else: img = Image.open(BytesIO(response.content)) small = BytesIO() - self.scaleImage(img, settings.get('THUMBNAIL_SMALL_SIZE')).save(small, 'JPEG', mode = 'RGB', quality = settings.get('THUMBNAIL_SMALL_QUALITY')) + self.scaleImage(img, settings.get("THUMBNAIL_SMALL_SIZE")).save( + small, + "JPEG", + mode="RGB", + quality=settings.get("THUMBNAIL_SMALL_QUALITY"), + ) large = BytesIO() - self.scaleImage(img, settings.get('THUMBNAIL_LARGE_SIZE')).save(large, 'JPEG', mode = 'RGB', quality = settings.get('THUMBNAIL_LARGE_QUALITY')) - item['thumbnail']={} - item['thumbnail']['mimetype'] = 'image/jpeg' - item['thumbnail']['small'] = base64.b64encode(small.getvalue()).decode() - item['thumbnail']['large'] = base64.b64encode(large.getvalue()).decode() + self.scaleImage(img, settings.get("THUMBNAIL_LARGE_SIZE")).save( + large, + "JPEG", + mode="RGB", + quality=settings.get("THUMBNAIL_LARGE_QUALITY"), + ) + item["thumbnail"] = {} + item["thumbnail"]["mimetype"] = "image/jpeg" + item["thumbnail"]["small"] = base64.b64encode( + small.getvalue() + ).decode() + item["thumbnail"]["large"] = base64.b64encode( + large.getvalue() + ).decode() except Exception as e: if url: - logging.warn('Could not read thumbnail at ' + url + ': ' + str(e) + ' (falling back to screenshot)') - if 'thumbnail' in item: - del item['thumbnail'] + logging.warn( + "Could not read thumbnail at " + + url + + ": " + + str(e) + + " (falling back to screenshot)" + ) + if "thumbnail" in item: + del item["thumbnail"] return self.process_item(item, spider) elif 'defaultThumbnail' in item: del item['defaultThumbnail'] return self.process_item(item, spider) else: - #item['thumbnail']={} - raise DropItem('No thumbnail provided or ressource was unavailable for fetching') + # item['thumbnail']={} + raise DropItem( + "No thumbnail provided or ressource was unavailable for fetching" + ) return item + class EduSharingCheckPipeline(EduSharing): def process_item(self, item, spider): - if(not 'hash' in item): - logging.error('The spider did not provide a hash on the base object. The hash is required to detect changes on an element. May use the last modified date or something similar') - item['hash'] = time.time() - + if not "hash" in item: + logging.error( + "The spider did not provide a hash on the base object. The hash is required to detect changes on an element. May use the last modified date or something similar" + ) + item["hash"] = time.time() + # @TODO: May this can be done only once? 
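
(Editorial aside, not part of the patch: the surrounding EduSharingCheckPipeline compares item["hash"] against the value stored in edu-sharing and only re-processes items whose hash changed, which is why every spider must supply one. CSVBase further down in this commit derives it from the whole source row plus the crawler version; a stripped-down sketch of that idea, with a hypothetical helper name:)

    import hashlib

    def build_hash(source_record: dict, crawler_version: str) -> str:
        """Fingerprint a source record so edits, or a crawler version bump, force a re-import."""
        digest = hashlib.md5(str(source_record).encode("utf-8")).hexdigest()
        return digest + crawler_version

    # hypothetical usage inside a spider's getHash():
    # return build_hash(response.meta["row"], self.version)
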
if self.findSource(spider) == None: - logging.info("create new source "+spider.name) + logging.info("create new source " + spider.name) self.createSource(spider) - dbItem = self.findItem(item['sourceId'], spider) + dbItem = self.findItem(item["sourceId"], spider) if dbItem: - if(item['hash'] != dbItem[1]): + if item["hash"] != dbItem[1]: logging.debug("hash has changed, continuing pipelines") else: logging.debug("hash unchanged, skip item") # self.update(item['sourceId'], spider) # for tests, we update everything for now # activate this later - #raise DropItem() + # raise DropItem() return item + class EduSharingStorePipeline(EduSharing): def process_item(self, item, spider): output = io.BytesIO() - exporter = JsonItemExporter(output, fields_to_export = ['lom','valuespaces','license','type','fulltext','ranking','lastModified','thumbnail']) + exporter = JsonItemExporter( + output, + fields_to_export=[ + "lom", + "valuespaces", + "license", + "type", + "origin", + "fulltext", + "ranking", + "lastModified", + "thumbnail", + ], + ) exporter.export_item(item) - json = output.getvalue().decode('UTF-8') - esItem = self.findItem(item['sourceId'], spider) - title = '' - if 'title' in item['lom']['general']: - title = str(item['lom']['general']['title']) - #logging.info(item['lom']) - if esItem: - entryUUID = esItem[0] - self.updateItem(spider, entryUUID, item) - logging.info('item ' + entryUUID + ' updated') - else: - entryUUID = self.buildUUID(item['response']['url']) - self.insertItem(spider, entryUUID, item) - logging.info('item ' + entryUUID + ' created') + title = "" + if "title" in item["lom"]["general"]: + title = str(item["lom"]["general"]["title"]) + entryUUID = self.buildUUID(item["response"]["url"]) + self.insertItem(spider, entryUUID, item) + logging.info("item " + entryUUID + " inserted/updated") # @TODO: We may need to handle Collections - #if 'collection' in item: + # if 'collection' in item: # for collection in item['collection']: # if dbItem: # entryUUID = dbItem[0] @@ -281,3 +374,44 @@ def process_item(self, item, spider): output.close() return item + +class DummyOutPipeline: + # Scrapy will print the item on log level DEBUG anyway + + # class Printer: + # def write(self, byte_str: bytes) -> None: + # logging.debug(byte_str.decode("utf-8")) + + # def open_spider(self, spider): + # self.exporter = JsonItemExporter( + # DummyOutPipeline.Printer(), + # fields_to_export=[ + # "collection", + # "fulltext", + # "hash", + # "lastModified", + # "license", + # "lom", + # "origin", + # "permissions", + # "publisher", + # "ranking", + # # "response", + # "sourceId", + # # "thumbnail", + # "type", + # "uuid", + # "valuespaces", + # ], + # indent=2, + # encoding="utf-8", + # ) + # self.exporter.start_exporting() + + # def close_spider(self, spider): + # self.exporter.finish_exporting() + + def process_item(self, item, spider): + logging.info("DRY RUN scraped {}".format(item["response"]["url"])) + # self.exporter.export_item(item) + return item diff --git a/etl/converter/run.py b/etl/converter/run.py new file mode 100644 index 00000000..7848d439 --- /dev/null +++ b/etl/converter/run.py @@ -0,0 +1,36 @@ +""" +Debugging entry point for VSCode. 
+ +Add the following to the `configurations` array in `.vscode/launch.json`: + + { + "name": "Run scrapy", + "type": "python", + "request": "launch", + "program": "${workspaceFolder}/etl/converter/run.py", + "console": "integratedTerminal" + } +""" + + +import os +from scrapy.cmdline import execute + + +def run(): + os.chdir(os.path.dirname(os.path.realpath(__file__))) + execute( + [ + "scrapy", + "crawl", + "-a", + "cleanrun=true", + "-o", + "out/items.json", + "wirlernenonline_spider", + ] + ) + + +if __name__ == "__main__": + run() diff --git a/etl/converter/settings.py b/etl/converter/settings.py index 95f3f735..063c4d8f 100644 --- a/etl/converter/settings.py +++ b/etl/converter/settings.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- from pathlib import Path # python3 only -from converter.env import Env +import converter.env as env + # Scrapy settings for project # # For simplicity, this file contains only settings considered important or @@ -10,14 +11,14 @@ # https://docs.scrapy.org/en/latest/topics/downloader-middleware.html # https://docs.scrapy.org/en/latest/topics/spider-middleware.html -BOT_NAME = 'converter_search_idx' +BOT_NAME = "converter_search_idx" -SPIDER_MODULES = ['converter.spiders'] -NEWSPIDER_MODULE = 'converter.spiders' +SPIDER_MODULES = ["converter.spiders"] +NEWSPIDER_MODULE = "converter.spiders" -LOG_FILE = Env.get("LOG_FILE", True) -LOG_LEVEL = Env.get("LOG_LEVEL") -LOG_FORMATTER = 'converter.custom_log_formatter.CustomLogFormatter' +LOG_FILE = env.get("LOG_FILE", allow_null=True) +LOG_LEVEL = env.get("LOG_LEVEL", default="INFO") +LOG_FORMATTER = "converter.custom_log_formatter.CustomLogFormatter" # Default behaviour for regular crawlers of non-license-controlled content # When set True, every item will have GROUP_EVERYONE attached in edu-sharing @@ -26,31 +27,37 @@ # Splash (Web Thumbnailer) # Will be rolled out via docker-compose by default -SPLASH_URL = 'http://localhost:8050' -SPLASH_WAIT = 1 # seconds to let the page load -SPLASH_HEADERS = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36'} # use chrome to not create warnings on pages - -#edu-sharing config -EDU_SHARING_BASE_URL=Env.get("EDU_SHARING_BASE_URL") -EDU_SHARING_USERNAME=Env.get("EDU_SHARING_USERNAME") -EDU_SHARING_PASSWORD=Env.get("EDU_SHARING_PASSWORD") +SPLASH_URL = ( + None if env.get_bool("DISABLE_SPLASH", default=False) else "http://localhost:8050" +) +SPLASH_WAIT = 1 # seconds to let the page load +SPLASH_HEADERS = { + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36" +} # use chrome to not create warnings on pages + +# edu-sharing config +EDU_SHARING_BASE_URL = env.get("EDU_SHARING_BASE_URL") +EDU_SHARING_USERNAME = env.get("EDU_SHARING_USERNAME") +EDU_SHARING_PASSWORD = env.get("EDU_SHARING_PASSWORD") # Thumbnail config -THUMBNAIL_SMALL_SIZE = 250*250 +THUMBNAIL_SMALL_SIZE = 250 * 250 THUMBNAIL_SMALL_QUALITY = 40 -THUMBNAIL_LARGE_SIZE = 800*800 +THUMBNAIL_LARGE_SIZE = 800 * 800 THUMBNAIL_LARGE_QUALITY = 60 -THUMBNAIL_MAX_SIZE = 50*1024 # max size for images that can not be converted (e.g. svg) +THUMBNAIL_MAX_SIZE = ( + 50 * 1024 +) # max size for images that can not be converted (e.g. 
svg) # Crawl responsibly by identifying yourself (and your website) on the user-agent -#USER_AGENT = 'converter_search_idx (+http://www.yourdomain.com)' +# USER_AGENT = 'converter_search_idx (+http://www.yourdomain.com)' # Obey robots.txt rules ROBOTSTXT_OBEY = True # Configure maximum concurrent requests performed by Scrapy (default: 16) -#CONCURRENT_REQUESTS = 32 +# CONCURRENT_REQUESTS = 32 # Configure a delay for requests for the same website (default: 0) # See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay @@ -65,67 +72,92 @@ # CONCURRENT_REQUESTS_PER_IP = 16 # Disable cookies (enabled by default) -#COOKIES_ENABLED = False +# COOKIES_ENABLED = False # Disable Telnet Console (enabled by default) -#TELNETCONSOLE_ENABLED = False +# TELNETCONSOLE_ENABLED = False # Override the default request headers: -#DEFAULT_REQUEST_HEADERS = { +# DEFAULT_REQUEST_HEADERS = { # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', # 'Accept-Language': 'en', -#} +# } # Enable or disable spider middlewares # See https://docs.scrapy.org/en/latest/topics/spider-middleware.html -#SPIDER_MIDDLEWARES = { +# SPIDER_MIDDLEWARES = { # 'converter.middlewares.OerScrapySpiderMiddleware': 543, -#} +# } # Enable or disable downloader middlewares # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html -#DOWNLOADER_MIDDLEWARES = { +# DOWNLOADER_MIDDLEWARES = { # 'converter.middlewares.OerScrapyDownloaderMiddleware': 543, -#} +# } # Enable or disable extensions # See https://docs.scrapy.org/en/latest/topics/extensions.html EXTENSIONS = { - # 'scrapy.extensions.telnet.TelnetConsole': None, - # 'scrapy.extensions.closespider.CLOSESPIDER_PAGECOUNT': 4, + # 'scrapy.extensions.telnet.TelnetConsole': None, + # 'scrapy.extensions.closespider.CLOSESPIDER_PAGECOUNT': 4, } # Configure item pipelines # See https://docs.scrapy.org/en/latest/topics/item-pipeline.html ITEM_PIPELINES = { - 'converter.pipelines.EduSharingCheckPipeline': 0, - 'converter.pipelines.FilterSparsePipeline': 25, - 'converter.pipelines.LOMFillupPipeline': 100, - 'converter.pipelines.NormLicensePipeline': 125, - 'converter.pipelines.ConvertTimePipeline': 200, - 'converter.pipelines.ProcessValuespacePipeline': 250, - 'converter.pipelines.ProcessThumbnailPipeline': 300, - #'converter.pipelines.NormLicensePipeline': 100, - 'converter.pipelines.EduSharingStorePipeline': 1000, + "converter.pipelines.EduSharingCheckPipeline": 0, + "converter.pipelines.FilterSparsePipeline": 25, + "converter.pipelines.LOMFillupPipeline": 100, + "converter.pipelines.NormLicensePipeline": 125, + "converter.pipelines.ConvertTimePipeline": 200, + "converter.pipelines.ProcessValuespacePipeline": 250, + "converter.pipelines.ProcessThumbnailPipeline": 300, + "converter.pipelines.DummyOutPipeline" + if env.get_bool("DRY_RUN", default=False) + else "converter.pipelines.EduSharingStorePipeline": 1000, } # Enable and configure the AutoThrottle extension (disabled by default) # See https://docs.scrapy.org/en/latest/topics/autothrottle.html -AUTOTHROTTLE_ENABLED = True +AUTOTHROTTLE_ENABLED = False # The initial download delay AUTOTHROTTLE_START_DELAY = 1 # The maximum download delay to be set in case of high latencies AUTOTHROTTLE_MAX_DELAY = 60 # The average number of requests Scrapy should be sending in parallel to # each remote server -#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0 +# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0 # Enable showing throttling stats for every response received: AUTOTHROTTLE_DEBUG = False # Enable and configure HTTP 
caching (disabled by default) # See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings -#HTTPCACHE_ENABLED = True -#HTTPCACHE_EXPIRATION_SECS = 0 -#HTTPCACHE_DIR = 'httpcache' -#HTTPCACHE_IGNORE_HTTP_CODES = [] -#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage' +# HTTPCACHE_ENABLED = True +# HTTPCACHE_EXPIRATION_SECS = 0 +# HTTPCACHE_DIR = 'httpcache' +# HTTPCACHE_IGNORE_HTTP_CODES = [] +# HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage' + +# Enables useful test exports with `scrapy crawl -o my-test-output.json ` +FEED_EXPORT_FIELDS = [ + "collection", + "fulltext", + "hash", + "lastModified", + "license", + "lom", + "origin", + "permissions", + "publisher", + "ranking", + # Response cannot be serialized since it has `bytes` keys + # "response", + "sourceId", + # Too much clutter + # "thumbnail", + "type", + "uuid", + "valuespaces", +] +FEED_EXPORT_INDENT = 2 +FEED_EXPORT_ENCODING = "utf-8" diff --git a/etl/converter/spiders/br_rss_spider.py b/etl/converter/spiders/br_rss_spider.py index bf34149f..7c2db710 100644 --- a/etl/converter/spiders/br_rss_spider.py +++ b/etl/converter/spiders/br_rss_spider.py @@ -2,22 +2,22 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.rss_list_base import RSSListBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.rss_list_base import RSSListBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import Constants; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import Constants # Spider to fetch RSS from planet schule class BRRSSSpider(RSSListBase): - name='br_rss_spider' - friendlyName='Bayerischer Rundfunk' - url = 'https://www.br.de/' - version = '0.1.0' + name = "br_rss_spider" + friendlyName = "Bayerischer Rundfunk" + url = "https://www.br.de/" + version = "0.1.0" - def __init__(self, **kwargs): - RSSListBase.__init__(self, 'csv/br_rss.csv', **kwargs) + def __init__(self, **kwargs): + RSSListBase.__init__(self, "csv/br_rss.csv", **kwargs) diff --git a/etl/converter/spiders/csv_base.py b/etl/converter/spiders/csv_base.py index a8f514a6..b2a63506 100644 --- a/etl/converter/spiders/csv_base.py +++ b/etl/converter/spiders/csv_base.py @@ -3,42 +3,53 @@ import time import logging from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.valuespace_helper import ValuespaceHelper; +from converter.spiders.lom_base import LomBase +from converter.valuespace_helper import ValuespaceHelper import csv import hashlib # rss crawler with a list of entries to crawl and map class CSVBase(LomBase): # column names supported: - COLUMN_URL = 'url' - COLUMN_TITLE = 'title' - COLUMN_DESCRIPTION = 'description' - COLUMN_TYPE = 'type' - COLUMN_THUMBNAIL = 'thumbnail' - COLUMN_KEYWORD = 'keyword' - COLUMN_EDUCATIONAL_CONTEXT = 'educationalContext' - COLUMN_TYPICAL_AGE_RANGE_FROM = 'typicalAgeRangeFrom' - COLUMN_TYPICAL_AGE_RANGE_TO = 'typicalAgeRangeTo' - COLUMN_DISCIPLINE = 'discipline' - COLUMN_LEARNING_RESOURCE_TYPE = 'learningResourceType' - COLUMN_LANGUAGE = 'language' - COLUMN_COLLECTION = 'collection' - 
COLUMN_LICENSE = 'license' + COLUMN_URL = "url" + COLUMN_TITLE = "title" + COLUMN_SOURCE_TITLE = "sourceTitle" + COLUMN_SOURCE_URL = "sourceUrl" + COLUMN_DESCRIPTION = "description" + COLUMN_TYPE = "type" + COLUMN_THUMBNAIL = "thumbnail" + COLUMN_KEYWORD = "keyword" + COLUMN_EDUCATIONAL_CONTEXT = "educationalContext" + COLUMN_TYPICAL_AGE_RANGE_FROM = "typicalAgeRangeFrom" + COLUMN_TYPICAL_AGE_RANGE_TO = "typicalAgeRangeTo" + COLUMN_DISCIPLINE = "discipline" + COLUMN_LEARNING_RESOURCE_TYPE = "learningResourceType" + COLUMN_LANGUAGE = "language" + COLUMN_COLLECTION = "collection" + COLUMN_LICENSE = "license" mappings = None + def transform(self, row): transformed = {} i = 0 for key in row: transformed[self.mappings[i]] = { - 'text': key.strip(), - 'list': list(map(lambda x: x.strip(),key.split(";"))) + "text": key.strip(), + "list": list(map(lambda x: x.strip(), key.split(";"))), } - if len(list(filter(lambda x: x != '', transformed[self.mappings[i]]['list']))) == 0: - transformed[self.mappings[i]]['list'] = None + if ( + len( + list( + filter(lambda x: x != "", transformed[self.mappings[i]]["list"]) + ) + ) + == 0 + ): + transformed[self.mappings[i]]["list"] = None i += 1 return transformed - def readCSV(self, csv, skipLines = 1): + + def readCSV(self, csv, skipLines=1): data = [] i = 0 for row in csv: @@ -50,56 +61,85 @@ def readCSV(self, csv, skipLines = 1): continue data.append(self.transform(row)) return data + def getUri(self, response): - return response.meta['row'][CSVBase.COLUMN_URL]['text'] + return response.meta["row"][CSVBase.COLUMN_URL]["text"] def getId(self, response): - return response.meta['row'][CSVBase.COLUMN_URL]['text'] + return response.meta["row"][CSVBase.COLUMN_URL]["text"] def getHash(self, response): m = hashlib.md5() - m.update(str(response.meta['row']).encode('utf-8')) + m.update(str(response.meta["row"]).encode("utf-8")) return m.hexdigest() + self.version def getBase(self, response): base = LomBase.getBase(self, response) - base.add_value('thumbnail', response.meta['row'][CSVBase.COLUMN_THUMBNAIL]['text']) - base.add_value('collection', response.meta['row'][CSVBase.COLUMN_COLLECTION]['list']) - base.replace_value('type', response.meta['row'][CSVBase.COLUMN_TYPE]['text']) + base.add_value( + "thumbnail", response.meta["row"][CSVBase.COLUMN_THUMBNAIL]["text"] + ) + base.add_value( + "collection", response.meta["row"][CSVBase.COLUMN_COLLECTION]["list"] + ) + base.replace_value("type", response.meta["row"][CSVBase.COLUMN_TYPE]["text"]) return base - + def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) - general.add_value('title', response.meta['row'][CSVBase.COLUMN_TITLE]['text']) - general.replace_value('language', response.meta['row'][CSVBase.COLUMN_LANGUAGE]['text']) - general.add_value('keyword', response.meta['row'][CSVBase.COLUMN_KEYWORD]['list']) - general.add_value('description', response.meta['row'][CSVBase.COLUMN_DESCRIPTION]['text']) + general.add_value("title", response.meta["row"][CSVBase.COLUMN_TITLE]["text"]) + general.replace_value( + "language", response.meta["row"][CSVBase.COLUMN_LANGUAGE]["text"] + ) + general.add_value( + "keyword", response.meta["row"][CSVBase.COLUMN_KEYWORD]["list"] + ) + general.add_value( + "description", response.meta["row"][CSVBase.COLUMN_DESCRIPTION]["text"] + ) return general def getLicense(self, response): license = LomBase.getLicense(self, response) # add as url + internal to support both data formats - license.add_value('url', response.meta['row'][CSVBase.COLUMN_LICENSE]['text']) - 
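# Illustrative sketch, not taken from the patched files: a minimal stand-alone rebuild of
# the {"text": ..., "list": ...} cell mapping that CSVBase.transform() performs above, so
# the shape of response.meta["row"] is easy to see. Column names and values are examples.
def transform_row(mappings, row):
    transformed = {}
    for name, cell in zip(mappings, row):
        values = [part.strip() for part in cell.split(";")]
        transformed[name] = {
            "text": cell.strip(),
            # multi-value cells are split on ";"; an entirely empty cell becomes None
            "list": values if any(v != "" for v in values) else None,
        }
    return transformed

if __name__ == "__main__":
    demo = transform_row(
        ["url", "title", "keyword"],
        ["https://example.org/a ", "Example title", "physik; optik"],
    )
    print(demo["keyword"]["list"])  # ['physik', 'optik']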
license.add_value('internal', response.meta['row'][CSVBase.COLUMN_LICENSE]['text']) + license.add_value("url", response.meta["row"][CSVBase.COLUMN_LICENSE]["text"]) + license.add_value( + "internal", response.meta["row"][CSVBase.COLUMN_LICENSE]["text"] + ) return license + def getLOMEducational(self, response): educational = LomBase.getLOMEducational(self, response) tar = LomAgeRangeItemLoader() - response.meta['row'][CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO]['text'] - tar.add_value('fromRange',response.meta['row'][CSVBase.COLUMN_TYPICAL_AGE_RANGE_FROM]['text']) - tar.add_value('toRange',response.meta['row'][CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO]['text']) - educational.add_value('typicalAgeRange',tar.load_item()) + response.meta["row"][CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO]["text"] + tar.add_value( + "fromRange", + response.meta["row"][CSVBase.COLUMN_TYPICAL_AGE_RANGE_FROM]["text"], + ) + tar.add_value( + "toRange", response.meta["row"][CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO]["text"] + ) + educational.add_value("typicalAgeRange", tar.load_item()) return educational def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) - technical.add_value('location', response.meta['row'][CSVBase.COLUMN_URL]['text']) - technical.add_value('format', 'text/html') + technical.add_value( + "location", response.meta["row"][CSVBase.COLUMN_URL]["text"] + ) + technical.add_value("format", "text/html") return technical def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) - valuespaces.add_value('educationalContext', response.meta['row'][CSVBase.COLUMN_EDUCATIONAL_CONTEXT]['list']) - valuespaces.add_value('discipline', response.meta['row'][CSVBase.COLUMN_DISCIPLINE]['list']) - valuespaces.add_value('learningResourceType', response.meta['row'][CSVBase.COLUMN_LEARNING_RESOURCE_TYPE]['list']) + valuespaces.add_value( + "educationalContext", + response.meta["row"][CSVBase.COLUMN_EDUCATIONAL_CONTEXT]["list"], + ) + valuespaces.add_value( + "discipline", response.meta["row"][CSVBase.COLUMN_DISCIPLINE]["list"] + ) + valuespaces.add_value( + "learningResourceType", + response.meta["row"][CSVBase.COLUMN_LEARNING_RESOURCE_TYPE]["list"], + ) return valuespaces diff --git a/etl/converter/spiders/digitallearninglab_spider.py b/etl/converter/spiders/digitallearninglab_spider.py index 50525212..ba361b86 100644 --- a/etl/converter/spiders/digitallearninglab_spider.py +++ b/etl/converter/spiders/digitallearninglab_spider.py @@ -2,121 +2,149 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lrmi_base import LrmiBase; -from converter.spiders.json_base import JSONBase; +from converter.spiders.lrmi_base import LrmiBase +from converter.spiders.json_base import JSONBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import Constants; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import Constants # Spider to fetch RSS from planet schule class DigitallearninglabSpider(scrapy.Spider, LrmiBase): - name='digitallearninglab_spider' - friendlyName='digital.learning.lab' - url = 'https://digitallearninglab.de' - version = '0.1.1' - apiUrl = 'https://digitallearninglab.de/api/%type?q=&sorting=latest&page=%page' + name = "digitallearninglab_spider" + 
friendlyName = "digital.learning.lab" + url = "https://digitallearninglab.de" + version = "0.1.1" + apiUrl = "https://digitallearninglab.de/api/%type?q=&sorting=latest&page=%page" - def __init__(self, **kwargs): - LrmiBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + LrmiBase.__init__(self, **kwargs) - def mapResponse(self, response): - return LrmiBase.mapResponse(self, response) + def mapResponse(self, response): + return LrmiBase.mapResponse(self, response) - def getId(self, response): - return response.meta['item'].get('id') + def getId(self, response): + return response.meta["item"].get("id") - def getHash(self, response): - modified = self.getLRMI('dateModified', response = response) - if modified: - return modified + self.version - # fallback if lrmi was unparsable - return time.time() + def getHash(self, response): + modified = self.getLRMI("dateModified", response=response) + if modified: + return modified + self.version + # fallback if lrmi was unparsable + return time.time() - def startRequest(self, type, page): - return scrapy.Request(url = self.apiUrl.replace('%page', str(page)).replace('%type', type), callback = self.parseRequest, headers = { - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }, meta = { - 'page': page, - 'type': type - }) + def startRequest(self, type, page): + return scrapy.Request( + url=self.apiUrl.replace("%page", str(page)).replace("%type", type), + callback=self.parseRequest, + headers={"Accept": "application/json", "Content-Type": "application/json"}, + meta={"page": page, "type": type}, + ) - def start_requests(self): - yield self.startRequest('unterrichtsbausteine',1) - yield self.startRequest('tools',1) + def start_requests(self): + yield self.startRequest("unterrichtsbausteine", 1) + yield self.startRequest("tools", 1) - def parseRequest(self, response): - data = json.loads(response.body_as_unicode()) - results = data.get('results') - if results: - for item in results: - copyResponse = response.replace(url = self.url + item.get('url')) - copyResponse.meta['item'] = item - if self.hasChanged(copyResponse): - yield scrapy.Request(url = copyResponse.url, callback = self.handleEntry, meta = { - 'item': item, - 'type': response.meta['type'] - }) - yield self.startRequest(response.meta['type'],response.meta['page'] + 1) - + def parseRequest(self, response): + data = json.loads(response.body_as_unicode()) + results = data.get("results") + if results: + for item in results: + copyResponse = response.replace(url=self.url + item.get("url")) + copyResponse.meta["item"] = item + if self.hasChanged(copyResponse): + yield scrapy.Request( + url=copyResponse.url, + callback=self.handleEntry, + meta={"item": item, "type": response.meta["type"]}, + ) + yield self.startRequest( + response.meta["type"], response.meta["page"] + 1 + ) - def handleEntry(self, response): - return LrmiBase.parse(self, response) - def getType(self, response): - if response.meta['type'] == 'tools': - return Constants.TYPE_TOOL - else: - return Constants.TYPE_MATERIAL + def handleEntry(self, response): + return LrmiBase.parse(self, response) - # thumbnail is always the same, do not use the one from rss - def getBase(self, response): - base = LrmiBase.getBase(self, response) - #base.replace_value('thumbnail', self.url + '/media/' + response.meta['item'].get('image')) - base.replace_value('thumbnail', response.xpath('//img[@class="content-info__image"]/@src').get()) - base.replace_value('type', self.getType(response)) - return base + def getType(self, 
response): + if response.meta["type"] == "tools": + return Constants.TYPE_TOOL + else: + return Constants.TYPE_MATERIAL - def getLOMGeneral(self, response): - general = LrmiBase.getLOMGeneral(self, response) - general.replace_value('title', HTMLParser().unescape(response.meta['item'].get('name').strip())) - general.add_value('description', HTMLParser().unescape(response.meta['item'].get('teaser'))) - #general.add_value('keyword', list(filter(lambda x: x,map(lambda x: x.strip(), response.xpath('//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()').getall())))) - return general + # thumbnail is always the same, do not use the one from rss + def getBase(self, response): + base = LrmiBase.getBase(self, response) + # base.replace_value('thumbnail', self.url + '/media/' + response.meta['item'].get('image')) + base.replace_value( + "thumbnail", + response.xpath('//img[@class="content-info__image"]/@src').get(), + ) + base.replace_value("type", self.getType(response)) + return base - def getLOMTechnical(self, response): - technical = LrmiBase.getLOMTechnical(self, response) - technical.replace_value('format', 'text/html') - technical.replace_value('location', response.url) - return technical - - def getLicense(self, response): - license = LrmiBase.getLicense(self, response) - return license - - def getValuespaces(self, response): - valuespaces = LrmiBase.getValuespaces(self, response) - try: - range = response.xpath('//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-level")]/parent::*//text()').get().replace('Stufe', '').strip().split(' - ') - if len(range): - valuespaces.add_value('educationalContext', ValuespaceHelper.educationalContextByGrade(range)) - except: - pass - try: - discipline = response.xpath('//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-subject")]/parent::*//text()').getall() - valuespaces.add_value('discipline', discipline) - except: - pass - lrt = response.meta['item'].get('type') - valuespaces.add_value('learningResourceType', lrt) - try: - toolType = list(map(lambda x: x.strip(), response.xpath('//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-settings")]/parent::*//text()').getall())) - # @TODO: proper mapping, maybe specialised tool field? 
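# Illustrative sketch, not taken from the patched files: the paging pattern that
# DigitallearninglabSpider.startRequest()/parseRequest() implement, rebuilt with plain
# `requests` for readability. The real spider issues scrapy.Request objects instead;
# anything about the endpoint beyond the "results" key is assumed from the code above.
import requests

API_URL = "https://digitallearninglab.de/api/%type?q=&sorting=latest&page=%page"

def iter_items(content_type):
    page = 1
    while True:
        url = API_URL.replace("%type", content_type).replace("%page", str(page))
        data = requests.get(url, headers={"Accept": "application/json"}).json()
        results = data.get("results")
        if not results:  # paging stops once "results" comes back empty
            return
        for item in results:
            yield item
        page += 1

# for item in iter_items("unterrichtsbausteine"):
#     print(item.get("url"))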
- valuespaces.add_value('learningResourceType', toolType) - except: - pass - return valuespaces + def getLOMGeneral(self, response): + general = LrmiBase.getLOMGeneral(self, response) + general.replace_value( + "title", HTMLParser().unescape(response.meta["item"].get("name").strip()) + ) + general.add_value( + "description", HTMLParser().unescape(response.meta["item"].get("teaser")) + ) + # general.add_value('keyword', list(filter(lambda x: x,map(lambda x: x.strip(), response.xpath('//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()').getall())))) + return general + def getLOMTechnical(self, response): + technical = LrmiBase.getLOMTechnical(self, response) + technical.replace_value("format", "text/html") + technical.replace_value("location", response.url) + return technical + + def getLicense(self, response): + license = LrmiBase.getLicense(self, response) + return license + + def getValuespaces(self, response): + valuespaces = LrmiBase.getValuespaces(self, response) + try: + range = ( + response.xpath( + '//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-level")]/parent::*//text()' + ) + .get() + .replace("Stufe", "") + .strip() + .split(" - ") + ) + if len(range): + valuespaces.add_value( + "educationalContext", + ValuespaceHelper.educationalContextByGrade(range), + ) + except: + pass + try: + discipline = response.xpath( + '//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-subject")]/parent::*//text()' + ).getall() + valuespaces.add_value("discipline", discipline) + except: + pass + lrt = response.meta["item"].get("type") + valuespaces.add_value("learningResourceType", lrt) + try: + toolType = list( + map( + lambda x: x.strip(), + response.xpath( + '//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-settings")]/parent::*//text()' + ).getall(), + ) + ) + # @TODO: proper mapping, maybe specialised tool field? 
+ valuespaces.add_value("learningResourceType", toolType) + except: + pass + return valuespaces diff --git a/etl/converter/spiders/edu_sharing_base.py b/etl/converter/spiders/edu_sharing_base.py new file mode 100644 index 00000000..2f30c839 --- /dev/null +++ b/etl/converter/spiders/edu_sharing_base.py @@ -0,0 +1,178 @@ +import logging +import sys + +import requests + +from converter.spiders.lom_base import LomBase, LomAgeRangeItemLoader +from scrapy.http import JsonRequest +from scrapy.spiders import Spider +import scrapy +import json +import vobject +from converter.es_connector import EduSharingConstants + + +class EduSharingBase(Spider, LomBase): + friendlyName = "Edu-Sharing repository spider" + # the location of the edu-sharing rest api + apiUrl = "http://localhost/edu-sharing/rest/" + # the mds to use for the search request + mdsId = "-default-" + + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) + + def buildUrl(self, offset=0): + return ( + self.apiUrl + + "search/v1/queriesV2/-home-/" + + self.mdsId + + "/ngsearch?contentType=FILES&maxItems=100&skipCount=" + + str(offset) + + "&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-" + ) + + def search(self, offset=0): + return JsonRequest( + url=self.buildUrl(offset), + data={"criterias": [{"property": "ngsearchword", "values": [""]}]}, + callback=self.parse, + ) + + def getProperty(self, name, response): + return ( + response.meta["item"]["properties"][name] + if name in response.meta["item"]["properties"] + else None + ) + + def start_requests(self): + yield self.search() + + def parse(self, response): + data = json.loads(response.body_as_unicode()) + if len(data["nodes"]) > 0: + for item in data["nodes"]: + copyResponse = response.replace(url=item["content"]["url"]) + copyResponse.meta["item"] = item + if self.hasChanged(copyResponse): + yield LomBase.parse(self, copyResponse) + yield self.search(data["pagination"]["from"] + data["pagination"]["count"]) + + def getBase(self, response): + base = LomBase.getBase(self, response) + base.replace_value("thumbnail", response.meta["item"]["preview"]["url"]) + base.replace_value( + "origin", self.getProperty("ccm:replicationsource", response) + ) + if self.getProperty("ccm:replicationsource", response): + # imported objects usually have the content as binary text + # TODO: Sometimes, edu-sharing redirects if no local content is found, and this should be html-parsed + try: + r = requests.get(response.meta["item"]["downloadUrl"]) + if r.status_code == 200: + base.replace_value("fulltext", r.text) + except: + logging.warning( + "error fetching data from " + response.meta["item"]["downloadUrl"], + sys.exc_info()[0], + ) + else: + # try to transform using alfresco + r = requests.get( + self.apiUrl + + "/node/v1/nodes/" + + response.meta["item"]["ref"]["repo"] + + "/" + + response.meta["item"]["ref"]["id"] + + "/textContent", + headers={"Accept": "application/json"}, + ).json() + if "text" in r: + base.replace_value("fulltext", r["text"]) + + return base + + # fulltext is handled in base, response is not necessary + def mapResponse(self, response, fetchData=True): + return LomBase.mapResponse(self, response, False) + + def getId(self, response=None) -> str: + return response.meta["item"]["ref"]["id"] + + def getHash(self, response=None) -> str: + return self.version + response.meta["item"]["modifiedAt"] + + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.replace_value("title", response.meta["item"]["title"]) + 
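# Illustrative sketch, not taken from the patched files: how EduSharingBase pages through
# the ngsearch endpoint. URL layout and request body mirror buildUrl()/search() above;
# the apiUrl value is the spider's default placeholder and would normally be overridden.
API_URL = "http://localhost/edu-sharing/rest/"
MDS_ID = "-default-"

def build_url(offset=0):
    return (
        API_URL
        + "search/v1/queriesV2/-home-/"
        + MDS_ID
        + "/ngsearch?contentType=FILES&maxItems=100&skipCount="
        + str(offset)
        + "&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-"
    )

# The JSON body sent with every page request (an empty search word matches everything):
SEARCH_BODY = {"criterias": [{"property": "ngsearchword", "values": [""]}]}

# After each response the next offset is data["pagination"]["from"] + data["pagination"]["count"],
# and crawling stops once data["nodes"] comes back empty.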
general.add_value( + "keyword", self.getProperty("cclom:general_keyword", response) + ) + general.add_value( + "description", self.getProperty("cclom:general_description", response) + ) + return general + + def getLOMEducational(self, response): + educational = LomBase.getLOMEducational(self, response) + tar_from = self.getProperty("ccm:educationaltypicalagerange_from", response) + tar_to = self.getProperty("ccm:educationaltypicalagerange_to", response) + if tar_from and tar_to: + range = LomAgeRangeItemLoader() + range.add_value("from", tar_from) + range.add_value("to", tar_to) + educational.add_value("typicalAgeRange", range.load_item()) + return educational + + def getLOMLifecycle(self, response): + lifecycle = LomBase.getLOMLifecycle(self, response) + for role in EduSharingConstants.LIFECYCLE_ROLES_MAPPING.keys(): + entry = self.getProperty("ccm:lifecyclecontributer_" + role, response) + if entry and entry[0]: + # TODO: we currently only support one author per role + vcard = vobject.readOne(entry[0]) + if hasattr(vcard, "n"): + given = vcard.n.value.given + family = vcard.n.value.family + lifecycle.add_value("role", role) + lifecycle.add_value("firstName", given) + lifecycle.add_value("lastName", family) + return lifecycle + + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.replace_value("format", "text/html") + technical.replace_value("location", response.url) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("url", response.meta["item"]["license"]["url"]) + license.add_value( + "internal", self.getProperty("ccm:commonlicense_key", response) + ) + license.add_value("author", self.getProperty("ccm:author_freetext", response)) + return license + + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + valuespaces.add_value("discipline", self.getProperty("ccm:taxonid", response)) + valuespaces.add_value( + "intendedEndUserRole", + self.getProperty("ccm:educationalintendedenduserrole", response), + ) + valuespaces.add_value( + "educationalContext", self.getProperty("ccm:educationalcontext", response) + ) + valuespaces.add_value( + "learningResourceType", + self.getProperty("ccm:educationallearningresourcetype", response), + ) + valuespaces.add_value( + "sourceContentType", self.getProperty("ccm:sourceContentType", response) + ) + valuespaces.add_value( + "toolCategory", self.getProperty("ccm:toolCategory", response) + ) + return valuespaces diff --git a/etl/converter/spiders/geogebra_spider.py b/etl/converter/spiders/geogebra_spider.py index af5e4f26..be25abcc 100644 --- a/etl/converter/spiders/geogebra_spider.py +++ b/etl/converter/spiders/geogebra_spider.py @@ -3,106 +3,114 @@ from converter.constants import Constants from datetime import datetime from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.json_base import JSONBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.json_base import JSONBase import json import time # spider for GeoGebra class GeoGebraSpider(CrawlSpider, LomBase, JSONBase): - name='geogebra_spider' - friendlyName = 'GeoGebra' - url = 'https://www.geogebra.org' - version = '0.1' - start_urls = [ - 'https://www.geogebra.org/m-sitemap-1.xml', - 'https://www.geogebra.org/m-sitemap-2.xml', - 'https://www.geogebra.org/m-sitemap-3.xml', - 'https://www.geogebra.org/m-sitemap-4.xml', - 
'https://www.geogebra.org/m-sitemap-5.xml', - 'https://www.geogebra.org/m-sitemap-6.xml', - 'https://www.geogebra.org/m-sitemap-7.xml', - 'https://www.geogebra.org/m-sitemap-8.xml', - 'https://www.geogebra.org/m-sitemap-9.xml', - 'https://www.geogebra.org/m-sitemap-10.xml', - 'https://www.geogebra.org/m-sitemap-11.xml', - ] + name = "geogebra_spider" + friendlyName = "GeoGebra" + url = "https://www.geogebra.org" + version = "0.1" + start_urls = [ + "https://www.geogebra.org/m-sitemap-1.xml", + "https://www.geogebra.org/m-sitemap-2.xml", + "https://www.geogebra.org/m-sitemap-3.xml", + "https://www.geogebra.org/m-sitemap-4.xml", + "https://www.geogebra.org/m-sitemap-5.xml", + "https://www.geogebra.org/m-sitemap-6.xml", + "https://www.geogebra.org/m-sitemap-7.xml", + "https://www.geogebra.org/m-sitemap-8.xml", + "https://www.geogebra.org/m-sitemap-9.xml", + "https://www.geogebra.org/m-sitemap-10.xml", + "https://www.geogebra.org/m-sitemap-11.xml", + ] - apiUrl = 'https://api.geogebra.org/v1.0/materials/%id?scope=extended&embed=creator,tags,topics' + apiUrl = "https://api.geogebra.org/v1.0/materials/%id?scope=extended&embed=creator,tags,topics" - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) - def get(self, *params, response): - data = json.loads(response.body_as_unicode()) - return JSONBase.get(self, *params, json = data) + def get(self, *params, response): + data = json.loads(response.body_as_unicode()) + return JSONBase.get(self, *params, json=data) - def parse(self, response): - i = 0 - for url in response.xpath('//*[name()="url"]/*[name()="loc"]//text()').getall(): - split = url.split('/') - id = split[len(split)-1] - apiCall = self.apiUrl.replace('%id', id) - yield scrapy.Request(url = apiCall, callback = self.parseEntry, meta = {'url': url}) - i += 1 - - def parseEntry(self, response): - if self.get('language', response = response) == 'de': - return LomBase.parse(self, response) - logging.info('Skpping entry with language ' + self.get('language', response = response)) - return None - - def getId(self, response): - return self.get('id', response = response) + def parse(self, response): + i = 0 + for url in response.xpath('//*[name()="url"]/*[name()="loc"]//text()').getall(): + split = url.split("/") + id = split[len(split) - 1] + apiCall = self.apiUrl.replace("%id", id) + yield scrapy.Request( + url=apiCall, callback=self.parseEntry, meta={"url": url} + ) + i += 1 - def getHash(self, response): - return self.version + str(self.get('date_modified', response = response)) + def parseEntry(self, response): + if self.get("language", response=response) == "de": + return LomBase.parse(self, response) + logging.info( + "Skpping entry with language " + self.get("language", response=response) + ) + return None - def getBase(self, response): - base = LomBase.getBase(self, response) - #print(response.url) - #print(self.get('thumbUrl', response = response)) - #print(self.get('thumbUrl', response = response).replace('$1', '@l')) - base.add_value('thumbnail', str(self.get('thumbUrl', response = response)).replace('$1', '@l')) - base.add_value('lastModified', self.get('date_modified', response = response)) - return base + def getId(self, response): + return self.get("id", response=response) - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.add_value('identifier', self.get('id', response = response)) - general.add_value('title', self.get('title', response = response)) - 
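# Illustrative sketch, not taken from the patched files: how GeoGebraSpider.parse() turns
# a sitemap <loc> entry into a materials API call. The example URL below is invented; the
# material id is simply the last path segment of the sitemap URL.
API_URL = "https://api.geogebra.org/v1.0/materials/%id?scope=extended&embed=creator,tags,topics"

def api_call_for(loc_url):
    material_id = loc_url.split("/")[-1]
    return API_URL.replace("%id", material_id)

# api_call_for("https://www.geogebra.org/m/abc123")
# -> "https://api.geogebra.org/v1.0/materials/abc123?scope=extended&embed=creator,tags,topics"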
general.add_value('keyword', self.get('keywords', response = response)) - general.add_value('language', self.get('language', response = response)) - general.add_value('description', self.get('description', response = response)) - return general - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - valuespaces.add_value('discipline', self.get('topics', response = response)) - t = self.get('type', response = response) - if t == 'ws': - valuespaces.add_value('learningResourceType', 'worksheet') - if t == 'book': - valuespaces.add_value('learningResourceType', 'text') - c = self.get('categories', response = response) - if 'game' in c: - valuespaces.add_value('learningResourceType', 'educational_game') - if 'practice' in c: - valuespaces.add_value('learningResourceType', 'drill_and_practice') - return valuespaces + def getHash(self, response): + return self.version + str(self.get("date_modified", response=response)) - def getLOMEducational(self, response): - educational = LomBase.getLOMEducational(self, response) - #educational.add_value('typicalLearningTime', self.get('timeRequired')) - return educational + def getBase(self, response): + base = LomBase.getBase(self, response) + # print(response.url) + # print(self.get('thumbUrl', response = response)) + # print(self.get('thumbUrl', response = response).replace('$1', '@l')) + base.add_value( + "thumbnail", + str(self.get("thumbUrl", response=response)).replace("$1", "@l"), + ) + base.add_value("lastModified", self.get("date_modified", response=response)) + return base - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value('url', Constants.LICENSE_CC_BY_SA_30) - return license + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.add_value("identifier", self.get("id", response=response)) + general.add_value("title", self.get("title", response=response)) + general.add_value("keyword", self.get("keywords", response=response)) + general.add_value("language", self.get("language", response=response)) + general.add_value("description", self.get("description", response=response)) + return general - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('format', self.get('fileFormat', response = response)) - technical.add_value('size', self.get('ContentSize', response = response)) - technical.add_value('location', response.meta['url']) - return technical + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + valuespaces.add_value("discipline", self.get("topics", response=response)) + t = self.get("type", response=response) + if t == "ws": + valuespaces.add_value("learningResourceType", "worksheet") + if t == "book": + valuespaces.add_value("learningResourceType", "text") + c = self.get("categories", response=response) + if "game" in c: + valuespaces.add_value("learningResourceType", "educational_game") + if "practice" in c: + valuespaces.add_value("learningResourceType", "drill_and_practice") + return valuespaces + + def getLOMEducational(self, response): + educational = LomBase.getLOMEducational(self, response) + # educational.add_value('typicalLearningTime', self.get('timeRequired')) + return educational + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("url", Constants.LICENSE_CC_BY_SA_30) + return license + + def getLOMTechnical(self, response): + technical = 
LomBase.getLOMTechnical(self, response) + technical.add_value("format", self.get("fileFormat", response=response)) + technical.add_value("size", self.get("ContentSize", response=response)) + technical.add_value("location", response.meta["url"]) + return technical diff --git a/etl/converter/spiders/irights_spider.py b/etl/converter/spiders/irights_spider.py index ba3f80ba..e480b3e7 100644 --- a/etl/converter/spiders/irights_spider.py +++ b/etl/converter/spiders/irights_spider.py @@ -2,45 +2,47 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.rss_base import RSSBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.rss_base import RSSBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import Constants; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import Constants # Spider to fetch RSS from planet schule class IRightsSpider(RSSBase): - name='irights_spider' - friendlyName='iRights.info' - start_urls = ['https://irights.info/feed'] - version = '0.1.0' + name = "irights_spider" + friendlyName = "iRights.info" + start_urls = ["https://irights.info/feed"] + version = "0.1.0" - def __init__(self, **kwargs): - RSSBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + RSSBase.__init__(self, **kwargs) - def getLOMGeneral(self, response): - general = RSSBase.getLOMGeneral(self, response) - general.add_value('keyword', response.meta['item'].xpath('category//text()').getall()) - return general + def getLOMGeneral(self, response): + general = RSSBase.getLOMGeneral(self, response) + general.add_value( + "keyword", response.meta["item"].xpath("category//text()").getall() + ) + return general - def getLOMLifecycle(self, response): - lifecycle = RSSBase.getLOMLifecycle(self, response) - name = response.meta['item'].xpath('creator//text()').get().split(' ') - lifecycle.add_value('role', 'author') - lifecycle.add_value('firstName', name[0]) - del name[0] - lifecycle.add_value('lastName', ' '.join(name)) - return lifecycle + def getLOMLifecycle(self, response): + lifecycle = RSSBase.getLOMLifecycle(self, response) + name = response.meta["item"].xpath("creator//text()").get().split(" ") + lifecycle.add_value("role", "author") + lifecycle.add_value("firstName", name[0]) + del name[0] + lifecycle.add_value("lastName", " ".join(name)) + return lifecycle - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - valuespaces.add_value('educationalContext', 'sekundarstufe_2') - valuespaces.add_value('educationalContext', 'berufliche_bildung') - valuespaces.add_value('educationalContext', 'erwachsenenbildung') - valuespaces.add_value('discipline', '700') # Wirtschaftskunde - valuespaces.add_value('discipline', '48005') # Gesellschaftskunde - return valuespaces \ No newline at end of file + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + valuespaces.add_value("educationalContext", "sekundarstufe_2") + valuespaces.add_value("educationalContext", "berufliche_bildung") + valuespaces.add_value("educationalContext", "erwachsenenbildung") + valuespaces.add_value("discipline", "700") # Wirtschaftskunde + 
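# Illustrative sketch, not taken from the patched files: the creator-name split used in
# IRightsSpider.getLOMLifecycle() above. The feed's <creator> text is free-form, so the
# first token becomes firstName and the rest lastName; reversed "Lastname, Firstname"
# forms or academic titles would not be handled by this rule.
def split_creator(creator_text):
    parts = creator_text.split(" ")
    return parts[0], " ".join(parts[1:])

# split_creator("Erika Mustermann Beispiel") -> ("Erika", "Mustermann Beispiel")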
valuespaces.add_value("discipline", "48005") # Gesellschaftskunde + return valuespaces diff --git a/etl/converter/spiders/json_base.py b/etl/converter/spiders/json_base.py index 07a302b8..6333de56 100644 --- a/etl/converter/spiders/json_base.py +++ b/etl/converter/spiders/json_base.py @@ -2,24 +2,24 @@ from converter.items import * from datetime import datetime from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; +from converter.spiders.lom_base import LomBase import time # base for spiders using local 'json' data and need to access them class JSONBase: - json = None + json = None - def get(self, *params,mode = 'first', json = None): - if json == None: - json = self.json - - for param in params: - value=json - for key in param.split('.'): - if value: - value=value.get(key) - else: - return None - if value != None: - return value - return None \ No newline at end of file + def get(self, *params, mode="first", json=None): + if json == None: + json = self.json + + for param in params: + value = json + for key in param.split("."): + if value: + value = value.get(key) + else: + return None + if value != None: + return value + return None diff --git a/etl/converter/spiders/leifi_spider.py b/etl/converter/spiders/leifi_spider.py index 2c8a5e2d..1a2bbf23 100644 --- a/etl/converter/spiders/leifi_spider.py +++ b/etl/converter/spiders/leifi_spider.py @@ -1,87 +1,111 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.valuespace_helper import Valuespaces; +from converter.spiders.lom_base import LomBase +from converter.valuespace_helper import Valuespaces import requests from html.parser import HTMLParser -from converter.constants import Constants; +from converter.constants import Constants # LEIFIphysik spider for xml data file class LeifiSpider(scrapy.Spider, LomBase): - name='leifi_spider' - friendlyName = 'LEIFIphysik' - url = 'https://www.leifiphysik.de/' - # rssUrl = 'http://localhost/sources/leifi_feed_rss.xml' - rssUrl = 'https://www.leifiphysik.de/sites/default/files/elixier/leifi_feed_rss.xml' + name = "leifi_spider" + friendlyName = "LEIFIphysik" + url = "https://www.leifiphysik.de/" + #rssUrl = "http://localhost/sources/leifi_feed_rss.xml" + rssUrl = "https://www.leifiphysik.de/sites/default/files/elixier/leifi_feed_rss.xml" - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) - self.valuespacesMapping = Valuespaces() + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) + self.valuespacesMapping = Valuespaces() - def getUri(self, response): - return response.meta['item'].xpath('url_datensatz//text()').get() + def getUri(self, response): + return response.meta["item"].xpath("url_datensatz//text()").get() - def start_requests(self): - yield scrapy.Request(url=self.rssUrl, callback=self.parseList) - def parseList(self, response): - ids = [] - for item in response.xpath('//elixier/datensatz'): - id = item.xpath('id_local//text()').get() - if not id in ids: - ids.append(id) - copyResponse = response.copy() - copyResponse.meta['item'] = item - yield self.parse(copyResponse) + def start_requests(self): + yield scrapy.Request(url=self.rssUrl, callback=self.parseList) - def parse(self, response): - return LomBase.parse(self, response) + def parseList(self, response): + ids = [] + for item in response.xpath("//elixier/datensatz"): + id = item.xpath("id_local//text()").get() + if not id in ids: + 
ids.append(id) + copyResponse = response.copy() + copyResponse.meta["item"] = item + yield self.parse(copyResponse) - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - text = response.meta['item'].xpath('systematikpfad//text()').get() - for entry in self.valuespacesMapping.data['discipline']: - if entry['prefLabel']['de'].casefold() in text.casefold(): - valuespaces.add_value('discipline',entry['id']) - return valuespaces + def parse(self, response): + return LomBase.parse(self, response) - def mapResponse(self, response): - r = ResponseItemLoader() - r.add_value('url', self.getUri(response)) - r.add_value('text', requests.get(response.meta['item'].xpath('url_datensatz//text()').get()).content.decode('UTF-8')) - return r + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + text = response.meta["item"].xpath("systematikpfad//text()").get() + for entry in self.valuespacesMapping.data["discipline"]: + if entry["prefLabel"]["de"].casefold() in text.casefold(): + valuespaces.add_value("discipline", entry["id"]) + return valuespaces - def getId(self, response): - return response.meta['item'].xpath('id_local//text()').get() + def mapResponse(self, response): + r = ResponseItemLoader() + r.add_value("url", self.getUri(response)) + r.add_value( + "text", + requests.get( + response.meta["item"].xpath("url_datensatz//text()").get() + ).content.decode("UTF-8"), + ) + return r - def getHash(self, response): - return response.meta['item'].xpath('letzte_aenderung//text()').get() + def getId(self, response): + return response.meta["item"].xpath("id_local//text()").get() - def getBase(self, response): - base = LomBase.getBase(self, response) - base.add_value('lastModified', response.meta['item'].xpath('letzte_aenderung//text()').get()) - return base + def getHash(self, response): + return response.meta["item"].xpath("letzte_aenderung//text()").get() - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.add_value('title', HTMLParser().unescape(response.meta['item'].xpath('titel//text()').get())) - general.add_value('language', response.meta['item'].xpath('sprache//text()').get()) - general.add_value('keyword', HTMLParser().unescape(response.meta['item'].xpath('schlagwort//text()').get()).split('; ')) - desc = response.meta['item'].xpath('beschreibung//text()').get().strip() - # dirty cleaning of invalid descriptions - # not perfect yet, these objects also appear inside the content - if not desc.startswith('swiffyobject_'): - general.add_value('description', HTMLParser().unescape(desc)) - return general + def getBase(self, response): + base = LomBase.getBase(self, response) + base.add_value( + "lastModified", + response.meta["item"].xpath("letzte_aenderung//text()").get(), + ) + return base - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('format', 'text/html') - technical.add_value('location', response.meta['item'].xpath('url_datensatz//text()').get()) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - if response.meta['item'].xpath('rechte//text()').get() == 'Keine Angabe, es gilt die gesetzliche Regelung': - license.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) - return license + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.add_value( + "title", + 
HTMLParser().unescape(response.meta["item"].xpath("titel//text()").get()), + ) + general.add_value( + "language", response.meta["item"].xpath("sprache//text()").get() + ) + general.add_value( + "keyword", + HTMLParser() + .unescape(response.meta["item"].xpath("schlagwort//text()").get()) + .split("; "), + ) + desc = response.meta["item"].xpath("beschreibung//text()").get().strip() + # dirty cleaning of invalid descriptions + # not perfect yet, these objects also appear inside the content + if not desc.startswith("swiffyobject_"): + general.add_value("description", HTMLParser().unescape(desc)) + return general + + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("format", "text/html") + technical.add_value( + "location", response.meta["item"].xpath("url_datensatz//text()").get() + ) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + if ( + response.meta["item"].xpath("rechte//text()").get() + == "Keine Angabe, es gilt die gesetzliche Regelung" + ): + license.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) + return license diff --git a/etl/converter/spiders/lom_base.py b/etl/converter/spiders/lom_base.py index 88f81da7..b28c6cea 100644 --- a/etl/converter/spiders/lom_base.py +++ b/etl/converter/spiders/lom_base.py @@ -12,160 +12,177 @@ class LomBase: - friendlyName = 'LOM Based spider' - ranking = 1 - version = '1.0' # you can override this locally and use it for your getHash() function - - uuid = None - remoteId = None - forceUpdate = False - def __init__(self, **kwargs): - if 'uuid' in kwargs: - self.uuid = kwargs['uuid'] - if 'remoteId' in kwargs: - self.remoteId = kwargs['remoteId'] - if 'cleanrun' in kwargs and kwargs['cleanrun'] == 'true': - logging.info('cleanrun requested, will force update for crawler ' + self.name) - #EduSharing().deleteAll(self) - self.forceUpdate = True - if 'resetVersion' in kwargs and kwargs['resetVersion'] == 'true': - logging.info('resetVersion requested, will force update + reset versions for crawler ' + self.name) - #EduSharing().deleteAll(self) - EduSharing.resetVersion = True - self.forceUpdate = True - - - # override to improve performance and automatically handling id - def getId(self, response = None) -> str: - return None - # override to improve performance and automatically handling hash - def getHash(self, response = None) -> str: - return None - - # return the unique uri for the entry - def getUri(self, response = None) -> str: - return response.url - - def getUUID(self, response = None) -> str: - return EduSharing().buildUUID(self.getUri(response)) - - def hasChanged(self, response = None) -> bool: - if self.forceUpdate: - return True - if self.uuid: - if self.getUUID(response) == self.uuid: - logging.info('matching requested id: ' + self.uuid) - return True - return False - if self.remoteId: - if self.getId(response) == self.remoteId: - logging.info('matching requested id: ' + self.remoteId) - return True - return False - db = EduSharing().findItem(self.getId(response), self) - changed = db == None or db[1] != self.getHash(response) - if not changed: - logging.info('Item ' + db[0] + ' has not changed') - return changed - - # you might override this method if you don't want to import specific entries - def shouldImport(self, response = None) -> bool: - return True - - def parse(self, response): - if self.shouldImport(response) == False: - logging.info('Skipping entry ' + str(self.getId(response)) + ' because shouldImport() 
returned false') - return None - if self.getId(response) != None and self.getHash(response) != None: - if not self.hasChanged(response): + friendlyName = "LOM Based spider" + ranking = 1 + version = ( + "1.0" # you can override this locally and use it for your getHash() function + ) + + uuid = None + remoteId = None + forceUpdate = False + + def __init__(self, **kwargs): + if "uuid" in kwargs: + self.uuid = kwargs["uuid"] + if "remoteId" in kwargs: + self.remoteId = kwargs["remoteId"] + if "cleanrun" in kwargs and kwargs["cleanrun"] == "true": + logging.info( + "cleanrun requested, will force update for crawler " + self.name + ) + # EduSharing().deleteAll(self) + self.forceUpdate = True + if "resetVersion" in kwargs and kwargs["resetVersion"] == "true": + logging.info( + "resetVersion requested, will force update + reset versions for crawler " + + self.name + ) + # EduSharing().deleteAll(self) + EduSharing.resetVersion = True + self.forceUpdate = True + + # override to improve performance and automatically handling id + def getId(self, response=None) -> str: + return None + + # override to improve performance and automatically handling hash + def getHash(self, response=None) -> str: return None - # Avoid stressing the servers across calls of this method. - settings = get_project_settings() - if 'PARSE_DELAY' in settings and float(settings.get('PARSE_DELAY')) > 0: - time.sleep(float(settings.get('PARSE_DELAY'))) - - main = self.getBase(response) - main.add_value('lom', self.getLOM(response).load_item()) - main.add_value('valuespaces', self.getValuespaces(response).load_item()) - main.add_value('license', self.getLicense(response).load_item()) - main.add_value('permissions', self.getPermissions(response).load_item()) - logging.debug(main.load_item()) - main.add_value('response', self.mapResponse(response).load_item()) - return main.load_item() - - def html2Text(self, html): - h = html2text.HTML2Text() - h.ignore_links = True - h.ignore_images = True - return h.handle(html) - - def getUrlData(self, url): - settings = get_project_settings() - html = requests.post(settings.get('SPLASH_URL')+'/render.html', json={ - 'url': url, - 'wait': settings.get('SPLASH_WAIT'), - 'headers': settings.get('SPLASH_HEADERS') - }).content.decode('UTF-8') - return { - 'html': html, - 'text': self.html2Text(html) - } - def mapResponse(self, response, fetchData = True): - r = ResponseItemLoader(response = response) - r.add_value('status',response.status) - #r.add_value('body',response.body.decode('utf-8')) - - # render via splash to also get the full javascript rendered content. 
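# Illustrative sketch, not taken from the patched files: the Splash call behind
# getUrlData(), issued outside of Scrapy. SPLASH_URL, SPLASH_WAIT and SPLASH_HEADERS come
# from the project settings; the values below are placeholders for the demo.
import requests

SPLASH_URL = "http://localhost:8050"        # placeholder
SPLASH_WAIT = 1                             # placeholder
SPLASH_HEADERS = {"Accept-Language": "de"}  # placeholder

def render_page(url):
    response = requests.post(
        SPLASH_URL + "/render.html",
        json={"url": url, "wait": SPLASH_WAIT, "headers": SPLASH_HEADERS},
    )
    # fully rendered HTML, including JavaScript output, ready for html2text
    return response.content.decode("UTF-8")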
- if fetchData: - data = self.getUrlData(response.url) - r.add_value('html',data['html']) - r.add_value('text',data['text']) - r.add_value('headers',response.headers) - r.add_value('url',self.getUri(response)) - return r - - def getValuespaces(self, response): - return ValuespaceItemLoader(response = response) - - def getLOM(self, response): - lom = LomBaseItemloader(response = response) - lom.add_value('general', self.getLOMGeneral(response).load_item()) - lom.add_value('lifecycle', self.getLOMLifecycle(response).load_item()) - lom.add_value('technical', self.getLOMTechnical(response).load_item()) - lom.add_value('educational', self.getLOMEducational(response).load_item()) - lom.add_value('classification', self.getLOMClassification(response).load_item()) - return lom - - def getBase(self, response = None): - base = BaseItemLoader() - base.add_value('sourceId', self.getId(response)) - base.add_value('hash', self.getHash(response)) - # we assume that content is imported. Please use replace_value if you import something different - base.add_value('type', Constants.TYPE_MATERIAL) - return base - - def getLOMGeneral(self, response = None): - return LomGeneralItemloader(response = response) - - def getLOMLifecycle(self, response = None): - return LomLifecycleItemloader(response = response) - - def getLOMTechnical(self, response = None): - return LomTechnicalItemLoader(response = response) - - def getLOMEducational(self, response = None): - return LomEducationalItemLoader(response = response) - - def getLicense(self, response = None): - return LicenseItemLoader(response = response) - - def getLOMClassification(self, response = None): - return LomClassificationItemLoader(response = response) - - def getPermissions(self, response = None): - permissions = PermissionItemLoader(response = response) - # default all materials to public, needs to be changed depending on the spider! - settings = get_project_settings() - permissions.add_value('public', settings.get('DEFAULT_PUBLIC_STATE')) - return permissions \ No newline at end of file + # return the unique uri for the entry + def getUri(self, response=None) -> str: + return response.url + + def getUUID(self, response=None) -> str: + return EduSharing().buildUUID(self.getUri(response)) + + def hasChanged(self, response=None) -> bool: + if self.forceUpdate: + return True + if self.uuid: + if self.getUUID(response) == self.uuid: + logging.info("matching requested id: " + self.uuid) + return True + return False + if self.remoteId: + if self.getId(response) == self.remoteId: + logging.info("matching requested id: " + self.remoteId) + return True + return False + db = EduSharing().findItem(self.getId(response), self) + changed = db == None or db[1] != self.getHash(response) + if not changed: + logging.info("Item " + db[0] + " has not changed") + return changed + + # you might override this method if you don't want to import specific entries + def shouldImport(self, response=None) -> bool: + return True + + def parse(self, response): + if self.shouldImport(response) == False: + logging.info( + "Skipping entry " + + str(self.getId(response)) + + " because shouldImport() returned false" + ) + return None + if self.getId(response) != None and self.getHash(response) != None: + if not self.hasChanged(response): + return None + + # Avoid stressing the servers across calls of this method. 
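# Illustrative sketch, not taken from the patched files: the change-detection rule that
# hasChanged() applies above, written out on its own. `stored` stands for whatever
# EduSharing().findItem() returns for a source id: None when the item is unknown,
# otherwise a record whose second field is the previously stored hash. The uuid/remoteId
# command-line filters of the real method are left out here.
def needs_reimport(stored, current_hash, force_update=False):
    if force_update:
        return True
    return stored is None or stored[1] != current_hash

# needs_reimport(None, "abc1.0")                  -> True   (never imported before)
# needs_reimport(("node-id", "abc1.0"), "abc1.0") -> False  (hash unchanged, skip item)
# needs_reimport(("node-id", "abc1.0"), "def1.0") -> True   (source changed, reimport)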
+ settings = get_project_settings() + if "PARSE_DELAY" in settings and float(settings.get('PARSE_DELAY')) > 0: + time.sleep(float(settings.get("PARSE_DELAY"))) + + main = self.getBase(response) + main.add_value("lom", self.getLOM(response).load_item()) + main.add_value("valuespaces", self.getValuespaces(response).load_item()) + main.add_value("license", self.getLicense(response).load_item()) + main.add_value("permissions", self.getPermissions(response).load_item()) + logging.debug(main.load_item()) + main.add_value("response", self.mapResponse(response).load_item()) + return main.load_item() + + def html2Text(self, html): + h = html2text.HTML2Text() + h.ignore_links = True + h.ignore_images = True + return h.handle(html) + + def getUrlData(self, url): + settings = get_project_settings() + html = None + if settings.get("SPLASH_URL"): + html = requests.post( + settings.get("SPLASH_URL") + "/render.html", + json={ + "url": url, + "wait": settings.get("SPLASH_WAIT"), + "headers": settings.get("SPLASH_HEADERS"), + }, + ).content.decode("UTF-8") + return {"html": html, "text": self.html2Text(html)} + else: + return {"html": None, "text": None} + + def mapResponse(self, response, fetchData=True): + r = ResponseItemLoader(response=response) + r.add_value("status", response.status) + # r.add_value('body',response.body.decode('utf-8')) + + # render via splash to also get the full javascript rendered content. + if fetchData: + data = self.getUrlData(response.url) + r.add_value("html", data["html"]) + r.add_value("text", data["text"]) + r.add_value("headers", response.headers) + r.add_value("url", self.getUri(response)) + return r + + def getValuespaces(self, response): + return ValuespaceItemLoader(response=response) + + def getLOM(self, response) -> LomBaseItemloader: + lom = LomBaseItemloader(response=response) + lom.add_value("general", self.getLOMGeneral(response).load_item()) + lom.add_value("lifecycle", self.getLOMLifecycle(response).load_item()) + lom.add_value("technical", self.getLOMTechnical(response).load_item()) + lom.add_value("educational", self.getLOMEducational(response).load_item()) + lom.add_value("classification", self.getLOMClassification(response).load_item()) + return lom + + def getBase(self, response=None) -> BaseItemLoader: + base = BaseItemLoader() + base.add_value("sourceId", self.getId(response)) + base.add_value("hash", self.getHash(response)) + # we assume that content is imported. Please use replace_value if you import something different + base.add_value("type", Constants.TYPE_MATERIAL) + return base + + def getLOMGeneral(self, response=None) -> LomGeneralItemloader: + return LomGeneralItemloader(response=response) + + def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: + return LomLifecycleItemloader(response=response) + + def getLOMTechnical(self, response=None) -> LomTechnicalItemLoader: + return LomTechnicalItemLoader(response=response) + + def getLOMEducational(self, response=None) -> LomEducationalItemLoader: + return LomEducationalItemLoader(response=response) + + def getLicense(self, response=None) -> LicenseItemLoader: + return LicenseItemLoader(response=response) + + def getLOMClassification(self, response=None) -> LomClassificationItemLoader: + return LomClassificationItemLoader(response=response) + + def getPermissions(self, response=None) -> PermissionItemLoader: + permissions = PermissionItemLoader(response=response) + # default all materials to public, needs to be changed depending on the spider! 
+ settings = get_project_settings() + permissions.add_value("public", settings.get("DEFAULT_PUBLIC_STATE")) + return permissions diff --git a/etl/converter/spiders/lrmi_base.py b/etl/converter/spiders/lrmi_base.py index 8b5be6db..8e6644a5 100644 --- a/etl/converter/spiders/lrmi_base.py +++ b/etl/converter/spiders/lrmi_base.py @@ -1,88 +1,107 @@ from converter.items import * from datetime import datetime from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.json_base import JSONBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.json_base import JSONBase import json import time import logging from html.parser import HTMLParser # base spider mapping data via LRMI inside the html pages -# Please override the lrmi_path if necessary and add your sitemap_urls +# Please override the lrmi_path if necessary and add your sitemap_urls class LrmiBase(LomBase, JSONBase): - friendlyName = 'LRMI-Header Based spider' - lrmi_path = '//script[@type="application/ld+json"]//text()' - sitemap_urls = [] + friendlyName = "LRMI-Header Based spider" + lrmi_path = '//script[@type="application/ld+json"]//text()' + sitemap_urls = [] - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) - def getLRMI(self, *params, response): - try: - lrmi = list(map(lambda x: json.loads(x.replace('\r','').replace('\n',' ')),response.xpath(self.lrmi_path).getall())) - except: - logging.warning('failed parsing lrmi at ' + response.url + ', please check source') - return None - for l in lrmi: - value = JSONBase.get(self, *params, json = l) - if value != None: - return HTMLParser().unescape(value) - return None - - def parse(self, response): - return LomBase.parse(self, response) + def getLRMI(self, *params, response): + try: + lrmi = list( + map( + lambda x: json.loads(x.replace("\r", "").replace("\n", " ")), + response.xpath(self.lrmi_path).getall(), + ) + ) + except: + logging.warning( + "failed parsing lrmi at " + response.url + ", please check source" + ) + return None + for l in lrmi: + value = JSONBase.get(self, *params, json=l) + if value != None: + return HTMLParser().unescape(value) + return None + def parse(self, response): + return LomBase.parse(self, response) - def getId(self, response): - return self.getLRMI('identifier','url','name', response = response) + def getId(self, response): + return self.getLRMI("identifier", "url", "name", response=response) - def getHash(self, response): - if self.get('version') != None: - return self.getLRMI('version', response = response) - return time.time() + def getHash(self, response): + if self.get("version") != None: + return self.getLRMI("version", response=response) + return time.time() - def getBase(self, response): - base = LomBase.getBase(self, response) - base.add_value('thumbnail', self.getLRMI('thumbnailUrl', response = response)) - base.add_value('lastModified', self.getLRMI('dateModified', 'datePublished', response = response)) - return base + def getBase(self, response): + base = LomBase.getBase(self, response) + base.add_value("thumbnail", self.getLRMI("thumbnailUrl", response=response)) + base.add_value( + "lastModified", + self.getLRMI("dateModified", "datePublished", response=response), + ) + return base - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.add_value('identifier', self.getLRMI('identifier', response = response)) - 
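# Illustrative sketch, not taken from the patched files: what getLRMI() does, reduced to
# plain Python. Every <script type="application/ld+json"> block is parsed and the first
# block containing one of the requested keys wins. The real method uses response.xpath()
# and JSONBase.get() (which also resolves dotted keys such as "audience.educationalRole"
# and unescapes HTML entities); the HTML snippet below is invented for the demo.
import json
import re

LDJSON_RE = re.compile(r'<script type="application/ld\+json">(.*?)</script>', re.DOTALL)

def get_lrmi(html, *keys):
    for block in LDJSON_RE.findall(html):
        data = json.loads(block.replace("\r", "").replace("\n", " "))
        for key in keys:  # fallback order, e.g. "dateModified", then "datePublished"
            if data.get(key) is not None:
                return data[key]
    return None

DEMO_HTML = '<script type="application/ld+json">{"name": "Beispielmaterial", "inLanguage": "de"}</script>'
# get_lrmi(DEMO_HTML, "name") -> "Beispielmaterial"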
general.add_value('title', self.getLRMI('name', response = response)) - general.add_value('keyword', self.getLRMI('keywords', response = response)) - general.add_value('language', self.getLRMI('inLanguage', response = response)) - general.add_value('description', self.getLRMI('description','about', response = response)) - return general - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - valuespaces.add_value('intendedEndUserRole', self.getLRMI('audience.educationalRole', response = response)) - return valuespaces + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.add_value("identifier", self.getLRMI("identifier", response=response)) + general.add_value("title", self.getLRMI("name", response=response)) + general.add_value("keyword", self.getLRMI("keywords", response=response)) + general.add_value("language", self.getLRMI("inLanguage", response=response)) + general.add_value( + "description", self.getLRMI("description", "about", response=response) + ) + return general - def getLOMEducational(self, response): - educational = LomBase.getLOMEducational(self, response) - educational.add_value('typicalLearningTime', self.getLRMI('timeRequired', response = response)) - return educational + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + valuespaces.add_value( + "intendedEndUserRole", + self.getLRMI("audience.educationalRole", response=response), + ) + return valuespaces - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - valuespaces.add_value('learningResourceType', self.getLRMI('learningResourceType', response = response)) - return valuespaces + def getLOMEducational(self, response): + educational = LomBase.getLOMEducational(self, response) + educational.add_value( + "typicalLearningTime", self.getLRMI("timeRequired", response=response) + ) + return educational - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value('url', self.getLRMI('license', response = response)) - return license + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + valuespaces.add_value( + "learningResourceType", + self.getLRMI("learningResourceType", response=response), + ) + return valuespaces - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('format', self.getLRMI('fileFormat', response = response)) - technical.add_value('size', self.getLRMI('ContentSize', response = response)) - url = self.getLRMI('url', response = response) - if not url: - url = response.url - technical.add_value('location', url) - return technical + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("url", self.getLRMI("license", response=response)) + return license + + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("format", self.getLRMI("fileFormat", response=response)) + technical.add_value("size", self.getLRMI("ContentSize", response=response)) + url = self.getLRMI("url", response=response) + if not url: + url = response.url + technical.add_value("location", url) + return technical diff --git a/etl/converter/spiders/mediothek_pixiothek_spider.py b/etl/converter/spiders/mediothek_pixiothek_spider.py index 9cfd5c1c..de1641fb 100644 --- a/etl/converter/spiders/mediothek_pixiothek_spider.py +++ 
b/etl/converter/spiders/mediothek_pixiothek_spider.py @@ -4,9 +4,8 @@ from scrapy.spiders import CrawlSpider from converter.items import * -from converter.offline_mode.mediothek_pixiothek_spider_offline import encode_url_for_local from converter.spiders.lom_base import LomBase -from converter.constants import *; +from converter.constants import * class MediothekPixiothekSpider(CrawlSpider, LomBase): @@ -15,12 +14,14 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): Author: Timur Yure, timur.yure@capgemini.com , Capgemini for Schul-Cloud, Content team. """ - name = 'mediothek_pixiothek_spider' - url = 'https://www.schulportal-thueringen.de/' # the url which will be linked as the primary link to your source (should be the main url of your site) - friendlyName = 'MediothekPixiothek' # name as shown in the search ui - version = '0.1' # the version of your crawler, used to identify if a reimport is necessary - # start_urls = ['https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] - start_urls = ['http://localhost:8080/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei'] + + name = "mediothek_pixiothek_spider" + url = "https://www.schulportal-thueringen.de/" # the url which will be linked as the primary link to your source (should be the main url of your site) + friendlyName = "MediothekPixiothek" # name as shown in the search ui + version = "0.1" # the version of your crawler, used to identify if a reimport is necessary + start_urls = [ + "https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" + ] def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) @@ -32,13 +33,14 @@ def parse(self, response: scrapy.http.Response): response.meta["rendered_data"] = data elements = json.loads(response.body_as_unicode()) - grouped_elements = self.group_elements(elements) + # grouped_elements = self.group_elements_by_medium_id(elements) + grouped_elements = self.group_elements_by_sammlung(elements) for i, element in enumerate(grouped_elements): copyResponse = response.copy() # Passing the dictionary for easier access to attributes. - copyResponse.meta['item'] = element + copyResponse.meta["item"] = element # In case JSON string representation is preferred: json_str = json.dumps(element, indent=4, sort_keys=True, ensure_ascii=False) @@ -51,7 +53,7 @@ def parse(self, response: scrapy.http.Response): # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. LomBase.parse(self, copyResponse) - def group_elements(self, elements): + def group_elements_by_medium_id(self, elements): """ This method groups the corresponding elements based on their mediumId. This changes the logic so that every element in the end maps to an educational element in the https://www.schulportal-thueringen.de. @@ -88,6 +90,43 @@ def group_elements(self, elements): return grouped_elements + def group_elements_by_sammlung(self, elements): + """ + In this method we identify elements that have a keyword (Stichwort) ending in "collection" (sammlung). + These elements are parents of other elements that have a serienTitel same as the einzeltitel of these collection + items. Then, we remove these children from the elements and we only have collections or single items, not part + of any collection. 
+ """ + + # Step 1 - Identify collection elements + collections_elements = set() + for idx, element in enumerate(elements): + keywords = element["listeStichwort"] + element_collections_keywords = set() + for keyword in keywords: + if keyword.endswith("sammlung"): + element_collections_keywords.add(keyword) + break + if len(element_collections_keywords) > 0: + collections_elements.add(idx) + + # Step 2 - Get a dictionary of "Einzeltitel" --> element index, for the collection elements. + # collections_einzeltitel = {elements[idx]["einzeltitel"]: idx for idx in collections_elements} + collections_einzeltitel = {} + for idx in collections_elements: + collection_einzeltitel = elements[idx]["einzeltitel"] + if collection_einzeltitel not in collections_einzeltitel: + collections_einzeltitel[collection_einzeltitel] = list() + collections_einzeltitel[collection_einzeltitel].append(elements[idx]) + # if "serientitel" in elements[idx]: + # collections_einzeltitel[collection_einzeltitel].append(elements[idx]["serientitel"]) + # else: + # collections_einzeltitel[collection_einzeltitel].append(None) + print("hi") + + + + def get_or_default(self, element, attribute, default_value=""): if attribute in element: return element[attribute] @@ -110,10 +149,10 @@ def getHash(self, response): return id + pts def mapResponse(self, response): - r = ResponseItemLoader(response = response) - r.add_value('status',response.status) - r.add_value('headers',response.headers) - r.add_value('url', self.getUri(response)) + r = ResponseItemLoader(response=response) + r.add_value("status", response.status) + r.add_value("headers", response.headers) + r.add_value("url", self.getUri(response)) return r def handleEntry(self, response): @@ -127,14 +166,7 @@ def getBase(self, response): # TODO: "For licensing reasons, this content is only available to users registered in the Thuringian school # portal." - # base.add_value('thumbnail', element_dict['previewImageUrl']) - - # TODO: Remove this. This is only for a local execution of Mediothek to check whether Edu-Sharing has issues. - thumbnail = element_dict['previewImageUrl'] - thumbnail = thumbnail.replace("https://www.schulportal-thueringen.de/", "http://localhost:8080/thumbnails/") - # Fix the encoding - thumbnail = encode_url_for_local(thumbnail) - base.add_value('thumbnail', thumbnail) + base.add_value("thumbnail", element_dict["previewImageUrl"]) return base @@ -146,14 +178,16 @@ def getLOMGeneral(self, response): # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel - general.add_value('title', element_dict["titel"]) + general.add_value("title", element_dict["titel"]) # self._if_exists_add(general, element_dict, "description", "kurzinhalt") if "kurzinhalt" in element_dict: - general.add_value('description', element_dict["kurzinhalt"]) + general.add_value("description", element_dict["kurzinhalt"]) - liste_stichwort = element_dict["listeStichwort"] if "listeStichwort" in element_dict else None + liste_stichwort = ( + element_dict["listeStichwort"] if "listeStichwort" in element_dict else None + ) if liste_stichwort is not None and len(liste_stichwort) > 0: - general.add_value('keyword', liste_stichwort) + general.add_value("keyword", liste_stichwort) return general @@ -161,7 +195,7 @@ def getUri(self, response): # Element response as a Python dict. 
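# --- Editor's note: illustrative sketch, not part of this patch. ---
# group_elements_by_sammlung() above stops after "Step 2" and ends in a stray
# debug print("hi") without returning anything. The sketch below shows one way
# the missing "Step 3" (dropping children whose serientitel matches a
# collection's einzeltitel) could look, assuming each element is a dict with
# "einzeltitel", "listeStichwort" and an optional "serientitel" key, the field
# names used in the code above.
def group_elements_by_sammlung_sketch(elements):
    # Step 1 - collection elements carry a keyword ending in "sammlung".
    collection_idx = {
        idx
        for idx, element in enumerate(elements)
        if any(kw.endswith("sammlung") for kw in element.get("listeStichwort", []))
    }

    # Step 2 - the Einzeltitel of every collection element.
    collection_titles = {elements[idx]["einzeltitel"] for idx in collection_idx}

    # Step 3 - keep collections plus items that do not belong to any collection.
    return [
        element
        for idx, element in enumerate(elements)
        if idx in collection_idx
        or element.get("serientitel") not in collection_titles
    ]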
element_dict = response.meta["item"] - return element_dict['downloadUrl'] + return element_dict["downloadUrl"] def getLicense(self, response): license = LomBase.getLicense(self, response) @@ -170,18 +204,18 @@ def getLicense(self, response): element_dict = response.meta["item"] if "oeffentlich" in element_dict and element_dict["oeffentlich"] == "0": # private - license.replace_value('internal', Constants.LICENSE_NONPUBLIC) + license.replace_value("internal", Constants.LICENSE_NONPUBLIC) else: - license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # public + license.replace_value("internal", Constants.LICENSE_COPYRIGHT_LAW) # public return license def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) - technical.add_value('format', 'text/html') - technical.add_value('location', self.getUri(response)) - technical.add_value('size', len(response.body)) + technical.add_value("format", "text/html") + technical.add_value("location", self.getUri(response)) + technical.add_value("size", len(response.body)) return technical diff --git a/etl/converter/spiders/memucho_spider.py b/etl/converter/spiders/memucho_spider.py index 15317d25..73168eb8 100644 --- a/etl/converter/spiders/memucho_spider.py +++ b/etl/converter/spiders/memucho_spider.py @@ -2,79 +2,106 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.json_base import JSONBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.json_base import JSONBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import Constants; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import Constants # Spider to fetch RSS from planet schule class MemuchoSpider(CrawlSpider, LomBase, JSONBase): - name='memucho_spider' - friendlyName='memucho' - url = 'https://memucho.de' - start_urls = ['https://memucho.de/api/edusharing/search?pageSize=999999'] - version = '0.1' + name = "memucho_spider" + friendlyName = "memucho" + url = "https://memucho.de" + start_urls = ["https://memucho.de/api/edusharing/search?pageSize=999999"] + version = "0.1" - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) + def mapResponse(self, response): + return LomBase.mapResponse(self, response) - def mapResponse(self, response): - return LomBase.mapResponse(self, response) + def getId(self, response): + return response.meta["item"].get("TopicId") - def getId(self, response): - return response.meta['item'].get('TopicId') - def getHash(self, response): - # @TODO: Api currently does not seem to have a hash value - return time.time() + def getHash(self, response): + # @TODO: Api currently does not seem to have a hash value + return time.time() - def parse(self, response): - data = json.loads(response.body_as_unicode()) - - for item in data.get('Items'): - copyResponse = response.copy() - copyResponse.meta['item'] = item - if self.hasChanged(copyResponse): - yield scrapy.Request(url = item.get('ItemUrl'), callback = self.handleLink, meta = {'item': item}) + def parse(self, response): + data = json.loads(response.body_as_unicode()) - def handleLink(self, response): - 
return LomBase.parse(self, response) + for item in data.get("Items"): + copyResponse = response.copy() + copyResponse.meta["item"] = item + if self.hasChanged(copyResponse): + yield scrapy.Request( + url=item.get("ItemUrl"), + callback=self.handleLink, + meta={"item": item}, + ) - # thumbnail is always the same, do not use the one from rss - def getBase(self, response): - base = LomBase.getBase(self, response) - thumb = response.xpath('//meta[@property="og:image"]//@content').get() - if thumb: - base.add_value('thumbnail', self.url + thumb.replace('_350','_1000')) - # base.add_value('thumbnail', self.url + '/Images/Categories/' + str(self.getId(response)) + '_1000.jpg') - return base + def handleLink(self, response): + return LomBase.parse(self, response) - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.add_value('title', response.meta['item'].get('Name').strip()) - general.add_value('keyword', list(filter(lambda x: x,map(lambda x: x.strip(), response.xpath('//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()').getall())))) - description = '\n'.join(list(filter(lambda x: x,map(lambda x: x.strip(), response.xpath('//*[@id="ContentModuleApp"]//*[@content-module-type="inlinetext"]//p//text()').getall())))).strip() - general.add_value('description', description) - return general - + # thumbnail is always the same, do not use the one from rss + def getBase(self, response): + base = LomBase.getBase(self, response) + thumb = response.xpath('//meta[@property="og:image"]//@content').get() + if thumb: + base.add_value("thumbnail", self.url + thumb.replace("_350", "_1000")) + # base.add_value('thumbnail', self.url + '/Images/Categories/' + str(self.getId(response)) + '_1000.jpg') + return base - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('format', 'text/html') - technical.add_value('location', response.url) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value('url', Constants.LICENSE_CC_BY_40) - return license - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - return valuespaces + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.add_value("title", response.meta["item"].get("Name").strip()) + general.add_value( + "keyword", + list( + filter( + lambda x: x, + map( + lambda x: x.strip(), + response.xpath( + '//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()' + ).getall(), + ), + ) + ), + ) + description = "\n".join( + list( + filter( + lambda x: x, + map( + lambda x: x.strip(), + response.xpath( + '//*[@id="ContentModuleApp"]//*[@content-module-type="inlinetext"]//p//text()' + ).getall(), + ), + ) + ) + ).strip() + general.add_value("description", description) + return general + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("format", "text/html") + technical.add_value("location", response.url) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("url", Constants.LICENSE_CC_BY_40) + return license + + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + return valuespaces diff --git a/etl/converter/spiders/merlin_spider.py b/etl/converter/spiders/merlin_spider.py index 14add229..bb6886a7 100644 --- a/etl/converter/spiders/merlin_spider.py 
+++ b/etl/converter/spiders/merlin_spider.py @@ -1,5 +1,3 @@ -from datetime import datetime - import xmltodict as xmltodict from lxml import etree from scrapy.spiders import CrawlSpider @@ -16,12 +14,12 @@ class MerlinSpider(CrawlSpider, LomBase): Author: Ioannis Koumarelas, ioannis.koumarelas@hpi.de, Schul-Cloud, Content team. """ - name = 'merlin_spider' - domain = 'https://merlin.nibis.de' - url = 'https://merlin.nibis.de/index.php' # the url which will be linked as the primary link to your source (should be the main url of your site) - friendlyName = 'Merlin' # name as shown in the search ui - version = '0.1' # the version of your crawler, used to identify if a reimport is necessary - apiUrl = 'https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*' # * regular expression, to represent all possible values. + + name = "merlin_spider" + url = "https://merlin.nibis.de/index.php" # the url which will be linked as the primary link to your source (should be the main url of your site) + friendlyName = "Merlin" # name as shown in the search ui + version = "0.1" # the version of your crawler, used to identify if a reimport is necessary + apiUrl = "https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*" # * regular expression, to represent all possible values. limit = 100 page = 0 @@ -30,12 +28,13 @@ def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) def start_requests(self): - yield scrapy.Request(url=self.apiUrl.replace('%start', str(self.page * self.limit)) - .replace('%anzahl', str(self.limit)), - callback=self.parse, headers={ - 'Accept': 'application/xml', - 'Content-Type': 'application/xml' - }) + yield scrapy.Request( + url=self.apiUrl.replace("%start", str(self.page * self.limit)).replace( + "%anzahl", str(self.limit) + ), + callback=self.parse, + headers={"Accept": "application/xml", "Content-Type": "application/xml"}, + ) def parse(self, response: scrapy.http.Response): print("Parsing URL: " + response.url) @@ -49,61 +48,81 @@ def parse(self, response: scrapy.http.Response): root = etree.XML(response.body) tree = etree.ElementTree(root) + # Get the total number of possible elements + elements_total = int(tree.xpath('/root/sum')[0].text) + # If results are returned. - elements = tree.xpath('/root/items/*') + elements = tree.xpath("/root/items/*") if len(elements) > 0: for element in elements: copyResponse = response.copy() - element_xml_str = etree.tostring(element, pretty_print=True, encoding='unicode') + element_xml_str = etree.tostring( + element, pretty_print=True, encoding="unicode" + ) element_dict = xmltodict.parse(element_xml_str) + try: + # TODO: It's probably a pointless attribute. + # del element_dict["data"]["score"] - # TODO: It's probably a pointless attribute. - #del element_dict["data"]["score"] + # Passing the dictionary for easier access to attributes. + copyResponse.meta["item"] = element_dict["data"] - # Passing the dictionary for easier access to attributes. 
- copyResponse.meta['item'] = element_dict["data"] + # In case JSON string representation is preferred: + # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) + copyResponse._set_body(element_xml_str) - # In case JSON string representation is preferred: - # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) - copyResponse._set_body(element_xml_str) + if self.hasChanged(copyResponse): + yield self.handleEntry(copyResponse) - if self.hasChanged(copyResponse): - yield self.handleEntry(copyResponse) + # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. + LomBase.parse(self, copyResponse) + except Exception as e: + print("Issues with the element: " + str(element_dict["id_local"]) if "id_local" in element_dict else "") + print(str(e)) - # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. - LomBase.parse(self, copyResponse) + current_expected_count = (self.page+1) * self.limit # TODO: To not stress the Rest APIs. # time.sleep(0.1) - # If the number of returned results is equal to the imposed limit, it means that there are more to be returned. - if len(elements) == self.limit: + # If we are below the total available numbers continue fetching more pages. + if current_expected_count < elements_total: self.page += 1 - url = self.apiUrl.replace('%start', str(self.page * self.limit)).replace('%anzahl', str(self.limit)) - yield scrapy.Request(url=url, callback=self.parse, headers={ - 'Accept': 'application/xml', - 'Content-Type': 'application/xml' - }) + url = self.apiUrl.replace("%start", str(self.page * self.limit)).replace( + "%anzahl", str(self.limit) + ) + yield scrapy.Request( + url=url, + callback=self.parse, + headers={ + "Accept": "application/xml", + "Content-Type": "application/xml", + }, + ) def getId(self, response): - return response.xpath('/data/id_local/text()').get() + return response.xpath("/data/id_local/text()").get() def getHash(self, response): """ Since we have no 'last_modified' date from the elements we cannot do something better. Therefore, the current implementation takes into account (1) the code version, (2) the item's ID, and (3) the date (day, month, year). """ - return hash(self.version) + hash(self.getId(response)) + self._date_to_integer(datetime.date(datetime.now())) + return ( + hash(self.version) + + hash(self.getId(response)) + # + self._date_to_integer(datetime.date(datetime.now())) + ) - def _date_to_integer(self, dt_time): - """ Converting the date to an integer, so it is useful in the getHash method - Using prime numbers for less collisions. """ - return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day + # def _date_to_integer(self, dt_time): + # """ Converting the date to an integer, so it is useful in the getHash method + # Using prime numbers for less collisions. 
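# --- Editor's note: illustrative sketch, not part of this patch. ---
# Two small observations on the Merlin parse() changes above:
# 1) In the new except block, `"..." + str(x) if cond else ""` parses as
#    `("..." + str(x)) if cond else ""`, so a missing "id_local" prints an
#    empty line rather than the message prefix. A precedence-safe variant
#    (using logging instead of print) might look like this:
import logging


def report_element_issue(element_dict, error):
    element_id = element_dict.get("id_local", "<unknown>")
    logging.warning("Issues with the element %s: %s", element_id, error)


# 2) The new pagination condition in isolation: keep requesting pages while
#    the number of elements requested so far stays below /root/sum.
def has_more_pages(page, limit, elements_total):
    return (page + 1) * limit < elements_total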
""" + # return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day def mapResponse(self, response): - r = ResponseItemLoader(response = response) - r.add_value('status',response.status) - r.add_value('headers',response.headers) - r.add_value('url', self.getUri(response)) + r = ResponseItemLoader(response=response) + r.add_value("status", response.status) + r.add_value("headers", response.headers) + r.add_value("url", self.getUri(response)) return r def handleEntry(self, response): @@ -111,23 +130,28 @@ def handleEntry(self, response): def getBase(self, response): base = LomBase.getBase(self, response) - base.add_value('thumbnail', response.xpath('/data/thumbnail/text()').get()) - if response.xpath('/data/srcLogoUrl/text()').get(): - base.add_value('defaultThumbnail', self.domain + response.xpath('/data/srcLogoUrl/text()').get()) - elif response.xpath('/data/logo/text()').get(): - base.add_value('defaultThumbnail', self.domain + response.xpath('/data/logo/text()').get()) + base.add_value("thumbnail", response.xpath("/data/thumbnail/text()").get()) + if response.xpath("/data/srcLogoUrl/text()").get(): + base.add_value("defaultThumbnail", "https://merlin.nibis.de" + response.xpath("/data/srcLogoUrl/text()").get()) + elif response.xpath("/data/logo/text()").get(): + base.add_value("defaultThumbnail", "https://merlin.nibis.de" + response.xpath("/data/logo/text()").get()) + else: # backup thumbnail hard-coded. + base.add_value('defaultThumbnail', 'https://merlin.nibis.de/logos/bs_logos/merlin.png') + return base def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) - general.add_value('title', response.xpath('/data/titel/text()').get()) - general.add_value('description', response.xpath('/data/beschreibung/text()').get()) + general.add_value("title", response.xpath("/data/titel/text()").get()) + general.add_value( + "description", response.xpath("/data/beschreibung/text()").get() + ) return general def getUri(self, response): - location = response.xpath('/data/media_url/text()').get() + location = response.xpath("/data/media_url/text()").get() return "http://merlin.nibis.de" + location def getLicense(self, response): @@ -146,26 +170,26 @@ def getLicense(self, response): def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) - technical.add_value('format', 'text/html') - technical.add_value('location', self.getUri(response)) - technical.add_value('size', len(response.body)) + technical.add_value("format", "text/html") + technical.add_value("location", self.getUri(response)) + technical.add_value("size", len(response.body)) return technical def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) - bildungsebene = response.xpath('/data/bildungsebene/text()').get() + bildungsebene = response.xpath("/data/bildungsebene/text()").get() if bildungsebene is not None: - valuespaces.add_value('intendedEndUserRole', bildungsebene.split(';')) + valuespaces.add_value("intendedEndUserRole", bildungsebene.split(";")) # Use the dictionary when it is easier. 
element_dict = response.meta["item"] - if len(response.xpath('/data/fach/*')) > 0: + if len(response.xpath("/data/fach/*")) > 0: element_dict = response.meta["item"] discipline = list(element_dict["fach"].values())[0] - valuespaces.add_value('discipline', discipline) + valuespaces.add_value("discipline", discipline) # Consider https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/learningResourceType/index.html ressource = element_dict["ressource"] if "ressource" in element_dict else None @@ -184,9 +208,12 @@ def getValuespaces(self, response): "Weiteres_Material": "Anderes Material", "Diagramm": "Veranschaulichung", } - resource_types = [merlin_to_oeh_types[rt] if rt in merlin_to_oeh_types else rt.lower() for rt in resource_types] + resource_types = [ + merlin_to_oeh_types[rt] if rt in merlin_to_oeh_types else rt.lower() + for rt in resource_types + ] - valuespaces.add_value('learningResourceType', resource_types) + valuespaces.add_value("learningResourceType", resource_types) return valuespaces def getPermissions(self, response): @@ -200,22 +227,23 @@ def getPermissions(self, response): element_dict = response.meta["item"] - permissions.replace_value('public', True) + permissions.replace_value("public", False) + permissions.add_value("autoCreateGroups", True) # If the license is private. if "kreis_id" in element_dict and element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: # Self-explained. 1 media center per Kreis-code in this case. - permissions.add_value("autoCreateGroups", True) # permissions.add_value("autoCreateMediacenters", True) - kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... if not isinstance(kreis_ids, list): # one element kreis_ids = [kreis_ids] kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) # kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix - permissions.replace_value('public', False) - permissions.add_value('groups', ['Lower Saxony']) + # permissions.add_value('groups', ['Lower Saxony']) + permissions.add_value("groups", ["LowerSaxony-private"]) # permissions.add_value('mediacenters', kreis_ids) + else: + permissions.add_value("groups", ["LowerSaxony-public"]) return permissions diff --git a/etl/converter/spiders/oai_base.py b/etl/converter/spiders/oai_base.py index 736ffa37..7887d0fd 100644 --- a/etl/converter/spiders/oai_base.py +++ b/etl/converter/spiders/oai_base.py @@ -1,10 +1,11 @@ from converter.items import * -from converter.spiders.lom_base import LomBase; +from converter.spiders.lom_base import LomBase import logging import vobject + class OAIBase(scrapy.Spider, LomBase): - verb="ListIdentifiers" + verb = "ListIdentifiers" baseUrl = None metadataPrefix = None set = None @@ -14,160 +15,226 @@ def __init__(self, **kwargs): def getId(self, response): response.selector.remove_namespaces() - if 'header' in response.meta: - header = response.meta['header'] + if "header" in response.meta: + header = response.meta["header"] else: - header = response.xpath('//OAI-PMH/GetRecord/record/header') - return header.xpath('identifier//text()').extract_first() + header = response.xpath("//OAI-PMH/GetRecord/record/header") + return header.xpath("identifier//text()").extract_first() def getHash(self, response): response.selector.remove_namespaces() - if 'header' in response.meta: - header = response.meta['header'] + if "header" in response.meta: + header = response.meta["header"] else: - header = response.xpath('//OAI-PMH/GetRecord/record/header') - return 
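# --- Editor's note: illustrative sketch, not part of this patch. ---
# The commented-out branch in Merlin's getPermissions() above would turn each
# Kreis code into a spider-prefixed mediacenter id. A standalone sketch of
# that disabled path, assuming element_dict["kreis_id"]["data"] holds a single
# code or a list of codes (as in the code above) and the spider name
# "merlin_spider":
def kreis_codes_to_mediacenters(element_dict, spider_name="merlin_spider"):
    kreis_ids = element_dict["kreis_id"]["data"]
    if not isinstance(kreis_ids, list):  # a single element
        kreis_ids = [kreis_ids]
    kreis_ids = sorted(kreis_ids, key=int)
    return [spider_name + "_" + kreis_id for kreis_id in kreis_ids]


# e.g. {"kreis_id": {"data": ["12", "7"]}} -> ["merlin_spider_7", "merlin_spider_12"]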
header.xpath('datestamp//text()').extract_first() + self.version + header = response.xpath("//OAI-PMH/GetRecord/record/header") + return header.xpath("datestamp//text()").extract_first() + self.version def start_requests(self): - listIdentifiersUrl = self.baseUrl + "?verb=" + self.verb + "&set=" + self.set +"&metadataPrefix=" + self.metadataPrefix - logging.info('OAI starting at ' + listIdentifiersUrl) + listIdentifiersUrl = ( + self.baseUrl + + "?verb=" + + self.verb + + "&set=" + + self.set + + "&metadataPrefix=" + + self.metadataPrefix + ) + logging.info("OAI starting at " + listIdentifiersUrl) yield scrapy.Request(url=listIdentifiersUrl, callback=self.parse) + def getRecordUrl(self, identifier): - return self.baseUrl +"?verb=GetRecord&identifier=" +identifier+"&metadataPrefix="+self.metadataPrefix + return ( + self.baseUrl + + "?verb=GetRecord&identifier=" + + identifier + + "&metadataPrefix=" + + self.metadataPrefix + ) + def parse(self, response): response.selector.remove_namespaces() - for header in response.xpath('//OAI-PMH/ListIdentifiers/header'): + for header in response.xpath("//OAI-PMH/ListIdentifiers/header"): copyResponse = response.copy() - copyResponse.meta['header'] = header + copyResponse.meta["header"] = header if self.hasChanged(copyResponse): - identifier = header.xpath('identifier//text()').extract_first() + identifier = header.xpath("identifier//text()").extract_first() getrecordUrl = self.getRecordUrl(identifier) - self.logger.debug('getrecordUrl: %s', getrecordUrl) + self.logger.debug("getrecordUrl: %s", getrecordUrl) yield scrapy.Request(url=getrecordUrl, callback=self.parseRecord) - resumptionToken = response.xpath('//OAI-PMH/ListIdentifiers/resumptionToken//text()').extract_first() + resumptionToken = response.xpath( + "//OAI-PMH/ListIdentifiers/resumptionToken//text()" + ).extract_first() if resumptionToken: - self.logger.info('resumptionToken: %s', resumptionToken) - nextUrl = self.baseUrl + "?verb=" + self.verb +"&resumptionToken=" +resumptionToken + self.logger.info("resumptionToken: %s", resumptionToken) + nextUrl = ( + self.baseUrl + + "?verb=" + + self.verb + + "&resumptionToken=" + + resumptionToken + ) yield scrapy.Request(url=nextUrl, callback=self.parse) - def parseRecord(self, response): + def parseRecord(self, response): lom = LomBase.parse(self, response) return lom def getBase(self, response): base = LomBase.getBase(self, response) response.selector.remove_namespaces() - record = response.xpath('//OAI-PMH/GetRecord/record') - base.add_value('fulltext', record.xpath('metadata/lom/general/description/string//text()').extract_first()) - thumbnail = record.xpath('metadata/lom/relation/kind/value[text()="hasthumbnail"]/parent::*/parent::*/resource/description/string//text()').get() + record = response.xpath("//OAI-PMH/GetRecord/record") + base.add_value( + "fulltext", + record.xpath( + "metadata/lom/general/description/string//text()" + ).extract_first(), + ) + thumbnail = record.xpath( + 'metadata/lom/relation/kind/value[text()="hasthumbnail"]/parent::*/parent::*/resource/description/string//text()' + ).get() if thumbnail: - base.add_value('thumbnail', thumbnail) - #publisher - contributers = record.xpath('metadata/lom/lifeCycle/contribute') + base.add_value("thumbnail", thumbnail) + # publisher + contributers = record.xpath("metadata/lom/lifeCycle/contribute") for contributer in contributers: - role = contributer.xpath('role/value//text()').extract_first() - if role == 'publisher': - vcardStr = contributer.xpath('entity//text()').extract_first() + 
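# --- Editor's note: compact sketch of the OAI-PMH URLs built by OAIBase above
# (start_requests / getRecordUrl / parse); not part of this patch. Harvesting
# works in two stages: page through ListIdentifiers via the resumptionToken,
# then fetch every identifier with GetRecord.
def list_identifiers_url(base_url, oai_set, prefix, resumption_token=None):
    if resumption_token:
        return f"{base_url}?verb=ListIdentifiers&resumptionToken={resumption_token}"
    return f"{base_url}?verb=ListIdentifiers&set={oai_set}&metadataPrefix={prefix}"


def get_record_url(base_url, identifier, prefix):
    return f"{base_url}?verb=GetRecord&identifier={identifier}&metadataPrefix={prefix}"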
role = contributer.xpath("role/value//text()").extract_first() + if role == "publisher": + vcardStr = contributer.xpath("entity//text()").extract_first() vcard = vobject.readOne(vcardStr) - if hasattr(vcard, 'fn'): - base.add_value('publisher',vcard.fn.value) + if hasattr(vcard, "fn"): + base.add_value("publisher", vcard.fn.value) return base def getLOMGeneral(self, response): response.selector.remove_namespaces() - record = response.xpath('//OAI-PMH/GetRecord/record') + record = response.xpath("//OAI-PMH/GetRecord/record") general = LomBase.getLOMGeneral(response) - general.add_value('identifier', record.xpath('header/identifier//text()').extract_first()) - general.add_value('title', record.xpath('metadata/lom/general/title/string//text()').extract_first()) - general.add_value('description', record.xpath('metadata/lom/general/description/string//text()').extract_first()) - keywords = record.xpath('metadata/lom/general/keyword/string//text()').getall() - general.add_value('keyword', keywords ) + general.add_value( + "identifier", record.xpath("header/identifier//text()").extract_first() + ) + general.add_value( + "title", + record.xpath("metadata/lom/general/title/string//text()").extract_first(), + ) + general.add_value( + "description", + record.xpath( + "metadata/lom/general/description/string//text()" + ).extract_first(), + ) + keywords = record.xpath("metadata/lom/general/keyword/string//text()").getall() + general.add_value("keyword", keywords) return general def getLOMEducational(self, response): response.selector.remove_namespaces() - record = response.xpath('//OAI-PMH/GetRecord/record') + record = response.xpath("//OAI-PMH/GetRecord/record") educational = LomBase.getLOMEducational(response) - tarString = record.xpath('metadata/lom/educational/typicalAgeRange/string//text()').extract_first() + tarString = record.xpath( + "metadata/lom/educational/typicalAgeRange/string//text()" + ).extract_first() if tarString: tar = LomAgeRangeItemLoader() - tarSplitted = tarString.split('-') + tarSplitted = tarString.split("-") if len(tarSplitted) > 1: - tar.add_value('fromRange',tarSplitted[0]) - tar.add_value('toRange',tarSplitted[1]) - educational.add_value('typicalAgeRange',tar.load_item()) + tar.add_value("fromRange", tarSplitted[0]) + tar.add_value("toRange", tarSplitted[1]) + educational.add_value("typicalAgeRange", tar.load_item()) else: - self.logger.info('unknown agerange %s',tarString) - educational.add_value('language',record.xpath('metadata/lom/educational/language//text()').extract_first()) + self.logger.info("unknown agerange %s", tarString) + educational.add_value( + "language", + record.xpath("metadata/lom/educational/language//text()").extract_first(), + ) return educational - + def getLOMTechnical(self, response): response.selector.remove_namespaces() - record = response.xpath('//OAI-PMH/GetRecord/record') + record = response.xpath("//OAI-PMH/GetRecord/record") technical = LomBase.getLOMTechnical(response) - technicalEntries = record.xpath('metadata/lom/technical') + technicalEntries = record.xpath("metadata/lom/technical") found = False for entry in technicalEntries: - format = entry.xpath('format//text()').extract_first() - if format == 'text/html': + format = entry.xpath("format//text()").extract_first() + if format == "text/html": found = True - technical.add_value('format', entry.xpath('format//text()').extract_first()) - technical.add_value('size', entry.xpath('size//text()').extract_first()) - technical.add_value('location', 
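# --- Editor's note: minimal standalone example of the vobject-based vCard
# parsing used by getBase() above (and by getLOMLifecycle() further below);
# not part of this patch. The vCard content is made up for illustration.
import vobject

vcard_str = "BEGIN:VCARD\r\nVERSION:3.0\r\nN:Doe;Jane;;;\r\nFN:Jane Doe\r\nEND:VCARD\r\n"
vcard = vobject.readOne(vcard_str)

if hasattr(vcard, "fn"):
    print(vcard.fn.value)        # "Jane Doe"  -> stored as 'publisher'
if hasattr(vcard, "n"):
    print(vcard.n.value.given)   # "Jane"      -> stored as 'firstName'
    print(vcard.n.value.family)  # "Doe"       -> stored as 'lastName'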
entry.xpath('location//text()').extract_first()) + technical.add_value( + "format", entry.xpath("format//text()").extract_first() + ) + technical.add_value("size", entry.xpath("size//text()").extract_first()) + technical.add_value( + "location", entry.xpath("location//text()").extract_first() + ) if not found: - technical.add_value('format', record.xpath('metadata/lom/technical/format//text()').extract_first()) - technical.add_value('size', record.xpath('metadata/lom/technical/size//text()').extract_first()) - technical.add_value('location', record.xpath('metadata/lom/technical/location//text()').extract_first()) + technical.add_value( + "format", + record.xpath("metadata/lom/technical/format//text()").extract_first(), + ) + technical.add_value( + "size", + record.xpath("metadata/lom/technical/size//text()").extract_first(), + ) + technical.add_value( + "location", + record.xpath("metadata/lom/technical/location//text()").extract_first(), + ) return technical def getLOMLifecycle(self, response): response.selector.remove_namespaces() - record = response.xpath('//OAI-PMH/GetRecord/record') + record = response.xpath("//OAI-PMH/GetRecord/record") - role = record.xpath('metadata/lom/lifeCycle/contribute/role/value//text()').extract_first() + role = record.xpath( + "metadata/lom/lifeCycle/contribute/role/value//text()" + ).extract_first() lifecycle = LomBase.getLOMLifecycle(response) - lifecycle.add_value('role',role) - entity = record.xpath('metadata/lom/lifeCycle/contribute/entity//text()').extract_first() + entity = record.xpath( + "metadata/lom/lifeCycle/contribute/entity//text()" + ).extract_first() if entity: vcard = vobject.readOne(entity) - if hasattr(vcard, 'n'): + if hasattr(vcard, "n"): given = vcard.n.value.given family = vcard.n.value.family - lifecycle.add_value('firstName',given) - lifecycle.add_value('lastName',family) + lifecycle.add_value("role", role) + lifecycle.add_value("firstName", given) + lifecycle.add_value("lastName", family) return lifecycle - - def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) - record = response.xpath('//OAI-PMH/GetRecord/record') - - lrts = record.xpath('metadata/lom/educational/learningResourceType/value//text()').getall() - valuespaces.add_value('learningResourceType', lrts) - ier = record.xpath('metadata/lom/educational/intendedEndUserRole/value//text()').getall() - valuespaces.add_value('intendedEndUserRole', ier) - context = record.xpath('metadata/lom/educational/context/value//text()').getall() - valuespaces.add_value('educationalContext',context) - taxonIds = record.xpath('metadata/lom/classification/taxonPath/taxon/id//text()').getall() - valuespaces.add_value('discipline', taxonIds) + record = response.xpath("//OAI-PMH/GetRecord/record") + + lrts = record.xpath( + "metadata/lom/educational/learningResourceType/value//text()" + ).getall() + valuespaces.add_value("learningResourceType", lrts) + ier = record.xpath( + "metadata/lom/educational/intendedEndUserRole/value//text()" + ).getall() + valuespaces.add_value("intendedEndUserRole", ier) + context = record.xpath( + "metadata/lom/educational/context/value//text()" + ).getall() + valuespaces.add_value("educationalContext", context) + taxonIds = record.xpath( + "metadata/lom/classification/taxonPath/taxon/id//text()" + ).getall() + valuespaces.add_value("discipline", taxonIds) return valuespaces - def getLicense(self, response = None): - license = LomBase.getLicense(self,response); - record = response.xpath('//OAI-PMH/GetRecord/record') - for desc in 
record.xpath('metadata/lom/rights/description/string'): - id = desc.xpath('text()').get() - if id.startswith('http'): - license.add_value('url', id); + def getLicense(self, response=None): + license = LomBase.getLicense(self, response) + record = response.xpath("//OAI-PMH/GetRecord/record") + for desc in record.xpath("metadata/lom/rights/description/string"): + id = desc.xpath("text()").get() + if id.startswith("http"): + license.add_value("url", id) else: - license.add_value('internal', id); + license.add_value("internal", id) return license - - - diff --git a/etl/converter/spiders/oai_sodis_spider.py b/etl/converter/spiders/oai_sodis_spider.py index f0472d6b..4fbf82ed 100644 --- a/etl/converter/spiders/oai_sodis_spider.py +++ b/etl/converter/spiders/oai_sodis_spider.py @@ -1,44 +1,49 @@ from converter.spiders.oai_base import OAIBase + class OAISodis(OAIBase): - verb="listIdentifiers" + verb = "listIdentifiers" baseUrl = "https://sodis.de/cp/oai_pmh/oai.php" metadataPrefix = "oai_lom-de" set = "oer_mebis_activated" - name="oai_sodis_spider" - friendlyName='FWU Sodis Contentpool' + name = "oai_sodis_spider" + friendlyName = "FWU Sodis Contentpool" url = "https://fwu.de/" - version = '0.1' + version = "0.1" def __init__(self, **kwargs): OAIBase.__init__(self, **kwargs) - #def getRecordUrl(self, identifier): + # def getRecordUrl(self, identifier): # return self.baseUrl +"?verb=GetRecord&identifier=" +identifier+"&metadataPrefix="+self.metadataPrefix+"&set="+self.set def getBase(self, response): base = OAIBase.getBase(self, response) - record = response.xpath('//OAI-PMH/GetRecord/record') - for relation in record.xpath('metadata/lom/relation'): - kind = relation.xpath('kind/value//text()').extract_first() - if kind == 'hasthumbnail': - thumbUrl = relation.xpath('resource/description/string//text()').extract_first() - base.add_value('thumbnail', thumbUrl) + record = response.xpath("//OAI-PMH/GetRecord/record") + for relation in record.xpath("metadata/lom/relation"): + kind = relation.xpath("kind/value//text()").extract_first() + if kind == "hasthumbnail": + thumbUrl = relation.xpath( + "resource/description/string//text()" + ).extract_first() + base.add_value("thumbnail", thumbUrl) return base - def parseRecord(self, response): + def parseRecord(self, response): lom = OAIBase.parseRecord(self, response) try: - if 'publisher' in lom: - publisher = lom['publisher'] + if "publisher" in lom: + publisher = lom["publisher"] if publisher: publisher = publisher.lower() - if 'siemens' in publisher: - id = lom['sourceId'] - self.logger.info('PUBLISHER contains siemens return None: %s',id); + if "siemens" in publisher: + id = lom["sourceId"] + self.logger.info( + "PUBLISHER contains siemens return None: %s", id + ) return None except: - self.logger.info('PUBLISHER was not parsable, will skip entry') + self.logger.info("PUBLISHER was not parsable, will skip entry") return None return lom diff --git a/etl/converter/spiders/oeh_rss_spider.py b/etl/converter/spiders/oeh_rss_spider.py new file mode 100644 index 00000000..5b52b0bb --- /dev/null +++ b/etl/converter/spiders/oeh_rss_spider.py @@ -0,0 +1,42 @@ +from scrapy.spiders import CrawlSpider +from converter.items import * +import time +from w3lib.html import remove_tags, replace_escape_chars + +from converter.spiders.csv_base import CSVBase +from converter.spiders.lom_base import LomBase +from converter.spiders.rss_list_base import RSSListBase +import json +import logging +from html.parser import HTMLParser +from converter.pipelines import 
ProcessValuespacePipeline +import re +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import Constants + +# Spider to fetch RSS from planet schule +class OEHRSSSpider(RSSListBase): + name = "oeh_rss_spider" + friendlyName = "Open Edu Hub RSS" + version = "0.1.0" + + def __init__(self, **kwargs): + RSSListBase.__init__(self, "csv/oeh_rss.csv", **kwargs) + + def getBase(self, response): + base = RSSListBase.getBase(self, response) + base.replace_value( + "origin", self.getCSVValue(response, CSVBase.COLUMN_SOURCE_TITLE) + ) + return base + + def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: + lifecycle = RSSListBase.getLOMLifecycle(self, response) + lifecycle.add_value("role", "author") + lifecycle.add_value( + "organization", self.getCSVValue(response, CSVBase.COLUMN_SOURCE_TITLE) + ) + lifecycle.add_value( + "url", self.getCSVValue(response, CSVBase.COLUMN_SOURCE_URL) + ) + return lifecycle diff --git a/etl/converter/spiders/oeh_spider.py b/etl/converter/spiders/oeh_spider.py new file mode 100644 index 00000000..59f0f706 --- /dev/null +++ b/etl/converter/spiders/oeh_spider.py @@ -0,0 +1,29 @@ +import logging + +from converter.spiders.edu_sharing_base import EduSharingBase + + +class OEHSpider(EduSharingBase): + name = "oeh_spider" + friendlyName = "Open Edu Hub" + url = "https://redaktion.openeduhub.net/edu-sharing/" + apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" + version = "0.1.0" + mdsId = "mds_oeh" + + def __init__(self, **kwargs): + EduSharingBase.__init__(self, **kwargs) + + def getBase(self, response): + base = EduSharingBase.getBase(self, response) + base.replace_value("type", self.getProperty("ccm:objecttype", response)) + return base + + def shouldImport(self, response=None): + if "ccm:collection_io_reference" in response.meta["item"]["aspects"]: + logging.info( + "Skipping collection_io_reference with id " + + response.meta["item"]["ref"]["id"] + ) + return False + return True diff --git a/etl/converter/spiders/planet_schule_spider.py b/etl/converter/spiders/planet_schule_spider.py index 6cba5394..4dca257e 100644 --- a/etl/converter/spiders/planet_schule_spider.py +++ b/etl/converter/spiders/planet_schule_spider.py @@ -2,72 +2,90 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.rss_base import RSSBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.rss_base import RSSBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import Constants; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import Constants # Spider to fetch RSS from planet schule class PlanetSchuleSpider(RSSBase): - name='planet_schule_spider' - friendlyName='planet schule' - url = 'https://www.planet-schule.de' - start_urls = ['https://www.planet-schule.de/data/planet-schule-vodcast-komplett.rss'] - version = '0.1' + name = "planet_schule_spider" + friendlyName = "planet schule" + url = "https://www.planet-schule.de" + start_urls = [ + "https://www.planet-schule.de/data/planet-schule-vodcast-komplett.rss" + ] + version = "0.1" - def __init__(self, **kwargs): - RSSBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + 
RSSBase.__init__(self, **kwargs) - def mapResponse(self, response): - return LomBase.mapResponse(self, response) + def mapResponse(self, response): + return LomBase.mapResponse(self, response) - def startHandler(self, response): - for item in response.xpath('//rss/channel/item'): - copyResponse = response.copy() - copyResponse.meta['item'] = item - if self.hasChanged(copyResponse): - yield scrapy.Request(url = item.xpath('link//text()').get(), callback = self.handleLink, meta = {'item': item}) + def startHandler(self, response): + for item in response.xpath("//rss/channel/item"): + copyResponse = response.copy() + copyResponse.meta["item"] = item + if self.hasChanged(copyResponse): + yield scrapy.Request( + url=item.xpath("link//text()").get(), + callback=self.handleLink, + meta={"item": item}, + ) - def handleLink(self, response): - return LomBase.parse(self, response) + def handleLink(self, response): + return LomBase.parse(self, response) - # thumbnail is always the same, do not use the one from rss - def getBase(self, response): - return LomBase.getBase(self, response) + # thumbnail is always the same, do not use the one from rss + def getBase(self, response): + return LomBase.getBase(self, response) - def getLOMGeneral(self, response): - general = RSSBase.getLOMGeneral(self, response) - general.add_value('keyword', response.xpath('//div[@class="sen_info_v2"]//p[contains(text(),"Schlagworte")]/parent::*/parent::*/div[last()]/p/a//text()').getall()) - return general + def getLOMGeneral(self, response): + general = RSSBase.getLOMGeneral(self, response) + general.add_value( + "keyword", + response.xpath( + '//div[@class="sen_info_v2"]//p[contains(text(),"Schlagworte")]/parent::*/parent::*/div[last()]/p/a//text()' + ).getall(), + ) + return general - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('format', 'text/html') - technical.add_value('location', response.url) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) - return license - - def getValuespaces(self, response): - valuespaces = RSSBase.getValuespaces(self, response) - try: - range = response.xpath('//div[@class="sen_info_v2"]//p[contains(text(),"Klassenstufe")]/parent::*/parent::*/div[last()]/p//text()').get() - range = range.split(" - ") - valuespaces.add_value('educationalContext', ValuespaceHelper.educationalContextByGrade(range)) - except: - pass - discipline = response.xpath('//div[@class="sen_info_v2"]//p[contains(text(),"Fächer")]/parent::*/parent::*/div[last()]/p/a//text()').getall() - valuespaces.add_value('discipline',discipline) - lrt = ValuespaceHelper.mimetypeToLearningResourceType(response.meta['item'].xpath('enclosure/@type').get()) - if lrt: - valuespaces.add_value('learningResourceType', lrt) - return valuespaces + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("format", "text/html") + technical.add_value("location", response.url) + return technical + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) + return license + + def getValuespaces(self, response): + valuespaces = RSSBase.getValuespaces(self, response) + try: + range = response.xpath( + '//div[@class="sen_info_v2"]//p[contains(text(),"Klassenstufe")]/parent::*/parent::*/div[last()]/p//text()' + ).get() + range = 
range.split(" - ") + valuespaces.add_value( + "educationalContext", ValuespaceHelper.educationalContextByGrade(range) + ) + except: + pass + discipline = response.xpath( + '//div[@class="sen_info_v2"]//p[contains(text(),"Fächer")]/parent::*/parent::*/div[last()]/p/a//text()' + ).getall() + valuespaces.add_value("discipline", discipline) + lrt = ValuespaceHelper.mimetypeToLearningResourceType( + response.meta["item"].xpath("enclosure/@type").get() + ) + if lrt: + valuespaces.add_value("learningResourceType", lrt) + return valuespaces diff --git a/etl/converter/spiders/rlp_spider.py b/etl/converter/spiders/rlp_spider.py index 86cab717..f9a96e12 100644 --- a/etl/converter/spiders/rlp_spider.py +++ b/etl/converter/spiders/rlp_spider.py @@ -2,28 +2,35 @@ from converter.spiders.oai_base import OAIBase from scrapy.spiders import SitemapSpider + class RLPSpider(OAIBase): - name = 'rlp_spider' - friendlyName = 'Schulcampus RLP' - #sitemap_urls = ['https://cloud.schulcampus-rlp.de/edu-sharing/eduservlet/sitemap'] - url = 'https://cloud.schulcampus-rlp.de' - baseUrl = 'https://cloud.schulcampus-rlp.de/edu-sharing/eduservlet/oai/provider' - set = 'default' - metadataPrefix = 'lom' - version = '0.1.0' + name = "rlp_spider" + friendlyName = "Schulcampus RLP" + # sitemap_urls = ['https://cloud.schulcampus-rlp.de/edu-sharing/eduservlet/sitemap'] + url = "https://cloud.schulcampus-rlp.de" + baseUrl = "https://cloud.schulcampus-rlp.de/edu-sharing/eduservlet/oai/provider" + set = "default" + metadataPrefix = "lom" + version = "0.1.0" - def __init__(self, **kwargs): - OAIBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + OAIBase.__init__(self, **kwargs) - def shouldImport(self, response): - response.selector.remove_namespaces() - record = response.xpath('//OAI-PMH/GetRecord/record') - rightsDescriptions = record.xpath('metadata/lom/rights/description/string//text()').get() - if not rightsDescriptions: - return False - return ( - rightsDescriptions.startswith('https://creativecommons.org/licenses/pdm') or - rightsDescriptions.startswith('https://creativecommons.org/publicdomain/zero') or - rightsDescriptions.startswith('https://creativecommons.org/licenses/by') or - rightsDescriptions.startswith('https://creativecommons.org/licenses/by-sa') - ) + def shouldImport(self, response): + response.selector.remove_namespaces() + record = response.xpath("//OAI-PMH/GetRecord/record") + rightsDescriptions = record.xpath( + "metadata/lom/rights/description/string//text()" + ).get() + if not rightsDescriptions: + return False + return ( + rightsDescriptions.startswith("https://creativecommons.org/licenses/pdm") + or rightsDescriptions.startswith( + "https://creativecommons.org/publicdomain/zero" + ) + or rightsDescriptions.startswith("https://creativecommons.org/licenses/by") + or rightsDescriptions.startswith( + "https://creativecommons.org/licenses/by-sa" + ) + ) diff --git a/etl/converter/spiders/rss_base.py b/etl/converter/spiders/rss_base.py index 067c5de2..0a43137d 100644 --- a/etl/converter/spiders/rss_base.py +++ b/etl/converter/spiders/rss_base.py @@ -3,7 +3,8 @@ import time import logging from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; +from converter.spiders.lom_base import LomBase + class RSSBase(CrawlSpider, LomBase): start_urls = [] @@ -15,25 +16,33 @@ def __init__(self, **kwargs): def parse(self, response): response.selector.remove_namespaces() - #common properties - self.commonProperties['language'] = 
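# --- Editor's note: illustrative sketch, not part of this patch. ---
# RLPSpider.shouldImport() above whitelists licenses by URL prefix. Since
# str.startswith() also accepts a tuple of prefixes, the chained checks can be
# collapsed; note that the ".../licenses/by" prefix already matches
# ".../licenses/by-sa" (and the other "by-*" variants), exactly as in the
# chain above.
ALLOWED_LICENSE_PREFIXES = (
    "https://creativecommons.org/licenses/pdm",
    "https://creativecommons.org/publicdomain/zero",
    "https://creativecommons.org/licenses/by",
)


def is_allowed_license(rights_description):
    return bool(rights_description) and rights_description.startswith(
        ALLOWED_LICENSE_PREFIXES
    )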
response.xpath('//rss/channel/language//text()').get() - self.commonProperties['source'] = response.xpath('//rss/channel/generator//text()').get() - self.commonProperties['publisher'] = response.xpath('//rss/channel/author//text()').get() - self.commonProperties['thumbnail'] = response.xpath('//rss/channel/image/url//text()').get() + # common properties + self.commonProperties["language"] = response.xpath( + "//rss/channel/language//text()" + ).get() + self.commonProperties["source"] = response.xpath( + "//rss/channel/generator//text()" + ).get() + self.commonProperties["publisher"] = response.xpath( + "//rss/channel/author//text()" + ).get() + self.commonProperties["thumbnail"] = response.xpath( + "//rss/channel/image/url//text()" + ).get() self.response = response return self.startHandler(response) - + def startHandler(self, response): - for item in response.xpath('//rss/channel/item'): - responseCopy = response.replace(url = item.xpath('link//text()').get()) - responseCopy.meta['item'] = item + for item in response.xpath("//rss/channel/item"): + responseCopy = response.replace(url=item.xpath("link//text()").get()) + responseCopy.meta["item"] = item yield LomBase.parse(self, responseCopy) def getId(self, response): - return response.meta['item'].xpath('link//text()').get() + return response.meta["item"].xpath("link//text()").get() def getHash(self, response): - return self.version + response.meta['item'].xpath('pubDate//text()').get() + return self.version + str(response.meta["item"].xpath("pubDate//text()").get()) def mapResponse(self, response): r = LomBase.mapResponse(self, response) @@ -41,27 +50,35 @@ def mapResponse(self, response): def getBase(self, response): base = LomBase.getBase(self, response) - thumbnail = self.commonProperties['thumbnail'] + thumbnail = self.commonProperties["thumbnail"] if thumbnail: - base.add_value('thumbnail', thumbnail) + base.add_value("thumbnail", thumbnail) return base def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) - general.add_value('identifier', response.meta['item'].xpath('guid//text()').get()) - general.add_value('title', response.meta['item'].xpath('title//text()').get()) - general.add_value('language', self.commonProperties['language']) - description = response.meta['item'].xpath('description//text()').get() + general.add_value( + "identifier", response.meta["item"].xpath("guid//text()").get() + ) + general.add_value( + "title", response.meta["item"].xpath("title//text()").get().strip() + ) + general.add_value("language", self.commonProperties["language"]) + description = response.meta["item"].xpath("description//text()").get() if not description: - description = response.meta['item'].xpath('//*[name()="summary"]//text()').get() - general.add_value('description', description) + description = ( + response.meta["item"].xpath('//*[name()="summary"]//text()').get() + ) + general.add_value("description", description) return general def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) - #technical.add_value('format', item.xpath('enclosure/@type').get()) - #technical.add_value('size', item.xpath('enclosure/@length').get()) - #technical.add_value('location', item.xpath('enclosure/@url').get()) - technical.add_value('format', 'text/html') - technical.add_value('location', response.meta['item'].xpath('link//text()').get()) - return technical \ No newline at end of file + # technical.add_value('format', item.xpath('enclosure/@type').get()) + # technical.add_value('size', 
item.xpath('enclosure/@length').get()) + # technical.add_value('location', item.xpath('enclosure/@url').get()) + technical.add_value("format", "text/html") + technical.add_value( + "location", response.meta["item"].xpath("link//text()").get() + ) + return technical diff --git a/etl/converter/spiders/rss_list_base.py b/etl/converter/spiders/rss_list_base.py index ce15b651..8981ffc2 100644 --- a/etl/converter/spiders/rss_list_base.py +++ b/etl/converter/spiders/rss_list_base.py @@ -3,32 +3,39 @@ import time import logging from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.rss_base import RSSBase; -from converter.spiders.csv_base import CSVBase; -from converter.valuespace_helper import ValuespaceHelper; +from converter.spiders.lom_base import LomBase +from converter.spiders.rss_base import RSSBase +from converter.spiders.csv_base import CSVBase +from converter.valuespace_helper import ValuespaceHelper import csv import os + # rss crawler with a list of entries to crawl and map # for supported columns, please check the CSVbase class class RSSListBase(RSSBase, LomBase): mappings = {} start_urls = [] rows = {} + def getCSVValue(self, response, fieldName): - data = list(map(lambda x: x.strip(),response.meta['row'][self.mappings[fieldName]].split(";"))) - if len(list(filter(lambda x: x != '', data))) > 0: + data = list( + map( + lambda x: x.strip(), + response.meta["row"][self.mappings[fieldName]].split(";"), + ) + ) + if len(list(filter(lambda x: x != "", data))) > 0: return data return None - def __init__(self, file, delimiter = ',', **kwargs): + def __init__(self, file, delimiter=",", **kwargs): LomBase.__init__(self, **kwargs) dir = os.path.dirname(os.path.realpath(__file__)) - with open(dir + '/../../' + file) as csvFile: + with open(dir + "/../../" + file, encoding="utf-8") as csvFile: csvReader = csv.reader(csvFile, delimiter=delimiter) i = 0 for row in csvReader: - if i==0: + if i == 0: j = 0 for c in row: self.mappings[c] = j @@ -37,32 +44,50 @@ def __init__(self, file, delimiter = ',', **kwargs): continue url = row[self.mappings[CSVBase.COLUMN_URL]] self.rows[url] = row - #self.start_urls.append(url) + # self.start_urls.append(url) i += 1 + def start_requests(self): requests = [] for url in self.rows: - requests.append(scrapy.Request(url=url, callback=self.parse, meta = {'row': self.rows[url]})) + requests.append( + scrapy.Request( + url=url, callback=self.parse, meta={"row": self.rows[url]} + ) + ) return requests def getLOMGeneral(self, response): general = RSSBase.getLOMGeneral(self, response) - general.replace_value('language', self.getCSVValue(response, CSVBase.COLUMN_LANGUAGE)) - general.replace_value('keyword', self.getCSVValue(response, CSVBase.COLUMN_KEYWORD)) + general.replace_value( + "language", self.getCSVValue(response, CSVBase.COLUMN_LANGUAGE) + ) + general.replace_value( + "keyword", self.getCSVValue(response, CSVBase.COLUMN_KEYWORD) + ) return general - + def getLicense(self, response): license = LomBase.getLicense(self, response) - license.add_value('internal', self.getCSVValue(response, CSVBase.COLUMN_LICENSE)) + license.add_value( + "internal", self.getCSVValue(response, CSVBase.COLUMN_LICENSE) + ) return license def getValuespaces(self, response): valuespaces = RSSBase.getValuespaces(self, response) - valuespaces.add_value('educationalContext', ValuespaceHelper.educationalContextByAgeRange([ - self.getCSVValue(response, CSVBase.COLUMN_TYPICAL_AGE_RANGE_FROM)[0], - 
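# --- Editor's note: standalone illustration of the getCSVValue() splitting
# behaviour in RSSListBase above; not part of this patch. The cell values are
# made up.
def split_csv_cell(cell):
    """Mirror of getCSVValue(): ';'-separated cell -> list, empty cell -> None."""
    data = [part.strip() for part in cell.split(";")]
    return data if any(part != "" for part in data) else None


print(split_csv_cell("Mathematik; Physik"))  # ['Mathematik', 'Physik']
print(split_csv_cell("   "))                 # None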
self.getCSVValue(response, CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO)[0] - ])) - - valuespaces.add_value('discipline', self.getCSVValue(response, CSVBase.COLUMN_DISCIPLINE)) - valuespaces.add_value('learningResourceType', self.getCSVValue(response, CSVBase.COLUMN_LEARNING_RESOURCE_TYPE)) + tar_from = self.getCSVValue(response, CSVBase.COLUMN_TYPICAL_AGE_RANGE_FROM) + tar_to = self.getCSVValue(response, CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO) + if tar_from and tar_to: + valuespaces.add_value( + "educationalContext", + ValuespaceHelper.educationalContextByAgeRange([tar_from[0], tar_to[0]]), + ) + valuespaces.add_value( + "discipline", self.getCSVValue(response, CSVBase.COLUMN_DISCIPLINE) + ) + valuespaces.add_value( + "learningResourceType", + self.getCSVValue(response, CSVBase.COLUMN_LEARNING_RESOURCE_TYPE), + ) return valuespaces diff --git a/etl/converter/spiders/sample_spider.py b/etl/converter/spiders/sample_spider.py index b380121d..45447aef 100644 --- a/etl/converter/spiders/sample_spider.py +++ b/etl/converter/spiders/sample_spider.py @@ -2,74 +2,75 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; +from converter.spiders.lom_base import LomBase import json # Sample Spider, using a SitemapSpider to crawl your web page # Can be used as a template for your custom spider class SampleSpider(CrawlSpider, LomBase): - name = 'sample_spider' - url = 'https://edu-sharing.com' # the url which will be linked as the primary link to your source (should be the main url of your site) - friendlyName = 'Sample Source' # name as shown in the search ui - start_urls = ['https://edu-sharing.com'] - version = '0.1' # the version of your crawler, used to identify if a reimport is necessary + name = "sample_spider" + url = "https://edu-sharing.com" # the url which will be linked as the primary link to your source (should be the main url of your site) + friendlyName = "Sample Source" # name as shown in the search ui + start_urls = ["https://edu-sharing.com"] + version = "0.1" # the version of your crawler, used to identify if a reimport is necessary - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) - def parse(self, response): - return LomBase.parse(self, response) + def parse(self, response): + return LomBase.parse(self, response) - # return a (stable) id of the source - def getId(self, response): - return response.xpath('//title//text()').get() + # return a (stable) id of the source + def getId(self, response): + return response.xpath("//title//text()").get() - # return a stable hash to detect content changes - # if there is no hash available, may use the current time as "always changing" info - # Please include your crawler version as well - def getHash(self, response): - return self.version + time.time() + # return a stable hash to detect content changes + # if there is no hash available, may use the current time as "always changing" info + # Please include your crawler version as well + def getHash(self, response): + return self.version + time.time() - def getBase(self, response): - base = LomBase.getBase(self, response) - # optionlly provide thumbnail. If empty, it will tried to be generated from the getLOMTechnical 'location' (if format is 'text/html') - # base.add_value('thumbnail', 'https://url/to/thumbnail') - return base + def getBase(self, response): + base = LomBase.getBase(self, response) + # optionlly provide thumbnail. 
If empty, it will tried to be generated from the getLOMTechnical 'location' (if format is 'text/html') + # base.add_value('thumbnail', 'https://url/to/thumbnail') + return base - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.add_value('title', response.xpath('//title//text()').get()) - general.add_value('language', response.xpath('//meta[@property="og:locale"]/@content').get()) - return general + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.add_value("title", response.xpath("//title//text()").get()) + general.add_value( + "language", response.xpath('//meta[@property="og:locale"]/@content').get() + ) + return general - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('location', response.url) - technical.add_value('format', 'text/html') - technical.add_value('size', len(response.body)) - return technical + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("location", response.url) + technical.add_value("format", "text/html") + technical.add_value("size", len(response.body)) + return technical - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.add_value('title', response.xpath('//title//text()').get()) - general.add_value('language', response.xpath('//meta[@property="og:locale"]/@content').get()) - return general + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.add_value("title", response.xpath("//title//text()").get()) + general.add_value( + "language", response.xpath('//meta[@property="og:locale"]/@content').get() + ) + return general - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - # Provide valuespace data. This data will later get automatically mapped - # Please take a look at the valuespaces here: - # https://vocabs.openeduhub.de/ - # You can either use full identifiers or also labels. The system will auto-map them accordingly + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + # Provide valuespace data. This data will later get automatically mapped + # Please take a look at the valuespaces here: + # https://vocabs.openeduhub.de/ + # You can either use full identifiers or also labels. 
The system will auto-map them accordingly - # Please also checkout the ValuespaceHelper class which provides usefull mappers for common data - - #valuespaces.add_value('educationalContext', context) - #valuespaces.add_value('discipline',discipline) - #valuespaces.add_value('learningResourceType', lrt) - return valuespaces + # Please also checkout the ValuespaceHelper class which provides usefull mappers for common data + # valuespaces.add_value('educationalContext', context) + # valuespaces.add_value('discipline',discipline) + # valuespaces.add_value('learningResourceType', lrt) + return valuespaces - - # You may override more functions here, please checkout LomBase class \ No newline at end of file + # You may override more functions here, please checkout LomBase class diff --git a/etl/converter/spiders/serlo_spider.py b/etl/converter/spiders/serlo_spider.py index 328cc9c7..faaeccbc 100644 --- a/etl/converter/spiders/serlo_spider.py +++ b/etl/converter/spiders/serlo_spider.py @@ -2,123 +2,139 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.json_base import JSONBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.json_base import JSONBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.constants import Constants; +from converter.constants import Constants # Spider to fetch API from Serlo class SerloSpider(scrapy.Spider, LomBase, JSONBase): - name = 'serlo_spider' - friendlyName = 'Serlo' - url = 'https://de.serlo.org' - version = '0.1.0' - - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) - - def start_requests(self): - url = self.url + '/entity/api/json/export/article' - # current dummy fallback since the Serlo API is basically down - # url = 'http://localhost/sources/serlo.json' - yield scrapy.Request(url=url, callback=self.parseList) - - # some fields are having xml entities (for whatever reason), we will unescape them here - def get(self, *params, response): - data = JSONBase.get(self, *params, json = response.meta['json']) - try: - return HTMLParser().unescape(data) - except: - try: - result = [] - for p in data: - result.append(HTMLParser().unescape(p)) - return result - except: - return data - - def parseList(self, response): - data = json.loads(response.body) - for j in data: - responseCopy = response.replace(url = self.url + j['link'] + '?contentOnly') - responseCopy.meta['json'] = j - if self.hasChanged(responseCopy): - yield scrapy.Request(url = responseCopy.url, callback = self.parse, meta = {'json': j, 'url': responseCopy.url}) - - def getId(self, response = None): - return self.get('guid', response = response) - - def getHash(self, response = None): - return self.version + self.get('lastModified.date', response = response) - - def parse(self, response): - if not self.get('description', response = response): - logging.info('skipping empty entry in serlo') - return None - return LomBase.parse(self, response) - - def mapResponse(self, response): - r = LomBase.mapResponse(self, response) - text = r.load_item()['text'].split('Dieses Werk steht unter der freien Lizenz CC BY-SA 4.0 Information')[0] - r.replace_value('text', text) - return r - - def getBase(self, response): - base = LomBase.getBase(self, response) - base.add_value('lastModified', 
self.get('lastModified.date', response = response)) - base.add_value('ranking', 0.9 + (float(self.get('revisionsCount', response = response))/2 + float(self.get('authorsCount', response = response)))/50) - return base - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response = response) - text = self.get('categories', response = response)[0].split('/')[0] - # manual mapping to Mathematik - if text == 'Mathe': - text = 'Mathematik' - valuespaces.add_value('discipline', text) - #for entry in ProcessValuespacePipeline.valuespaces['discipline']: - # if len(list(filter(lambda x:x['@value'].casefold() == text.casefold(), entry['label']))): - # valuespaces.add_value('discipline',entry['id']) - - primarySchool = re.compile('Klasse\s[1-4]$', re.IGNORECASE) - if len(list(filter(lambda x: primarySchool.match(x), self.getKeywords(response)))): - valuespaces.add_value('educationalContext', 'Grundschule') - sek1 = re.compile('Klasse\s([5-9]|10)$', re.IGNORECASE) - if len(list(filter(lambda x: sek1.match(x), self.getKeywords(response)))): - valuespaces.add_value('educationalContext', 'Sekundarstufe 1') - sek2 = re.compile('Klasse\s1[1-2]', re.IGNORECASE) - if len(list(filter(lambda x: sek2.match(x), self.getKeywords(response)))): - valuespaces.add_value('educationalContext', 'Sekundarstufe 2') - return valuespaces - - def getKeywords(self, response): - try: - keywords = list(self.get('keywords', response = response).values()) - except: - keywords = self.get('keywords', response = response) - for c in self.get('categories', response = response): - keywords += c.split('/') - return set(keywords) - - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response = response) - general.add_value('title', self.get('title', response = response)) - general.add_value('keyword', self.getKeywords(response)) - general.add_value('description', self.get('description', response = response)) - return general - - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('location', response.url) - technical.add_value('format', 'text/html') - technical.add_value('size', len(response.body)) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value('url', Constants.LICENSE_CC_BY_SA_40) - return license + name = "serlo_spider" + friendlyName = "Serlo" + url = "https://de.serlo.org" + version = "0.1.0" + + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) + + def start_requests(self): + url = self.url + "/entity/api/json/export/article" + # current dummy fallback since the Serlo API is basically down + # url = "http://localhost/sources/serlo.json" + yield scrapy.Request(url=url, callback=self.parseList) + + # some fields are having xml entities (for whatever reason), we will unescape them here + def get(self, *params, response): + data = JSONBase.get(self, *params, json=response.meta["json"]) + try: + return HTMLParser().unescape(data) + except: + try: + result = [] + for p in data: + result.append(HTMLParser().unescape(p)) + return result + except: + return data + + def parseList(self, response): + data = json.loads(response.body) + for j in data: + responseCopy = response.replace(url=self.url + j["link"] + "?contentOnly") + responseCopy.meta["json"] = j + if self.hasChanged(responseCopy): + yield scrapy.Request( + url=responseCopy.url, + callback=self.parse, + meta={"json": j, "url": responseCopy.url}, + ) + + def getId(self, 
response=None): + return self.get("guid", response=response) + + def getHash(self, response=None): + return self.version + self.get("lastModified.date", response=response) + + def parse(self, response): + if not self.get("description", response=response): + logging.info("skipping empty entry in serlo") + return None + return LomBase.parse(self, response) + + def mapResponse(self, response): + r = LomBase.mapResponse(self, response) + text = r.load_item()["text"].split( + "Dieses Werk steht unter der freien Lizenz CC BY-SA 4.0 Information" + )[0] + r.replace_value("text", text) + return r + + def getBase(self, response): + base = LomBase.getBase(self, response) + base.add_value("lastModified", self.get("lastModified.date", response=response)) + base.add_value( + "ranking", + 0.9 + + ( + float(self.get("revisionsCount", response=response)) / 2 + + float(self.get("authorsCount", response=response)) + ) + / 50, + ) + return base + + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response=response) + text = self.get("categories", response=response)[0].split("/")[0] + # manual mapping to Mathematik + if text == "Mathe": + text = "Mathematik" + valuespaces.add_value("discipline", text) + # for entry in ProcessValuespacePipeline.valuespaces['discipline']: + # if len(list(filter(lambda x:x['@value'].casefold() == text.casefold(), entry['label']))): + # valuespaces.add_value('discipline',entry['id']) + + primarySchool = re.compile("Klasse\s[1-4]$", re.IGNORECASE) + if len( + list(filter(lambda x: primarySchool.match(x), self.getKeywords(response))) + ): + valuespaces.add_value("educationalContext", "Grundschule") + sek1 = re.compile("Klasse\s([5-9]|10)$", re.IGNORECASE) + if len(list(filter(lambda x: sek1.match(x), self.getKeywords(response)))): + valuespaces.add_value("educationalContext", "Sekundarstufe 1") + sek2 = re.compile("Klasse\s1[1-2]", re.IGNORECASE) + if len(list(filter(lambda x: sek2.match(x), self.getKeywords(response)))): + valuespaces.add_value("educationalContext", "Sekundarstufe 2") + return valuespaces + + def getKeywords(self, response): + try: + keywords = list(self.get("keywords", response=response).values()) + except: + keywords = self.get("keywords", response=response) + for c in self.get("categories", response=response): + keywords += c.split("/") + return set(keywords) + + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response=response) + general.add_value("title", self.get("title", response=response)) + general.add_value("keyword", self.getKeywords(response)) + general.add_value("description", self.get("description", response=response)) + return general + + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("location", response.url) + technical.add_value("format", "text/html") + technical.add_value("size", len(response.body)) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("url", Constants.LICENSE_CC_BY_SA_40) + return license diff --git a/etl/converter/spiders/tutory_spider.py b/etl/converter/spiders/tutory_spider.py index 9e1fa7e5..57ff5aae 100644 --- a/etl/converter/spiders/tutory_spider.py +++ b/etl/converter/spiders/tutory_spider.py @@ -2,89 +2,105 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.json_base import JSONBase; +from 
converter.spiders.lom_base import LomBase +from converter.spiders.json_base import JSONBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re import sys -from converter.constants import Constants; +from converter.constants import Constants from scrapy.selector import Selector # Spider to fetch API from Serlo class TutorySpider(scrapy.Spider, LomBase, JSONBase): - name = 'tutory_spider' - friendlyName = 'tutory' - url = 'https://www.tutory.de/' - baseUrl = 'https://www.tutory.de/api/v1/' - version = '0.1.0' + name = "tutory_spider" + friendlyName = "tutory" + url = "https://www.tutory.de/" + baseUrl = "https://www.tutory.de/api/v1/" + version = "0.1.0" - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) - def start_requests(self): - url = self.baseUrl + 'worksheet?pageSize=999999' - yield scrapy.Request(url=url, callback=self.parseList) + def start_requests(self): + url = self.baseUrl + "worksheet?pageSize=999999" + yield scrapy.Request(url=url, callback=self.parseList) - def parseList(self, response): - data = json.loads(response.body) - for j in data['worksheets']: - responseCopy = response.replace(url = self.url+ 'worksheet/' + j['id']) - responseCopy.meta['item'] = j - if self.hasChanged(responseCopy): - yield scrapy.Request(url=responseCopy.url, callback=self.parse, meta = {'item': j}) + def parseList(self, response): + data = json.loads(response.body) + for j in data["worksheets"]: + responseCopy = response.replace(url=self.url + "worksheet/" + j["id"]) + responseCopy.meta["item"] = j + if self.hasChanged(responseCopy): + yield scrapy.Request( + url=responseCopy.url, callback=self.parse, meta={"item": j} + ) - def getId(self, response): - return str(response.meta['item']['id']) + def getId(self, response): + return str(response.meta["item"]["id"]) - def getHash(self, response): - return response.meta['item']['updatedAt'] + self.version + def getHash(self, response): + return response.meta["item"]["updatedAt"] + self.version - def parse(self, response): - print(response.url) - return LomBase.parse(self, response) + def parse(self, response): + print(response.url) + return LomBase.parse(self, response) - def getBase(self, response): - base = LomBase.getBase(self, response) - base.add_value('lastModified', response.meta['item']['updatedAt']) - base.add_value('thumbnail', self.url + 'worksheet/' + response.meta['item']['id'] + '.jpg?width=1000') - return base + def getBase(self, response): + base = LomBase.getBase(self, response) + base.add_value("lastModified", response.meta["item"]["updatedAt"]) + base.add_value( + "thumbnail", + self.url + "worksheet/" + response.meta["item"]["id"] + ".jpg?width=1000", + ) + return base - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - discipline = list(map(lambda x: x['code'],filter(lambda x: x['type'] == 'subject', response.meta['item']['metaValues']))) - valuespaces.add_value('discipline', discipline) - return valuespaces + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + discipline = list( + map( + lambda x: x["code"], + filter( + lambda x: x["type"] == "subject", + response.meta["item"]["metaValues"], + ), + ) + ) + valuespaces.add_value("discipline", discipline) + return valuespaces - def getLicense(self, response): - license = 
LomBase.getLicense(self, response) - license.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) - return license + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) + return license - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.add_value('title', response.meta['item']['name']) - if response.meta['item']['description'] != '': - general.add_value('description', response.meta['item']['description']) - else: - html = self.getUrlData(response.url)['html'] - data = Selector(text=html).xpath('//ul[contains(@class,"worksheet-pages")]//text()').getall() - cutoff = 4 - if len(data)>cutoff: - for i in range(cutoff): - del data[0] + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.add_value("title", response.meta["item"]["name"]) + if response.meta["item"]["description"] != "": + general.add_value("description", response.meta["item"]["description"]) + else: + html = self.getUrlData(response.url)["html"] + data = ( + Selector(text=html) + .xpath('//ul[contains(@class,"worksheet-pages")]//text()') + .getall() + ) + cutoff = 4 + if len(data) > cutoff: + for i in range(cutoff): + del data[0] - text = ' '.join(data) - text = text[:1000] - general.add_value('description', text) - return general + text = " ".join(data) + text = text[:1000] + general.add_value("description", text) + return general - - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value('location', response.url) - technical.add_value('format', 'text/html') - technical.add_value('size', len(response.body)) - return technical \ No newline at end of file + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("location", response.url) + technical.add_value("format", "text/html") + technical.add_value("size", len(response.body)) + return technical diff --git a/etl/converter/spiders/wirlernenonline_gsheet_spider.py b/etl/converter/spiders/wirlernenonline_gsheet_spider.py index 7f52ee3a..66ee3e82 100644 --- a/etl/converter/spiders/wirlernenonline_gsheet_spider.py +++ b/etl/converter/spiders/wirlernenonline_gsheet_spider.py @@ -9,33 +9,35 @@ from scrapy import * import csv -class WirLernenOnlineGSheetSpider(Spider, CSVBase, LomBase): - ranking = 5 - name = 'wirlernenonline_gsheet_spider' - friendlyName='Themenportal' - url = 'https://docs.google.com/spreadsheets/d/e/2PACX-1vTmqeYqGD0TADaSkON3zgK66BGTOcPGtsrE280j0wZ8WKtuGL8LZtnKFRIH6HU1FEYIAP28mOWsJYiN/pub?gid=0&single=true&output=csv' - sourceType = Constants.SOURCE_TYPE_SPIDER - COLUMN_UUID = 'uuid' +class WirLernenOnlineGSheetSpider(Spider, CSVBase, LomBase): + ranking = 5 + name = "wirlernenonline_gsheet_spider" + friendlyName = "Themenportal" + url = "https://docs.google.com/spreadsheets/d/e/2PACX-1vTmqeYqGD0TADaSkON3zgK66BGTOcPGtsrE280j0wZ8WKtuGL8LZtnKFRIH6HU1FEYIAP28mOWsJYiN/pub?gid=0&single=true&output=csv" + sourceType = Constants.SOURCE_TYPE_SPIDER - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + COLUMN_UUID = "uuid" + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) - def getBase(self, response = None): - base = CSVBase.getBase(self, response) - uuid = response.meta['row'][WirLernenOnlineGSheetSpider.COLUMN_UUID]['text'] - if uuid: - base.add_value('uuid', uuid) - return base + def getBase(self, response=None): + base = CSVBase.getBase(self, 
response) + uuid = response.meta["row"][WirLernenOnlineGSheetSpider.COLUMN_UUID]["text"] + if uuid: + base.add_value("uuid", uuid) + return base - def start_requests(self): - yield Request(url=self.url, callback=self.parse) + def start_requests(self): + yield Request(url=self.url, callback=self.parse) - def parse(self, response): - rows = self.readCSV(csv.reader(StringIO(response.body.decode('UTF-8')), delimiter=','), 2) - for row in rows: - copyResponse = response.copy() - copyResponse.meta['row'] = row - if self.getId(copyResponse): - yield LomBase.parse(self, copyResponse) + def parse(self, response): + rows = self.readCSV( + csv.reader(StringIO(response.body.decode("UTF-8")), delimiter=","), 2 + ) + for row in rows: + copyResponse = response.copy() + copyResponse.meta["row"] = row + if self.getId(copyResponse): + yield LomBase.parse(self, copyResponse) diff --git a/etl/converter/spiders/wirlernenonline_spider.py b/etl/converter/spiders/wirlernenonline_spider.py index 8451d907..8d1dd145 100644 --- a/etl/converter/spiders/wirlernenonline_spider.py +++ b/etl/converter/spiders/wirlernenonline_spider.py @@ -2,133 +2,162 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.json_base import JSONBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.json_base import JSONBase import json import logging import requests from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import *; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import * # Spider to fetch RSS from planet schule class WirLernenOnlineSpider(scrapy.Spider, LomBase, JSONBase): - name='wirlernenonline_spider' - friendlyName='WirLernenOnline' - url = 'https://wirlernenonline.de/' - version = '0.1.2' - apiUrl = 'https://wirlernenonline.de/wp-json/wp/v2/%type/?per_page=50&page=%page' - keywords = {} - - - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) - - def mapResponse(self, response): - r = LomBase.mapResponse(self, response, fetchData = False) - r.replace_value('text', '') - r.replace_value('html', '') - r.replace_value('url', response.meta['item'].get('link')) - return r - - def getId(self, response): - return response.meta['item'].get('id') - - def getHash(self, response): - return response.meta['item'].get('modified') + self.version - - def startRequest(self, type, page = 1): - return scrapy.Request(url = self.apiUrl.replace('%page', str(page)).replace('%type', type), callback = self.parseRequest, headers = { - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }, meta = { - 'page': page, - 'type': type - }) - - def start_requests(self): - keywords = json.loads(requests.get('https://wirlernenonline.de/wp-json/wp/v2/tags/?per_page=100').content.decode('UTF-8')) - for keyword in keywords: - self.keywords[keyword['id']] = keyword['name'] - - yield self.startRequest('edusource') - yield self.startRequest('edutool') - - def parseRequest(self, response): - results = json.loads(response.body_as_unicode()) - if results: - for item in results: - copyResponse = response.copy() - copyResponse.meta['item'] = item - if self.hasChanged(copyResponse): - yield self.handleEntry(copyResponse) - yield self.startRequest(response.meta['type'], 
response.meta['page'] + 1) - - - def handleEntry(self, response): - return LomBase.parse(self, response) - - def getType(self, response): - if response.meta['type'] == 'edusource': - return Constants.TYPE_SOURCE - elif response.meta['type'] == 'edutool': - return Constants.TYPE_TOOL - return None - - # thumbnail is always the same, do not use the one from rss - def getBase(self, response): - base = LomBase.getBase(self, response) - base.replace_value('thumbnail', self.get('acf.thumbnail.url', json = response.meta['item'])) - base.replace_value('type', self.getType(response)) - fulltext = self.get('acf.long_text', json = response.meta['item']) - base.replace_value('fulltext', HTMLParser().unescape(fulltext)) - return base - - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.replace_value('title', HTMLParser().unescape(self.get('title.rendered', json = response.meta['item']))) - keywords = self.get('tags', json = response.meta['item']) - if keywords: - keywords = list(map(lambda x: self.keywords[x], keywords)) - general.add_value('keyword', keywords) - general.add_value('description', HTMLParser().unescape(self.get('acf.short_text', json = response.meta['item']))) - return general - - - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.replace_value('format', 'text/html') - technical.replace_value('location', self.get('acf.url', json = response.meta['item'])) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - try: - licenseId = self.get('acf.licence', json = response.meta['item'])[0]['value'] - if licenseId == '10': - license.add_value('oer', OerType.ALL) - elif licenseId == '11': - license.add_value('oer', OerType.MIXED) - elif licenseId == '12': - license.add_value('oer', OerType.NONE) - except: - pass - return license - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - discipline = list(map(lambda x: x['value'], self.get('acf.fachgebiet', json = response.meta['item']))) - valuespaces.add_value('discipline', discipline) - acf = self.get('acf.lernresourcentyp', json = response.meta['item']) - if acf: - sourceContentType = list(map(lambda x: x['value'], acf)) - valuespaces.add_value('sourceContentType', sourceContentType) - - context = list(map(lambda x: x['value'], self.get('acf.schulform', json = response.meta['item']))) - valuespaces.add_value('educationalContext', context) - role = list(map(lambda x: x['value'], self.get('acf.role', json = response.meta['item']))) - valuespaces.add_value('intendedEndUserRole', role) - return valuespaces - + name = "wirlernenonline_spider" + friendlyName = "WirLernenOnline" + url = "https://wirlernenonline.de/" + version = "0.1.2" + apiUrl = "https://wirlernenonline.de/wp-json/wp/v2/%type/?per_page=50&page=%page" + keywords = {} + + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) + + def mapResponse(self, response): + r = LomBase.mapResponse(self, response, fetchData=False) + r.replace_value("text", "") + r.replace_value("html", "") + r.replace_value("url", response.meta["item"].get("link")) + return r + + def getId(self, response): + return response.meta["item"].get("id") + + def getHash(self, response): + return response.meta["item"].get("modified") + self.version + + def startRequest(self, type, page=1): + return scrapy.Request( + url=self.apiUrl.replace("%page", str(page)).replace("%type", type), + callback=self.parseRequest, + 
headers={"Accept": "application/json", "Content-Type": "application/json"}, + meta={"page": page, "type": type}, + ) + + def start_requests(self): + keywords = json.loads( + requests.get( + "https://wirlernenonline.de/wp-json/wp/v2/tags/?per_page=100" + ).content.decode("UTF-8") + ) + for keyword in keywords: + self.keywords[keyword["id"]] = keyword["name"] + + yield self.startRequest("edusource") + yield self.startRequest("edutool") + + def parseRequest(self, response): + results = json.loads(response.body_as_unicode()) + if results: + for item in results: + copyResponse = response.copy() + copyResponse.meta["item"] = item + if self.hasChanged(copyResponse): + yield self.handleEntry(copyResponse) + yield self.startRequest(response.meta["type"], response.meta["page"] + 1) + + def handleEntry(self, response): + return LomBase.parse(self, response) + + def getType(self, response): + if response.meta["type"] == "edusource": + return Constants.TYPE_SOURCE + elif response.meta["type"] == "edutool": + return Constants.TYPE_TOOL + return None + + # thumbnail is always the same, do not use the one from rss + def getBase(self, response): + base = LomBase.getBase(self, response) + base.replace_value( + "thumbnail", self.get("acf.thumbnail.url", json=response.meta["item"]) + ) + base.replace_value("type", self.getType(response)) + fulltext = self.get("acf.long_text", json=response.meta["item"]) + base.replace_value("fulltext", HTMLParser().unescape(fulltext)) + return base + + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.replace_value( + "title", + HTMLParser().unescape( + self.get("title.rendered", json=response.meta["item"]) + ), + ) + keywords = self.get("tags", json=response.meta["item"]) + if keywords: + keywords = list(map(lambda x: self.keywords[x], keywords)) + general.add_value("keyword", keywords) + general.add_value( + "description", + HTMLParser().unescape( + self.get("acf.short_text", json=response.meta["item"]) + ), + ) + return general + + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.replace_value("format", "text/html") + technical.replace_value( + "location", self.get("acf.url", json=response.meta["item"]) + ) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + try: + licenseId = self.get("acf.licence", json=response.meta["item"])[0]["value"] + if licenseId == "10": + license.add_value("oer", OerType.ALL) + elif licenseId == "11": + license.add_value("oer", OerType.MIXED) + elif licenseId == "12": + license.add_value("oer", OerType.NONE) + except: + pass + return license + + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + discipline = list( + map( + lambda x: x["value"], + self.get("acf.fachgebiet", json=response.meta["item"]), + ) + ) + valuespaces.add_value("discipline", discipline) + lernresourcentyp = self.get("acf.lernresourcentyp", json=response.meta["item"]) + if lernresourcentyp: + lernresourcentyp = list(map(lambda x: x["value"], lernresourcentyp)) + valuespaces.add_value("sourceContentType", lernresourcentyp) + category = self.get("acf.category", json=response.meta["item"]) + if category: + category = list(map(lambda x: x["value"], category)) + valuespaces.add_value("toolCategory", category) + + context = list( + map( + lambda x: x["value"], + self.get("acf.schulform", json=response.meta["item"]), + ) + ) + valuespaces.add_value("educationalContext", context) + role = list( 
+ map(lambda x: x["value"], self.get("acf.role", json=response.meta["item"])) + ) + valuespaces.add_value("intendedEndUserRole", role) + return valuespaces diff --git a/etl/converter/spiders/youtube_spider.py b/etl/converter/spiders/youtube_spider.py new file mode 100644 index 00000000..564d19da --- /dev/null +++ b/etl/converter/spiders/youtube_spider.py @@ -0,0 +1,330 @@ +import csv +import json +import logging +import os +import re +from typing import Generator, List +from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse + +from overrides import overrides +from scrapy.http import Request, Response +from scrapy.loader import ItemLoader +from scrapy.spiders import Spider + +import converter.env as env +import converter.items as items +from converter.spiders.lom_base import LomBase + +# TODO: Find suitable target field for channel/playlist information: +# - Title (channel title included as organization in lifecycle-author) +# - Description +# - URL (channel url included as url in lifecycle-author) +# +# TODO: Find out whether `publishedAt` reflects modification +# - Find another way to set `hash` if not +# +# Unhandled csv columns: +# - typicalAgeRangeFrom +# - typicalAgeRangeTo +# TODO: Replace with educationalContext +# +# Example item: +# ``` +# { +# "kind": "youtube#playlistItem", +# "etag": "q9_JO0nhU1k7HI7HuTUMsNOd6KM", +# "id": "UExuX1JYWEUxZk1tUGRHbkVvYW00aGI0VDlJdjhNM2Joei4yODlGNEE0NkRGMEEzMEQy", +# "snippet": { +# "publishedAt": "2015-06-29T20:16:59Z", +# "channelId": "UC_cCcxd8yUwIu1-rt5dpBdw", +# "title": "BIOLOGIE NACHHILFE - Evolution & Entwicklung - die neue Serie | Evolution 1", +# "description": "ALLE THEMEN AUS DIESEM VID[...]", +# "thumbnails": { +# [...] +# }, +# "channelTitle": "Die Merkhilfe", +# "playlistId": "PLn_RXXE1fMmPdGnEoam4hb4T9Iv8M3bhz", +# "position": 0, +# "resourceId": { +# "kind": "youtube#video", +# "videoId": "BF4st6XBViI" +# } +# }, +# "contentDetails": { +# "videoId": "BF4st6XBViI", +# "videoPublishedAt": "2015-07-05T13:00:01Z" +# }, +# "status": { +# "privacyStatus": "public" +# } +# } +# ``` + + +class YoutubeSpider(Spider): + name = "youtube_spider" + friendlyName = "Youtube" + url = "https://www.youtube.com/" + version = "0.1.0" + + @staticmethod + def get_video_url(item: dict) -> str: + assert item["snippet"]["resourceId"]["kind"] == "youtube#video" + return "https://www.youtube.com/watch?v={}".format( + item["snippet"]["resourceId"]["videoId"] + ) + + @staticmethod + def update_url_query(url: str, params: dict) -> str: + """Take a url and update selected query parameters.""" + url_parts = list(urlparse(url)) + query = dict(parse_qsl(url_parts[4])) + query.update(params) + url_parts[4] = urlencode(query) + return urlunparse(url_parts) + + @staticmethod + def get_csv_rows(filename: str) -> Generator[dict, None, None]: + csv_file_path = os.path.realpath( + os.path.join(os.path.dirname(__file__), "..", "..", "csv", filename) + ) + with open(csv_file_path, newline="", encoding="utf-8") as csv_file: + reader = csv.DictReader(csv_file) + for row in reader: + yield row + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.lomLoader = YoutubeLomLoader(self.name, self.version, **kwargs) + + @overrides # Spider + def start_requests(self): + for row in YoutubeSpider.get_csv_rows("youtube.csv"): + request = self.request_row(row) + if request is not None: + yield request + + def request_row(self, row: dict) -> Request: + if row["url"].startswith("https://www.youtube.com"): + url = urlparse(row["url"]) + if url.path == 
"/playlist": + playlist_id = dict(parse_qsl(url.query))["list"] + return self.request_playlist(playlist_id, meta={"row": row}) + elif url.path.startswith("/channel/"): + channel_id = url.path.split("/")[2] + return self.request_channel(channel_id, meta={"row": row}) + else: + # Youtube offers custom URLs to popular channels of the form + # - https://www.youtube.com/c/ + # - https://www.youtube.com/ + # - https://www.youtube.com/user/ + # - https://www.youtube.com/ + # + # All of these lead to an ordinary channel, but we need to read its ID from the page + # body. + return Request( + row["url"], meta={"row": row}, callback=self.parse_custom_url, + ) + + def request_channel(self, channel_id: str, meta: dict) -> Request: + part = ["snippet", "contentDetails", "statistics"] + request_url = ( + "https://www.googleapis.com/youtube/v3/channels" + + "?part={}&id={}&key={}".format( + "%2C".join(part), channel_id, env.get("YOUTUBE_API_KEY") + ) + ) + return Request(url=request_url, meta=meta, callback=self.parse_channel) + + def parse_channel(self, response: Response) -> Request: + body = json.loads(response.body) + assert body["kind"] == "youtube#channelListResponse" + assert "items" in body + assert len(body["items"]) == 1 + playlist_id = body["items"][0]["contentDetails"]["relatedPlaylists"]["uploads"] + response.meta["channel"] = body["items"][0] + return self.request_playlist(playlist_id, response.meta) + + def request_playlist(self, playlist_id: str, meta: dict) -> Request: + part = ["snippet"] + request_url = ( + "https://www.googleapis.com/youtube/v3/playlists" + + "?part={}&id={}&key={}".format( + "%2C".join(part), playlist_id, env.get("YOUTUBE_API_KEY"), + ) + ) + return Request(request_url, meta=meta, callback=self.parse_playlist) + + def parse_playlist(self, response: Response): + body = json.loads(response.body) + assert body["kind"] == "youtube#playlistListResponse" + assert body["pageInfo"]["totalResults"] == 1 + response.meta["playlist"] = body["items"][0] + return self.request_playlist_items(body["items"][0]["id"], response.meta) + + def request_playlist_items(self, playlist_id: str, meta: dict) -> Request: + part = ["snippet"] + request_url = ( + "https://www.googleapis.com/youtube/v3/playlistItems" + + "?part={}&playlistId={}&key={}".format( + "%2C".join(part), playlist_id, env.get("YOUTUBE_API_KEY"), + ) + ) + return Request(request_url, meta=meta, callback=self.parse_playlist_items) + + def parse_playlist_items(self, response: Response): + body = json.loads(response.body) + assert body["kind"] == "youtube#playlistItemListResponse" + for item in body["items"]: + response_copy = response.replace(url=self.get_video_url(item)) + response_copy.meta["item"] = item + yield self.lomLoader.parse(response_copy) + if "nextPageToken" in body: + request_url = YoutubeSpider.update_url_query( + response.url, {"pageToken": body["nextPageToken"]} + ) + yield response.follow( + request_url, meta=response.meta, callback=self.parse_playlist_items + ) + + def parse_custom_url(self, response: Response) -> Request: + match = re.search('', response.text) + if match is not None: + channel_id = match.group(1) + return self.request_channel(channel_id, meta=response.meta) + else: + logging.warn("Could not extract channel id for {}".format(response.url)) + + +class YoutubeLomLoader(LomBase): + # The `response.meta` field is populated as follows: + # - `row`: The row of the CSV file containing the channel or playlist to be scraped with some + # additional information regarding all found videos. 
+ # - `item`: Information about the video, obtained from the Youtube API. + # - `channel`: Information about the Youtube channel, obtained from the Youtuber API. Only + # populated if an entire channel was given in the CSV row. + # - `playlist`: Information about the Youtube playlist, obtained from the Youtuber API. These + # information are more relevant than the channel information when a specific playlist was + # given in the CSV row. However, when an entire channel was requested, we fall back to the + # `uploads` playlist, which provides only a generated title. + + @staticmethod + def parse_csv_field(field: str) -> List[str]: + """Parse semicolon-separated string.""" + values = [value.strip() for value in field.split(";") if value.strip()] + if len(values): + return values + + def __init__(self, name, version, **kwargs): + self.name = name + self.version = version + super().__init__(**kwargs) + + @overrides # LomBase + def getId(self, response: Response) -> str: + return YoutubeSpider.get_video_url(response.meta["item"]) + + @overrides # LomBase + def getHash(self, response: Response) -> str: + return self.version + response.meta["item"]["snippet"]["publishedAt"] + + @overrides # LomBase + def mapResponse(self, response) -> items.ResponseItemLoader: + return LomBase.mapResponse(self, response, False) + + @overrides # LomBase + def getBase(self, response: Response) -> items.BaseItemLoader: + base = LomBase.getBase(self, response) + base.add_value("origin", response.meta["row"]["sourceTitle"].strip()) + base.add_value("lastModified", response.meta["item"]["snippet"]["publishedAt"]) + base.add_value("thumbnail", self.getThumbnailUrl(response)) + base.add_value("fulltext", self.getFulltext(response)) + return base + + def getThumbnailUrl(self, response: Response) -> str: + thumbnails = response.meta["item"]["snippet"]["thumbnails"] + thumbnail = ( + thumbnails["maxres"] + if "maxres" in thumbnails + else thumbnails["standard"] + if "standard" in thumbnails + else thumbnails["high"] + ) + return thumbnail["url"] + + def getFulltext(self, response: Response) -> str: + item = response.meta["item"]["snippet"] + # If `channel` is populated, it has more relevant information than `playlist` (see comments + # to `meta` field above). + if "channel" in response.meta: + channel = response.meta["channel"]["snippet"] + fulltext = "\n\n".join([channel["title"], channel["description"], item["title"]],) + else: + playlist = response.meta["playlist"]["snippet"] + fulltext = "\n\n".join( + [playlist["channelTitle"], playlist["title"], playlist["description"],], + ) + return fulltext + + @overrides # LomBase + def getLOMGeneral(self, response: Response) -> items.LomGeneralItemloader: + general = LomBase.getLOMGeneral(self, response) + general.add_value("title", response.meta["item"]["snippet"]["title"]) + general.add_value("description", self.getDescription(response)) + general.add_value( + "keyword", self.parse_csv_field(response.meta["row"]["keyword"]) + ) + general.add_value( + "language", self.parse_csv_field(response.meta["row"]["language"]) + ) + return general + + def getDescription(self, response: Response) -> str: + return ( + response.meta["item"]["snippet"]["description"] + # Fall back to playlist title when no description was given. 
+ or response.meta["playlist"]["snippet"]["title"] + ) + + @overrides # LomBase + def getLOMTechnical(self, response: Response) -> items.LomTechnicalItemLoader: + technical = LomBase.getLOMTechnical(self, response) + technical.add_value("format", "text/html") + technical.add_value( + "location", YoutubeSpider.get_video_url(response.meta["item"]) + ) + return technical + + @overrides # LomBase + def getLOMLifecycle(self, response: Response) -> items.LomLifecycleItemloader: + lifecycle = LomBase.getLOMLifecycle(self, response) + lifecycle.add_value("role", "author") + lifecycle.add_value("organization", response.meta["item"]["snippet"]["channelTitle"]) + lifecycle.add_value("url", self.getChannelUrl(response)) + return lifecycle + + def getChannelUrl(self, response: Response) -> str: + channel_id = response.meta["item"]["snippet"]["channelId"] + return "https://www.youtube.com/channel/{}".format(channel_id) + + @overrides # LomBase + def getLicense(self, response: Response) -> items.LicenseItemLoader: + license = LomBase.getLicense(self, response) + license.add_value( + "internal", self.parse_csv_field(response.meta["row"]["license"]) + ) + return license + + @overrides # LomBase + def getValuespaces(self, response: Response) -> items.ValuespaceItemLoader: + valuespaces = LomBase.getValuespaces(self, response) + row = response.meta["row"] + valuespaces.add_value( + "learningResourceType", self.parse_csv_field(row["learningResourceType"]), + ) + valuespaces.add_value("discipline", self.parse_csv_field(row["discipline"])) + valuespaces.add_value( + "intendedEndUserRole", self.parse_csv_field(row["intendedEndUserRole"]), + ) + return valuespaces + diff --git a/etl/converter/spiders/zdf_rss_spider.py b/etl/converter/spiders/zdf_rss_spider.py index ab3bad88..518e04d8 100644 --- a/etl/converter/spiders/zdf_rss_spider.py +++ b/etl/converter/spiders/zdf_rss_spider.py @@ -2,22 +2,22 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.rss_list_base import RSSListBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.rss_list_base import RSSListBase import json import logging from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import Constants; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import Constants # Spider to fetch RSS from planet schule class ZDFRSSSpider(RSSListBase): - name='zdf_rss_spider' - friendlyName='ZDF' - url = 'https://www.zdf.de/' - version = '0.1.0' + name = "zdf_rss_spider" + friendlyName = "ZDF" + url = "https://www.zdf.de/" + version = "0.1.0" - def __init__(self, **kwargs): - RSSListBase.__init__(self, 'csv/zdf_rss.csv', **kwargs) + def __init__(self, **kwargs): + RSSListBase.__init__(self, "csv/zdf_rss.csv", **kwargs) diff --git a/etl/converter/spiders/zoerr_spider.py b/etl/converter/spiders/zoerr_spider.py index ef3ebb35..b5bda42e 100644 --- a/etl/converter/spiders/zoerr_spider.py +++ b/etl/converter/spiders/zoerr_spider.py @@ -2,18 +2,19 @@ from converter.spiders.oai_base import OAIBase from scrapy.spiders import SitemapSpider + class ZoerrSpider(SitemapSpider, LrmiBase): - name = 'zoerr_spider' - friendlyName = 'OER-Repositorium Baden-Württemberg (ZOERR)' - url = 'https://www.oerbw.de' - baseUrl = 
'https://www.oerbw.de/edu-sharing/eduservlet/oai/provider' - set = 'default' - metadataPrefix = 'lom' - sitemap_urls = ['https://www.oerbw.de/edu-sharing/eduservlet/sitemap'] + name = "zoerr_spider" + friendlyName = "OER-Repositorium Baden-Württemberg (ZOERR)" + url = "https://www.oerbw.de" + baseUrl = "https://www.oerbw.de/edu-sharing/eduservlet/oai/provider" + set = "default" + metadataPrefix = "lom" + sitemap_urls = ["https://www.oerbw.de/edu-sharing/eduservlet/sitemap"] - def __init__(self, **kwargs): - SitemapSpider.__init__(self) - LrmiBase.__init__(self, **kwargs) + def __init__(self, **kwargs): + SitemapSpider.__init__(self) + LrmiBase.__init__(self, **kwargs) - def parse(self, response): - return LrmiBase.parse(self, response) + def parse(self, response): + return LrmiBase.parse(self, response) diff --git a/etl/converter/spiders/zum_spider.py b/etl/converter/spiders/zum_spider.py index 78d56e0a..c629ccb6 100644 --- a/etl/converter/spiders/zum_spider.py +++ b/etl/converter/spiders/zum_spider.py @@ -2,107 +2,130 @@ from converter.items import * import time from w3lib.html import remove_tags, replace_escape_chars -from converter.spiders.lom_base import LomBase; -from converter.spiders.json_base import JSONBase; +from converter.spiders.lom_base import LomBase +from converter.spiders.json_base import JSONBase import json import logging import requests from html.parser import HTMLParser -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline import re -from converter.valuespace_helper import ValuespaceHelper; -from converter.constants import *; +from converter.valuespace_helper import ValuespaceHelper +from converter.constants import * # Spider to fetch RSS from planet schule class ZUMSpider(scrapy.Spider, LomBase, JSONBase): - name='zum_spider' - friendlyName='ZUM-Unterrichten' - url = 'https://unterrichten.zum.de/' - version = '0.1.0' - apiUrl = 'https://unterrichten.zum.de/api.php?action=query&format=json&list=allpages&apcontinue=%continue&aplimit=100' - apiEntryUrl = 'https://unterrichten.zum.de/api.php?action=parse&format=json&pageid=%pageid' - entryUrl = 'https://unterrichten.zum.de/wiki/%page' - keywords = {} - - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + name = "zum_spider" + friendlyName = "ZUM-Unterrichten" + url = "https://unterrichten.zum.de/" + version = "0.1.0" + apiUrl = "https://unterrichten.zum.de/api.php?action=query&format=json&list=allpages&apcontinue=%continue&aplimit=100" + apiEntryUrl = ( + "https://unterrichten.zum.de/api.php?action=parse&format=json&pageid=%pageid" + ) + entryUrl = "https://unterrichten.zum.de/wiki/%page" + keywords = {} - def mapResponse(self, response): - r = LomBase.mapResponse(self, response, fetchData = False) - r.replace_value('url', response.meta['item'].get('link')) - return r + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) - def getId(self, response): - return self.get('parse.pageid', json = response.meta['item']) + def mapResponse(self, response): + r = LomBase.mapResponse(self, response, fetchData=False) + r.replace_value("url", response.meta["item"].get("link")) + return r - def getHash(self, response): - return str(self.get('parse.revid', json = response.meta['item'])) + self.version + def getId(self, response): + return self.get("parse.pageid", json=response.meta["item"]) - def startRequest(self, continueToken = ''): - return scrapy.Request(url = self.apiUrl.replace('%continue', continueToken), callback = self.parseRequest, 
headers = { - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }, meta = { - 'continueToken': continueToken - }) + def getHash(self, response): + return str(self.get("parse.revid", json=response.meta["item"])) + self.version - def start_requests(self): - keywords = json.loads(requests.get('https://wirlernenonline.de/wp-json/wp/v2/tags/?per_page=100').content.decode('UTF-8')) - for keyword in keywords: - self.keywords[keyword['id']] = keyword['name'] + def startRequest(self, continueToken=""): + return scrapy.Request( + url=self.apiUrl.replace("%continue", continueToken), + callback=self.parseRequest, + headers={"Accept": "application/json", "Content-Type": "application/json"}, + meta={"continueToken": continueToken}, + ) - yield self.startRequest('') + def start_requests(self): + keywords = json.loads( + requests.get( + "https://wirlernenonline.de/wp-json/wp/v2/tags/?per_page=100" + ).content.decode("UTF-8") + ) + for keyword in keywords: + self.keywords[keyword["id"]] = keyword["name"] - def parseRequest(self, response): - results = json.loads(response.body_as_unicode()) - if results: - for item in results['query']['allpages']: - yield scrapy.Request(url = self.apiEntryUrl.replace('%pageid', str(item['pageid'])), callback = self.handleEntry) - if 'continue' in results: - yield self.startRequest(results['continue']['apcontinue']) - + yield self.startRequest("") - def handleEntry(self, response): - response.meta['item'] = json.loads(response.body_as_unicode()) - return LomBase.parse(self, response) + def parseRequest(self, response): + results = json.loads(response.body_as_unicode()) + if results: + for item in results["query"]["allpages"]: + yield scrapy.Request( + url=self.apiEntryUrl.replace("%pageid", str(item["pageid"])), + callback=self.handleEntry, + ) + if "continue" in results: + yield self.startRequest(results["continue"]["apcontinue"]) - def getBase(self, response): - base = LomBase.getBase(self, response) - fulltext = self.get('parse.text.*', json = response.meta['item']) - base.replace_value('fulltext', self.html2Text(fulltext)) # crashes! - return base + def handleEntry(self, response): + response.meta["item"] = json.loads(response.body_as_unicode()) + return LomBase.parse(self, response) - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.replace_value('title', self.get('parse.title', json = response.meta['item'])) - keywords = self.get('parse.links', json = response.meta['item']) - if keywords: - keywords = list(map(lambda x: x['*'], keywords)) - general.add_value('keyword', keywords) - props = self.get('parse.properties') - if props: - description = list(map(lambda x: x['*'], filter(lambda x: x['name'] == 'description', props))) - general.add_value('description', description) - return general - + def getBase(self, response): + base = LomBase.getBase(self, response) + fulltext = self.get("parse.text.*", json=response.meta["item"]) + base.replace_value("fulltext", self.html2Text(fulltext)) # crashes! 
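+        # NOTE: html2Text can fail on some of the markup returned by the ZUM wiki API
+        # (hence the "crashes!" note above); a try/except with an empty-fulltext fallback
+        # may be needed here.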
+ return base - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.replace_value('format', 'text/html') - technical.replace_value('location', self.entryUrl.replace('%page',self.get('parse.title', json = response.meta['item']))) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value('url', Constants.LICENSE_CC_BY_SA_40) - return license - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - categories = list(map(lambda x: x['*'], self.get('parse.categories', json = response.meta['item']))) - if categories: - valuespaces.add_value('discipline', categories) - valuespaces.add_value('educationalContext', categories) - valuespaces.add_value('intendedEndUserRole', categories) - return valuespaces + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.replace_value( + "title", self.get("parse.title", json=response.meta["item"]) + ) + keywords = self.get("parse.links", json=response.meta["item"]) + if keywords: + keywords = list(map(lambda x: x["*"], keywords)) + general.add_value("keyword", keywords) + props = self.get("parse.properties") + if props: + description = list( + map( + lambda x: x["*"], + filter(lambda x: x["name"] == "description", props), + ) + ) + general.add_value("description", description) + return general + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.replace_value("format", "text/html") + technical.replace_value( + "location", + self.entryUrl.replace( + "%page", self.get("parse.title", json=response.meta["item"]) + ), + ) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("url", Constants.LICENSE_CC_BY_SA_40) + return license + + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + categories = list( + map( + lambda x: x["*"], + self.get("parse.categories", json=response.meta["item"]), + ) + ) + if categories: + valuespaces.add_value("discipline", categories) + valuespaces.add_value("educationalContext", categories) + valuespaces.add_value("intendedEndUserRole", categories) + return valuespaces diff --git a/etl/converter/valuespace_helper.py b/etl/converter/valuespace_helper.py index 15232c56..cac9f9f5 100644 --- a/etl/converter/valuespace_helper.py +++ b/etl/converter/valuespace_helper.py @@ -1,46 +1,48 @@ -from converter.pipelines import ProcessValuespacePipeline; +from converter.pipelines import ProcessValuespacePipeline from valuespace_converter.app.valuespaces import Valuespaces + class ValuespaceHelper: @staticmethod def mimetypeToLearningResourceType(mimetype): - if mimetype.startswith('video/'): - return 'video' - if mimetype.startswith('image/'): - return 'image' - if mimetype.startswith('audio/'): - return 'audio' + if mimetype.startswith("video/"): + return "video" + if mimetype.startswith("image/"): + return "image" + if mimetype.startswith("audio/"): + return "audio" return None # range must be an array [from, to] @staticmethod def educationalContextByGrade(range): context = [] - if len(range)<2: + if len(range) < 2: range = list(range) range.append(range[0]) - if int(range[0])<=4: - context.append('Grundschule') - if int(range[1])>=4 and int(range[0])<=10: - context.append('Sekundarstufe 1') - if int(range[0])>=11 or int(range[1])>=11: - context.append('Sekundarstufe 2') + if int(range[0]) <= 4: + 
context.append("Grundschule") + if int(range[1]) >= 4 and int(range[0]) <= 10: + context.append("Sekundarstufe 1") + if int(range[0]) >= 11 or int(range[1]) >= 11: + context.append("Sekundarstufe 2") if len(context): return context return None + # range must be an array [from, to] @staticmethod def educationalContextByAgeRange(range): context = [] - if len(range)<2: + if len(range) < 2: range = list(range) range.append(range[0]) - if int(range[0])<=10: - context.append('Grundschule') - if int(range[1])>=10 and int(range[0])<=16: - context.append('Sekundarstufe I') - if int(range[0])>=16 or int(range[1])>=18: - context.append('Sekundarstufe II') + if int(range[0]) <= 10: + context.append("Grundschule") + if int(range[1]) >= 10 and int(range[0]) <= 16: + context.append("Sekundarstufe I") + if int(range[0]) >= 16 or int(range[1]) >= 18: + context.append("Sekundarstufe II") if len(context): return context - return None \ No newline at end of file + return None diff --git a/etl/crawl_schulcloud.sh b/etl/crawl_schulcloud.sh index 42ac3f94..a216849b 100644 --- a/etl/crawl_schulcloud.sh +++ b/etl/crawl_schulcloud.sh @@ -59,7 +59,9 @@ do echo "Executing $spider spider." # Execute the spider and save its output to two files: "nohup_SPIDER.out" (individual log) and "nohup.out" (collective logs). - nohup scrapy crawl ${spider}_spider | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null & 2>&1 + #nohup scrapy crawl ${spider}_spider -a resetVersion=true | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null 2>&1 & + #nohup scrapy crawl ${spider}_spider -a cleanrun=true | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null 2>&1 & + nohup scrapy crawl ${spider}_spider | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null & 2>&1 # Execute the spider in the background. 
#scrapy crawl ${spider}_spider & diff --git a/etl/csv/youtube.csv b/etl/csv/youtube.csv new file mode 100644 index 00000000..6bf20870 --- /dev/null +++ b/etl/csv/youtube.csv @@ -0,0 +1,115 @@ +url,sourceTitle,learningResourceType,discipline,intendedEndUserRole,typicalAgeRangeFrom,typicalAgeRangeTo,language,keyword,license +https://www.youtube.com/c/musstewissenDeutsch/featured,musstewissen Deutsch,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/channel/UCZF4IKRZQJUCwl-0O64Y_KQ/featured,Nele Hirsch,video,720,learner; teacher,10,18,de,, +https://www.youtube.com/c/100SekundenPhysik/featured,100 Sekunden Physik,video,460,learner; teacher,10,18,de,, +https://www.youtube.com/c/BreakingLab/featured,Breaking Lab,video,4003,learner; teacher,10,18,de,, +https://www.youtube.com/c/musstewissenChemie/featured,musstewissen Chemie,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/c/musstewissenMathe/featured,musstewissen Mathematik,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/c/musstewissenPhysik/featured,musstewissen Physik,video,460,learner; teacher,10,18,de,, +https://www.youtube.com/c/MrWissen2goGeschichte/featured,MrWissen2go Geschichte,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/c/MathebyDanielJung/featured,Mathe by Daniel Jung,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/c/brainfaqk/featured,brainfaqk,video,04003; 100; 320,learner; teacher,10,18,de,, +https://www.youtube.com/c/maiLab/featured,mailab,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/c/DorFuchs/featured,DorFuchs,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/user/weblehrer/featured,Geographie Plus,video,220,learner; teacher,10,18,de,, +https://www.youtube.com/DerersteWeltkrieg/featured,Der Erste Weltkrieg,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/c/RLScience/featured,RLScience,video,46014; 100; 460; 04003,learner; teacher,10,18,de,, +https://www.youtube.com/c/mwstubes/featured,Sommers Weltliteratur to go,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/user/schoolseasy/featured,schoolseasy ,video,720; 480; 240; 120; 380; 20001; ,learner; teacher,10,18,de,, +https://www.youtube.com/c/FlippedMathe/featured,Sebastian Schmidt,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/c/EnglishClass101/featured,"Learn English with EnglishClass101.com ",video,20001,learner; teacher,10,18,en,, +https://www.youtube.com/user/dwlearngerman/featured,Deutsch lernen mit der DW,video,28002,learner; teacher,10,18,de,, +https://www.youtube.com/channel/UCs7cxycjh7vKIBluSka0I8A/featured,Kunst mit Teachanies,video,60,learner; teacher,10,18,de,, +https://www.youtube.com/channel/UCFt8Sh8cbg-K5hZs3hhKbmA/featured,Der Kunsterklärer,video,60,learner; teacher,10,18,de,, +https://www.youtube.com/c/explainity-erklaert/featured,Explainity,video,720; 480; 700; 260,learner; teacher,10,18,de,, +https://www.youtube.com/user/DoktorKlawonn/featured,Dr.Klawonn - Chemie und Physik-Experimente,video,100; 460,learner; teacher,10,18,de,, +https://www.youtube.com/user/periodicvideos/featured,Periodic Videos zu einzelnen Elementen,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/c/chemistryexperiments/featured,Sven Sommer - Chemie und Physik-Experimente,video,100; 460,learner; teacher,10,18,de,, +https://www.youtube.com/c/EinfachmatheTv/featured,einfachMathe - Erklärvideos bis zum Abi,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/user/ChemieExperimente/featured,Chemie 
Experimente,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/c/MrWissen2go/featured,MrWissen2go,video,480,learner; teacher,10,18,de,, +https://www.youtube.com/c/scobel/featured,Scobel,video,450; 160,learner; teacher,10,18,de,, +https://www.youtube.com/c/EINFACHGESCHICHTE/featured,Einfach Geschichte,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/c/TheGreatWarSeries/featured,The Great War,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/channel/UCNOsl2b57wNgN7l13TOzNJQ/featured,Jule Sommersberg,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/user/seguGeschichte/feed,seguGeschichte,video,240,learner; teacher,10,18,de,, +,,,,,,,,, +,,,,,,,,, +,,,,,,,,, +Playlists,,,,,,,,, +https://www.youtube.com/playlist?list=PLC9D2mzTyJeXYa6E1y_d0fc_7-V7BJnSq,DigiFernunterricht,video,720,teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLFhPjADeGDodbVSSL8LE00SNjQIPiyamr,Webinare Deutsches Lehrkräfteforum,video,720,teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMbx6lR3Iqr4lx2LZhdOGvh,Die Merkhilfe,video,720,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMkEP2oJInr6cGn2RR-e_Ab,Die Merkhilfe,video,240; 480; 48005; 120; 220; 700,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNUPitIsXFXH25BVaT30Mm8,Die Merkhilfe,video,720; 120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOhFouDVMB3A2qKuTlJyQ0X,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPpnBgRyNkkjEyjtr6tSSrM,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMfiJhYBmXijbEQA56W1flc,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmODHAgYWLZ9h1q2-HgUrpvc,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPdGnEoam4hb4T9Iv8M3bhz,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNwPtN6bnWzQnasKm1N0TrK,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOaD8ga75DCcNlVPtPokO9S,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmN1le4jGlEfZzevKBWQ9ANx,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPiXM-O6399a30fNAOn80kW,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPsXomKrdYGyqbwJ48LeGg5,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNlUnZAYhiJMZlA5ptWH61g,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNrrmwyqJFbd_ivlG587dqL,Die Merkhilfe,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPdU0sc15r_Q3YE4LgUTC85,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOhFouDVMB3A2qKuTlJyQ0X,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOBXFsYk6mOZeGHlHTkmCpW,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMMlabZV8x9OuvkqUXN6gnU,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmP2k3YkNSrDjVvyzrfd9-B3,Die Merkhilfe,video,120,learner; teacher,10,18,de,, 
+https://www.youtube.com/playlist?list=PLn_RXXE1fMmMsW8Aef4OOFcw9c1A_Yhn5,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmP2k3YkNSrDjVvyzrfd9-B3,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMsW8Aef4OOFcw9c1A_Yhn5,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNeDDS9gA0WuVE28yPq9pyQ,Die Merkhilfe,video,120,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMlN0wwv9nq3aa0tJQbBqJ2,Die Merkhilfe,video,20001,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmN-rEv91RgjDyUqNMkVIghJ,Die Merkhilfe,video,20001,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPbLCtmi9b9CgzRxNIeIVLl,Die Merkhilfe,video,20001,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPWhScoDC9sDxBi490_2q5r,Die Merkhilfe,video,20001,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMQdCfccGVScOgbmfNFG_qH,Die Merkhilfe,video,20001,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPQ7Piohz-NXr6575bQi2Ak,Die Merkhilfe,video,220,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOa6LsnuMSy5v0stFXxkgOi,Die Merkhilfe,video,220,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPd89RZoFxJ96g5Sr10iw24,Die Merkhilfe,video,220,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNiHGOYn4_0yI_4Zi_dU8Ev,Die Merkhilfe,video,220,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNeNOlWe1jCRPaGWo_Ic9cS,Die Merkhilfe,video,220,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPN_8qL9Oeh1DaFjbt3JLyF,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNIdDK4zxYPbGfMfYoCqdaE,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNthaWTssmt5Tp3zToUnebB,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmNyR_rYuUOfhzvXU2TtXXDc,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPF4PylCfVHknAUojWYmt5_,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMSOO2tscOG0FpX1zX_K0NG,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMDYqZ5kddBAYOLnI3IzoJI,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPTubs4fXxxC96KQmmKf5QZ,Die Merkhilfe,video,380,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmO-YZ4qh1l5SM1rmvgraKl3,Die Merkhilfe,video,480,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPdnyxR44XMMNC0KjMk4Iaq,Die Merkhilfe,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPnW2w9KF8bC-UGS5z8V-6v,Die Merkhilfe,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOOL8nfq1jdfoJXaF2aruZr,Die Merkhilfe,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOMLoCTsLpQ3_wd2tn1FYbM,Die Merkhilfe,video,450,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMuAT9M_v2iLVjfkVgEAziT,Die Merkhilfe,video,450,learner; teacher,10,18,de,, 
+https://www.youtube.com/playlist?list=PLn_RXXE1fMmMuAT9M_v2iLVjfkVgEAziT,Die Merkhilfe,video,450,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMPHQq4AxNvviVO10plZo2I,Die Merkhilfe,video,700,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOBXFsYk6mOZeGHlHTkmCpW,Die Merkhilfe,video,120; 240,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOxoH_P4aRa1xJgD8ygUCOX,Die Merkhilfe,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMs4JxM3JyDW-cEXoT-7eZY,Die Merkhilfe,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOTlJ0lGCNqB-hKccE87Sry,Die Merkhilfe,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmOOXcWFdWUH4gI52GkCCwy6,Die Merkhilfe,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPazIdX2v0m3IpAnJSmKolF,Die Merkhilfe,video,720,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmPMnnqoneU789J_fWi88yBx,Die Merkhilfe,video,480,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLjwHRoOuF25RCWRkpzjdG1c3iiwqE-n9U,Die Merkhilfe,video,700,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLjwHRoOuF25R7lVaqkqfQbjB7W_y-m4hE,Die Merkhilfe,video,700,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLn_RXXE1fMmMIpf0qMuEQLRyDh1MN2oQY,Die Merkhilfe,video,320,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PL-jLXAXdkGqfYPa3HJkMLUgXpNUcZmJgY,JGS Feuchtwangen,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PL7siVIUmPpIGxf89EBtjHKGMINqT10y0N,Terra X,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSpR1vDw0koJIewlvm1NWdso,Terra X,video,80,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSr-jyDJzSnCt4R4Sdm1HdR8,Terra X,video,100,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PL7siVIUmPpIGO3M8L3PObihKk7ir3LncS,Terra X,video,46014; 460,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSoIZDkJqkkZs22qFiW66uRJ,Terra X,video,480; 700,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSr1txkykStZgbftboawfjHh,Terra X,video,60,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSp52esI-I_3itPxYbdT8snL,Terra X,video,420,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSooMrdrbsKoj9TE6Fhdp-G-,Terra X,video,520,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSprulcamySfyZ1MD9ff7ZGq,Terra X,video,450; 160,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSoI1pZpYqxsD30n0BQRGamc,Terra X,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSopGvArs7MEgZt5ywCZom_7,Terra X,video,240,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSpNW9WQo97k0T4PevEq5xGb,Terra X,video,260,learner; teacher,10,18,de,, +https://www.youtube.com/playlist?list=PLHrmwJ40hoSpO6aL3owpzdhUzf1TTfPz9,Terra X,video,720; 080,learner; teacher,10,18,de,, \ No newline at end of file diff --git a/etl/edu_sharing_client/api/bulk_v1_api.py b/etl/edu_sharing_client/api/bulk_v1_api.py index 0e04113a..935259f0 100644 --- a/etl/edu_sharing_client/api/bulk_v1_api.py +++ b/etl/edu_sharing_client/api/bulk_v1_api.py @@ -145,6 +145,7 @@ def sync(self, body, match, type, group, **kwargs): # 
noqa: E501 :param list[str] match: The properties that must match to identify if this node exists. Multiple properties will be and combined and compared (required) :param str type: type of node. If the node already exists, this will not change the type afterwards (required) :param str group: The group to which this node belongs to. Used for internal structuring. Please use simple names only (required) + :param list[str] group_by: The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) :param list[str] aspects: aspects of node :param bool reset_version: reset all versions (like a complete reimport), all data inside edu-sharing will be lost :return: NodeEntry @@ -172,6 +173,7 @@ def sync_with_http_info(self, body, match, type, group, **kwargs): # noqa: E501 :param list[str] match: The properties that must match to identify if this node exists. Multiple properties will be and combined and compared (required) :param str type: type of node. If the node already exists, this will not change the type afterwards (required) :param str group: The group to which this node belongs to. Used for internal structuring. Please use simple names only (required) + :param list[str] group_by: The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) :param list[str] aspects: aspects of node :param bool reset_version: reset all versions (like a complete reimport), all data inside edu-sharing will be lost :return: NodeEntry @@ -179,7 +181,7 @@ def sync_with_http_info(self, body, match, type, group, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['body', 'match', 'type', 'group', 'aspects', 'reset_version'] # noqa: E501 + all_params = ['body', 'match', 'type', 'group', 'group_by', 'aspects', 'reset_version'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -221,6 +223,9 @@ def sync_with_http_info(self, body, match, type, group, **kwargs): # noqa: E501 if 'match' in params: query_params.append(('match', params['match'])) # noqa: E501 collection_formats['match'] = 'multi' # noqa: E501 + if 'group_by' in params: + query_params.append(('groupBy', params['group_by'])) # noqa: E501 + collection_formats['groupBy'] = 'multi' # noqa: E501 if 'type' in params: query_params.append(('type', params['type'])) # noqa: E501 if 'aspects' in params: diff --git a/etl/edu_sharing_client/models/job_detail.py b/etl/edu_sharing_client/models/job_detail.py index 6a1475bf..303ee96d 100644 --- a/etl/edu_sharing_client/models/job_detail.py +++ b/etl/edu_sharing_client/models/job_detail.py @@ -36,9 +36,9 @@ class JobDetail(object): 'key': 'Key', 'volatile': 'bool', 'full_name': 'str', - 'job_listener_names': 'list[str]', 'stateful': 'bool', - 'durable': 'bool' + 'durable': 'bool', + 'job_listener_names': 'list[str]' } attribute_map = { @@ -49,12 +49,12 @@ class JobDetail(object): 'key': 'key', 'volatile': 'volatile', 'full_name': 'fullName', - 'job_listener_names': 'jobListenerNames', 'stateful': 'stateful', - 'durable': 'durable' + 'durable': 'durable', + 'job_listener_names': 'jobListenerNames' } - def __init__(self, name=None, group=None, description=None, job_data_map=None, key=None, volatile=False, full_name=None, job_listener_names=None, stateful=False, durable=False): # noqa: E501 + def __init__(self, name=None, group=None, description=None, job_data_map=None, key=None, volatile=False, 
full_name=None, stateful=False, durable=False, job_listener_names=None): # noqa: E501 """JobDetail - a model defined in Swagger""" # noqa: E501 self._name = None self._group = None @@ -63,9 +63,9 @@ def __init__(self, name=None, group=None, description=None, job_data_map=None, k self._key = None self._volatile = None self._full_name = None - self._job_listener_names = None self._stateful = None self._durable = None + self._job_listener_names = None self.discriminator = None if name is not None: self.name = name @@ -81,12 +81,12 @@ def __init__(self, name=None, group=None, description=None, job_data_map=None, k self.volatile = volatile if full_name is not None: self.full_name = full_name - if job_listener_names is not None: - self.job_listener_names = job_listener_names if stateful is not None: self.stateful = stateful if durable is not None: self.durable = durable + if job_listener_names is not None: + self.job_listener_names = job_listener_names @property def name(self): @@ -235,27 +235,6 @@ def full_name(self, full_name): self._full_name = full_name - @property - def job_listener_names(self): - """Gets the job_listener_names of this JobDetail. # noqa: E501 - - - :return: The job_listener_names of this JobDetail. # noqa: E501 - :rtype: list[str] - """ - return self._job_listener_names - - @job_listener_names.setter - def job_listener_names(self, job_listener_names): - """Sets the job_listener_names of this JobDetail. - - - :param job_listener_names: The job_listener_names of this JobDetail. # noqa: E501 - :type: list[str] - """ - - self._job_listener_names = job_listener_names - @property def stateful(self): """Gets the stateful of this JobDetail. # noqa: E501 @@ -298,6 +277,27 @@ def durable(self, durable): self._durable = durable + @property + def job_listener_names(self): + """Gets the job_listener_names of this JobDetail. # noqa: E501 + + + :return: The job_listener_names of this JobDetail. # noqa: E501 + :rtype: list[str] + """ + return self._job_listener_names + + @job_listener_names.setter + def job_listener_names(self, job_listener_names): + """Sets the job_listener_names of this JobDetail. + + + :param job_listener_names: The job_listener_names of this JobDetail. 
# noqa: E501 + :type: list[str] + """ + + self._job_listener_names = job_listener_names + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/etl/edu_sharing_client/models/service.py b/etl/edu_sharing_client/models/service.py index d3290586..83d2bc67 100644 --- a/etl/edu_sharing_client/models/service.py +++ b/etl/edu_sharing_client/models/service.py @@ -30,78 +30,21 @@ class Service(object): """ swagger_types = { 'name': 'str', - 'url': 'str', - 'icon': 'str', - 'logo': 'str', - 'in_language': 'str', - 'type': 'str', - 'description': 'str', - 'audience': 'list[Audience]', - 'provider': 'Provider', - 'start_date': 'str', - 'interfaces': 'list[Interface]', - 'about': 'list[str]', - 'is_accessible_for_free': 'bool' + 'instances': 'list[ServiceInstance]' } attribute_map = { 'name': 'name', - 'url': 'url', - 'icon': 'icon', - 'logo': 'logo', - 'in_language': 'inLanguage', - 'type': 'type', - 'description': 'description', - 'audience': 'audience', - 'provider': 'provider', - 'start_date': 'startDate', - 'interfaces': 'interfaces', - 'about': 'about', - 'is_accessible_for_free': 'isAccessibleForFree' + 'instances': 'instances' } - def __init__(self, name=None, url=None, icon=None, logo=None, in_language=None, type=None, description=None, audience=None, provider=None, start_date=None, interfaces=None, about=None, is_accessible_for_free=False): # noqa: E501 + def __init__(self, name=None, instances=None): # noqa: E501 """Service - a model defined in Swagger""" # noqa: E501 self._name = None - self._url = None - self._icon = None - self._logo = None - self._in_language = None - self._type = None - self._description = None - self._audience = None - self._provider = None - self._start_date = None - self._interfaces = None - self._about = None - self._is_accessible_for_free = None + self._instances = None self.discriminator = None - if name is not None: - self.name = name - if url is not None: - self.url = url - if icon is not None: - self.icon = icon - if logo is not None: - self.logo = logo - if in_language is not None: - self.in_language = in_language - if type is not None: - self.type = type - if description is not None: - self.description = description - if audience is not None: - self.audience = audience - if provider is not None: - self.provider = provider - if start_date is not None: - self.start_date = start_date - if interfaces is not None: - self.interfaces = interfaces - if about is not None: - self.about = about - if is_accessible_for_free is not None: - self.is_accessible_for_free = is_accessible_for_free + self.name = name + self.instances = instances @property def name(self): @@ -121,260 +64,33 @@ def name(self, name): :param name: The name of this Service. # noqa: E501 :type: str """ + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 self._name = name @property - def url(self): - """Gets the url of this Service. # noqa: E501 + def instances(self): + """Gets the instances of this Service. # noqa: E501 - :return: The url of this Service. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Service. - - - :param url: The url of this Service. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def icon(self): - """Gets the icon of this Service. # noqa: E501 - - - :return: The icon of this Service. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this Service. 
- - - :param icon: The icon of this Service. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def logo(self): - """Gets the logo of this Service. # noqa: E501 - - - :return: The logo of this Service. # noqa: E501 - :rtype: str - """ - return self._logo - - @logo.setter - def logo(self, logo): - """Sets the logo of this Service. - - - :param logo: The logo of this Service. # noqa: E501 - :type: str - """ - - self._logo = logo - - @property - def in_language(self): - """Gets the in_language of this Service. # noqa: E501 - - - :return: The in_language of this Service. # noqa: E501 - :rtype: str - """ - return self._in_language - - @in_language.setter - def in_language(self, in_language): - """Sets the in_language of this Service. - - - :param in_language: The in_language of this Service. # noqa: E501 - :type: str - """ - - self._in_language = in_language - - @property - def type(self): - """Gets the type of this Service. # noqa: E501 - - - :return: The type of this Service. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Service. - - - :param type: The type of this Service. # noqa: E501 - :type: str - """ - - self._type = type - - @property - def description(self): - """Gets the description of this Service. # noqa: E501 - - - :return: The description of this Service. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this Service. - - - :param description: The description of this Service. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def audience(self): - """Gets the audience of this Service. # noqa: E501 - - - :return: The audience of this Service. # noqa: E501 - :rtype: list[Audience] - """ - return self._audience - - @audience.setter - def audience(self, audience): - """Sets the audience of this Service. - - - :param audience: The audience of this Service. # noqa: E501 - :type: list[Audience] - """ - - self._audience = audience - - @property - def provider(self): - """Gets the provider of this Service. # noqa: E501 - - - :return: The provider of this Service. # noqa: E501 - :rtype: Provider - """ - return self._provider - - @provider.setter - def provider(self, provider): - """Sets the provider of this Service. - - - :param provider: The provider of this Service. # noqa: E501 - :type: Provider - """ - - self._provider = provider - - @property - def start_date(self): - """Gets the start_date of this Service. # noqa: E501 - - - :return: The start_date of this Service. # noqa: E501 - :rtype: str - """ - return self._start_date - - @start_date.setter - def start_date(self, start_date): - """Sets the start_date of this Service. - - - :param start_date: The start_date of this Service. # noqa: E501 - :type: str - """ - - self._start_date = start_date - - @property - def interfaces(self): - """Gets the interfaces of this Service. # noqa: E501 - - - :return: The interfaces of this Service. # noqa: E501 - :rtype: list[Interface] - """ - return self._interfaces - - @interfaces.setter - def interfaces(self, interfaces): - """Sets the interfaces of this Service. - - - :param interfaces: The interfaces of this Service. # noqa: E501 - :type: list[Interface] - """ - - self._interfaces = interfaces - - @property - def about(self): - """Gets the about of this Service. # noqa: E501 - - - :return: The about of this Service. 
# noqa: E501 - :rtype: list[str] - """ - return self._about - - @about.setter - def about(self, about): - """Sets the about of this Service. - - - :param about: The about of this Service. # noqa: E501 - :type: list[str] - """ - - self._about = about - - @property - def is_accessible_for_free(self): - """Gets the is_accessible_for_free of this Service. # noqa: E501 - - - :return: The is_accessible_for_free of this Service. # noqa: E501 - :rtype: bool + :return: The instances of this Service. # noqa: E501 + :rtype: list[ServiceInstance] """ - return self._is_accessible_for_free + return self._instances - @is_accessible_for_free.setter - def is_accessible_for_free(self, is_accessible_for_free): - """Sets the is_accessible_for_free of this Service. + @instances.setter + def instances(self, instances): + """Sets the instances of this Service. - :param is_accessible_for_free: The is_accessible_for_free of this Service. # noqa: E501 - :type: bool + :param instances: The instances of this Service. # noqa: E501 + :type: list[ServiceInstance] """ + if instances is None: + raise ValueError("Invalid value for `instances`, must not be `None`") # noqa: E501 - self._is_accessible_for_free = is_accessible_for_free + self._instances = instances def to_dict(self): """Returns the model properties as a dict""" diff --git a/etl/requirements.txt b/etl/requirements.txt index 7d61b016..2670dc0a 100644 --- a/etl/requirements.txt +++ b/etl/requirements.txt @@ -7,4 +7,5 @@ python-dotenv==0.13.0 Scrapy==1.8.0 requests==2.23.0 vobject==0.9.6.1 -xmltodict \ No newline at end of file +xmltodict +overrides==3.1.0 \ No newline at end of file diff --git a/etl/valuespace_converter/app/valuespaces.py b/etl/valuespace_converter/app/valuespaces.py index 3d020a08..872fb3b8 100644 --- a/etl/valuespace_converter/app/valuespaces.py +++ b/etl/valuespace_converter/app/valuespaces.py @@ -2,7 +2,7 @@ import json class Valuespaces: - ids = ['intendedEndUserRole', 'discipline', 'educationalContext', 'learningResourceType', 'sourceContentType'] + ids = ['intendedEndUserRole', 'discipline', 'educationalContext', 'learningResourceType', 'sourceContentType', 'toolCategory'] data = {} def __init__(self): for v in self.ids: From 8701a5562eaf8e2a974e063c81ddeb3e7203284c Mon Sep 17 00:00:00 2001 From: virgilchiriac <17074330+virgilchiriac@users.noreply.github.com> Date: Fri, 28 Aug 2020 16:34:53 +0200 Subject: [PATCH 009/590] CON-115 - merlin kreis (#5) * Re-enabling Merlin Kreis permissions. * Logging messages for thumbnails are more clear. * CON-115 - Skipping private content for now. We keep the logic for private content, but for now we just skip such elements. 
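
Concretely, the new guard in parse() (see the merlin_spider hunk below) keeps only
elements whose single Kreis code is the prefixed public one, roughly:

    if not (len(element_dict["kreis_id"]) == 1
            and str(element_dict["kreis_id"][0]) == "merlin_spider_100"):
        continue
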
Co-authored-by: JohnKoumarelas --- etl/converter/pipelines.py | 3 +- etl/converter/spiders/merlin_spider.py | 83 +++++++++++++++++--------- 2 files changed, 58 insertions(+), 28 deletions(-) diff --git a/etl/converter/pipelines.py b/etl/converter/pipelines.py index b318c3fd..e7a47b39 100644 --- a/etl/converter/pipelines.py +++ b/etl/converter/pipelines.py @@ -275,12 +275,13 @@ def process_item(self, item, spider): + url + ": " + str(e) - + " (falling back to screenshot)" ) if "thumbnail" in item: + logging.warn("(falling back to " + ("defaultThumbnail" if "defaultThumbnail" in item else "screenshot") + ")") del item["thumbnail"] return self.process_item(item, spider) elif 'defaultThumbnail' in item: + logging.warn("(falling back to screenshot)") del item['defaultThumbnail'] return self.process_item(item, spider) else: diff --git a/etl/converter/spiders/merlin_spider.py b/etl/converter/spiders/merlin_spider.py index b85b521f..4cfc649a 100644 --- a/etl/converter/spiders/merlin_spider.py +++ b/etl/converter/spiders/merlin_spider.py @@ -18,7 +18,7 @@ class MerlinSpider(CrawlSpider, LomBase): name = "merlin_spider" url = "https://merlin.nibis.de/index.php" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "Merlin" # name as shown in the search ui - version = "0.1" # the version of your crawler, used to identify if a reimport is necessary + version = "0.2" # the version of your crawler, used to identify if a reimport is necessary apiUrl = "https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*" # * regular expression, to represent all possible values. limit = 100 @@ -59,13 +59,29 @@ def parse(self, response: scrapy.http.Response): element_xml_str = etree.tostring( element, pretty_print=True, encoding="unicode" ) - element_dict = xmltodict.parse(element_xml_str) try: + element_dict = xmltodict.parse(element_xml_str) + element_dict = element_dict["data"] + + # Preparing the values here helps for all following logic across the methods. + prepare_element(self, element_dict) + + # If there is no available Kreis code, then we do not want to deal with this element. + if not("kreis_id" in element_dict + and element_dict["kreis_id"] is not None + and len(element_dict["kreis_id"]) > 0): + continue + + # If the content is private, skip it for now! + # TODO: remove this when the private is more clear!!! + if not(len(element_dict["kreis_id"]) == 1 and str(element_dict["kreis_id"][0]) == "merlin_spider_100"): + continue + # TODO: It's probably a pointless attribute. # del element_dict["data"]["score"] # Passing the dictionary for easier access to attributes. - copyResponse.meta["item"] = element_dict["data"] + copyResponse.meta["item"] = element_dict # In case JSON string representation is preferred: # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) @@ -130,14 +146,20 @@ def handleEntry(self, response): def getBase(self, response): base = LomBase.getBase(self, response) - base.add_value("thumbnail", response.xpath("/data/thumbnail/text()").get()) - if response.xpath("/data/srcLogoUrl/text()").get(): - base.add_value("defaultThumbnail", "https://merlin.nibis.de" + response.xpath("/data/srcLogoUrl/text()").get()) - elif response.xpath("/data/logo/text()").get(): - base.add_value("defaultThumbnail", "https://merlin.nibis.de" + response.xpath("/data/logo/text()").get()) - else: # backup thumbnail hard-coded. 
- base.add_value('defaultThumbnail', 'https://merlin.nibis.de/logos/bs_logos/merlin.png') + # Element response as a Python dict. + element_dict = dict(response.meta["item"]) + + base.add_value("thumbnail", element_dict.get("thumbnail", "")) # get or default + + # As a backup, if no other thumbnail URL is available. + element_dict["hardcodedDefaultLogoUrl"] = "/logos/bs_logos/merlin.png" + + # By the order of preference. As soon as one of these default thumbnails is available you keep that. + for default_thumbnail in ["srcLogoUrl", "logo", "hardcodedDefaultLogoUrl"]: + if default_thumbnail in element_dict: + base.add_value("defaultThumbnail", "https://merlin.nibis.de" + element_dict[default_thumbnail]) + break return base @@ -160,10 +182,11 @@ def getLicense(self, response): # Element response as a Python dict. element_dict = response.meta["item"] - if "kreis_id" in element_dict and element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: - license.replace_value('internal', Constants.LICENSE_NONPUBLIC) # private - else: + # If there is only one element and is the Kreis code 100, then it is public content. + if len(element_dict["kreis_id"]) == 1 and str(element_dict["kreis_id"][0]) == "merlin_spider_100": license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # public + else: + license.replace_value('internal', Constants.LICENSE_NONPUBLIC) # private return license @@ -229,21 +252,27 @@ def getPermissions(self, response): permissions.replace_value("public", False) permissions.add_value("autoCreateGroups", True) + permissions.add_value("autoCreateMediacenters", True) - # If the license is private. - if "kreis_id" in element_dict and element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: - # Self-explained. 1 media center per Kreis-code in this case. - # permissions.add_value("autoCreateMediacenters", True) - kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... - if not isinstance(kreis_ids, list): # one element - kreis_ids = [kreis_ids] - kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) - # kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix - - # permissions.add_value('groups', ['Lower Saxony']) - permissions.add_value("groups", ["LowerSaxony-private"]) - # permissions.add_value('mediacenters', kreis_ids) - else: + # If there is only one element and is the Kreis code 100, then it is public content. + if len(element_dict["kreis_id"]) == 1 and str(element_dict["kreis_id"][0]) == "merlin_spider_100": permissions.add_value("groups", ["LowerSaxony-public"]) + else: + permissions.add_value("groups", ["LowerSaxony-private"]) + + # Self-explained. 1 media center per Kreis-code in this case. + permissions.add_value('mediacenters', element_dict["kreis_id"]) return permissions + +def prepare_element(self, element_dict): + # Step 1. Prepare Kreis codes. + if "kreis_id" in element_dict and element_dict["kreis_id"] is not None: + kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... 
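+        # xmltodict yields either a single string here (one <data> child) or a
+        # list of strings (several <data> children); the check below normalises
+        # both shapes to a list before sorting and prefixing the codes.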
+ if not isinstance(kreis_ids, list): # one element + kreis_ids = [kreis_ids] + kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) + kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix + element_dict["kreis_id"] = kreis_ids + + return element_dict \ No newline at end of file From 6621c08c1ae20232e0a55854428b360c31ebd728 Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Fri, 25 Sep 2020 15:40:03 +0200 Subject: [PATCH 010/590] change tutory spider to new api (#8) Co-authored-by: Torsten Simon --- converter/spiders/tutory_spider.py | 44 +++++++++++++++--------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 57ff5aae..86a0a40b 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -18,25 +18,24 @@ class TutorySpider(scrapy.Spider, LomBase, JSONBase): name = "tutory_spider" friendlyName = "tutory" url = "https://www.tutory.de/" - baseUrl = "https://www.tutory.de/api/v1/" - version = "0.1.0" + objectUrl = "https://www.tutory.de/bereitstellung/dokument/" + baseUrl = "https://www.tutory.de/api/v1/share/" + version = "0.1.1" def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) def start_requests(self): - url = self.baseUrl + "worksheet?pageSize=999999" + url = self.baseUrl + "worksheet?groupSlug=entdecken&pageSize=999999" yield scrapy.Request(url=url, callback=self.parseList) def parseList(self, response): data = json.loads(response.body) for j in data["worksheets"]: - responseCopy = response.replace(url=self.url + "worksheet/" + j["id"]) + responseCopy = response.replace(url=self.objectUrl + j["id"]) responseCopy.meta["item"] = j if self.hasChanged(responseCopy): - yield scrapy.Request( - url=responseCopy.url, callback=self.parse, meta={"item": j} - ) + yield self.parse(responseCopy) def getId(self, response): return str(response.meta["item"]["id"]) @@ -45,7 +44,6 @@ def getHash(self, response): return response.meta["item"]["updatedAt"] + self.version def parse(self, response): - print(response.url) return LomBase.parse(self, response) def getBase(self, response): @@ -53,7 +51,7 @@ def getBase(self, response): base.add_value("lastModified", response.meta["item"]["updatedAt"]) base.add_value( "thumbnail", - self.url + "worksheet/" + response.meta["item"]["id"] + ".jpg?width=1000", + self.objectUrl + response.meta["item"]["id"] + ".jpg?width=1000", ) return base @@ -79,23 +77,25 @@ def getLicense(self, response): def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"]["name"]) - if response.meta["item"]["description"] != "": + if 'description' in response.meta["item"]: general.add_value("description", response.meta["item"]["description"]) else: html = self.getUrlData(response.url)["html"] - data = ( - Selector(text=html) - .xpath('//ul[contains(@class,"worksheet-pages")]//text()') - .getall() - ) - cutoff = 4 - if len(data) > cutoff: - for i in range(cutoff): - del data[0] + general.add_value("description", 'test') + if html: + data = ( + Selector(text=html) + .xpath('//ul[contains(@class,"worksheet-pages")]//text()') + .getall() + ) + cutoff = 4 + if len(data) > cutoff: + for i in range(cutoff): + del data[0] - text = " ".join(data) - text = text[:1000] - general.add_value("description", text) + text = " ".join(data) + text = text[:1000] + general.add_value("description", text) return general def getLOMTechnical(self, response): From 
fa76e1cda301d623822783501938d4100780b06c Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Tue, 6 Oct 2020 16:41:41 +0200 Subject: [PATCH 011/590] SC-6677 - Dockerfile for Edu-Sharing initialization (#9) * SC-6677 - Dockerfile for Edu-Sharing initialization In this commit, we automatically configure Edu-Sharing w.r.t. the metadatasets file "mds_oeh.xml". - Please find information about the different mds_oeh.xml files in the schulcloud/metadatasets/README.md file. This information was collected through e-mails exchanged between Schul-Cloud and Edu-Sharing/Metaventis and by comparing the files themselves. - The former mds_oeh_v2.xml was deleted as there was no difference with mds_oeh_v1.xml. - Dockerfile has been updated to use mds_oeh_17_09_2020.xml (latest version). --- schulcloud/Dockerfile | 16 + schulcloud/docker-compose.yml | 29 ++ schulcloud/metadatasets/README.md | 11 + .../metadatasets/curl_metadatasetsV2.sh | 22 ++ .../metadatasets/mds_oeh_11_09_2020.xml | 359 +++++++++++++++++ .../metadatasets/mds_oeh_17_09_2020.xml | 360 ++++++++++++++++++ .../metadatasets/mds_oeh_24_06_2020.xml | 154 ++++++++ 7 files changed, 951 insertions(+) create mode 100644 schulcloud/Dockerfile create mode 100644 schulcloud/docker-compose.yml create mode 100644 schulcloud/metadatasets/README.md create mode 100644 schulcloud/metadatasets/curl_metadatasetsV2.sh create mode 100644 schulcloud/metadatasets/mds_oeh_11_09_2020.xml create mode 100644 schulcloud/metadatasets/mds_oeh_17_09_2020.xml create mode 100644 schulcloud/metadatasets/mds_oeh_24_06_2020.xml diff --git a/schulcloud/Dockerfile b/schulcloud/Dockerfile new file mode 100644 index 00000000..f36c299b --- /dev/null +++ b/schulcloud/Dockerfile @@ -0,0 +1,16 @@ + FROM edusharing/repo-rs-moodle:sc-latest +# Tested on FROM edusharing/repo-rs-moodle:sc-2a81f4d31 +# +# Production version, on mv-repo, (0f18b0ce2) not available in Docker Hub: +# https://hub.docker.com/r/edusharing/repo-rs-moodle/tags + +# Copy the metadatasets file to the container, while changing the permissions to the correct user:group. +COPY --chown=tomcat:tomcat \ + metadatasets/mds_oeh_17_09_2020.xml \ + /usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml/mds_oeh.xml + +# Add the script that will wait and add the mds_oeh_24_06_2020.xml at the right time. +ADD metadatasets/curl_metadatasetsV2.sh /root/curl_metadatasetsV2.sh + +# sleep infinity as otherwise immediately after the container would start the container would exit. +CMD /root/curl_metadatasetsV2.sh; sleep infinity diff --git a/schulcloud/docker-compose.yml b/schulcloud/docker-compose.yml new file mode 100644 index 00000000..a5c25233 --- /dev/null +++ b/schulcloud/docker-compose.yml @@ -0,0 +1,29 @@ +version: "3.7" +services: + + edusharing: + build: . + container_name: edusharing_docker + ports: + - "80:80" + volumes: + - "alf_data:/usr/local/alfresco/alf_data/" + - "pg_data:/pg_data/" + - "moodle_data:/var/moodledata/" + - "rs_cache:/var/cache/esrender/" + - "mds_xml_dir:/usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml" + environment: + - "PORT=80" + - "DOMAIN=localhost" + - "SCHEME=http" + # network_mode: host + deploy: # For Docker Swarm. 
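+      # Note: the deploy block is only applied by `docker stack deploy` (swarm mode);
+      # the classic `docker-compose up` ignores it unless run with --compatibility.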
+ restart_policy: + condition: on-failure + +volumes: + alf_data: + pg_data: + moodle_data: + rs_cache: + mds_xml_dir: diff --git a/schulcloud/metadatasets/README.md b/schulcloud/metadatasets/README.md new file mode 100644 index 00000000..b8ce03a9 --- /dev/null +++ b/schulcloud/metadatasets/README.md @@ -0,0 +1,11 @@ +###### Description +In this directory you can find the various "mds_oeh.xml" (Metadatasets for Open Edu Hub) files provided by Edu-Sharing/Metaventis company. In this file we attempt to provide am overview of the changes across the different provided versions of the file. + +The "curl_metadatasetsV2.sh" file is part of the Docker execution to let the system know about the included metadatasets file. + +###### Versions of mds_oeh.xml + +- mds_oeh_24_06_2020.xml: The first provided file. +- mds_oeh_11_09_2020.xml: Introduced the "ngsearch" query, originally only provided by the default metadatasets file, mds.xml. Changes in content element fields, default collections sorting by creation date. +- mds_oeh_17_09_2020.xml: Added the "ccm:replicationsourceuuid" property field in the Collections query, s.t. it can be queried and changed its format in the "DSL" query format. It should be now available for Solr and probably for Elasticsearch as well. + diff --git a/schulcloud/metadatasets/curl_metadatasetsV2.sh b/schulcloud/metadatasets/curl_metadatasetsV2.sh new file mode 100644 index 00000000..c872eae3 --- /dev/null +++ b/schulcloud/metadatasets/curl_metadatasetsV2.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# Through the web interface. [Not working!] +# message_to_detect='edu-sharing Docker Demo' +# until [ "`curl --silent --show-error --connect-timeout 1 http://localhost:80 | grep \"$message_to_detect\"`" != "" ]; + +# Through the tomcat log files. [Works!] +message_to_detect='INFO: Server startup in ' +until [ "`cat /usr/local/tomcat/logs/catalina* | grep \"$message_to_detect\"`" != "" ]; +do + echo --- sleeping for 10 seconds + sleep 10 +done + +echo Tomcat is ready! + +curl -X PUT \ + --header 'Content-Type: application/json' \ + --user admin:admin \ + --header 'Accept: application/xml' \ + -d '{"metadatasetsV2":"mds,mds_oeh"}' \ + 'http://127.0.0.1/edu-sharing/rest/admin/v1/applications/homeApplication.properties.xml' \ No newline at end of file diff --git a/schulcloud/metadatasets/mds_oeh_11_09_2020.xml b/schulcloud/metadatasets/mds_oeh_11_09_2020.xml new file mode 100644 index 00000000..115847a1 --- /dev/null +++ b/schulcloud/metadatasets/mds_oeh_11_09_2020.xml @@ -0,0 +1,359 @@ + \ No newline at end of file diff --git a/schulcloud/metadatasets/mds_oeh_17_09_2020.xml b/schulcloud/metadatasets/mds_oeh_17_09_2020.xml new file mode 100644 index 00000000..a49ffa2f --- /dev/null +++ b/schulcloud/metadatasets/mds_oeh_17_09_2020.xml @@ -0,0 +1,360 @@ + \ No newline at end of file diff --git a/schulcloud/metadatasets/mds_oeh_24_06_2020.xml b/schulcloud/metadatasets/mds_oeh_24_06_2020.xml new file mode 100644 index 00000000..ddbada2c --- /dev/null +++ b/schulcloud/metadatasets/mds_oeh_24_06_2020.xml @@ -0,0 +1,154 @@ + \ No newline at end of file From 99e43334b2bd97fce1c899d32ab8eda8563523e0 Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Tue, 6 Oct 2020 16:43:23 +0200 Subject: [PATCH 012/590] SC-7041 - Improving Merlin's thumbnail URLs (#10) While examining the thumbnail URLs to understand how authentication could help, we figured that most visible issues are caused by wrong URL values. 
Therefore, we started cleaning the values and three changes (steps to repair issues) are available in this commit. --- converter/spiders/merlin_spider.py | 44 ++++++++++++++++++++++-------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 4cfc649a..68c3277b 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -64,7 +64,7 @@ def parse(self, response: scrapy.http.Response): element_dict = element_dict["data"] # Preparing the values here helps for all following logic across the methods. - prepare_element(self, element_dict) + self.prepare_element(element_dict) # If there is no available Kreis code, then we do not want to deal with this element. if not("kreis_id" in element_dict @@ -265,14 +265,34 @@ def getPermissions(self, response): return permissions -def prepare_element(self, element_dict): - # Step 1. Prepare Kreis codes. - if "kreis_id" in element_dict and element_dict["kreis_id"] is not None: - kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... - if not isinstance(kreis_ids, list): # one element - kreis_ids = [kreis_ids] - kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) - kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix - element_dict["kreis_id"] = kreis_ids - - return element_dict \ No newline at end of file + def prepare_element(self, element_dict): + # Step 1. Prepare Kreis codes. + if "kreis_id" in element_dict and element_dict["kreis_id"] is not None: + kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... + if not isinstance(kreis_ids, list): # one element + kreis_ids = [kreis_ids] + kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) + kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix + element_dict["kreis_id"] = kreis_ids + + # Step 2. Fix thumbnail URL. + thumbnail_prepared = element_dict["thumbnail"] + + # Step 2. Case a: Remove the 3 dots "...". + thumbnail_prepared = thumbnail_prepared.replace("...", "") + + # Step 2. Case b: Replace "%2F" with '/' + # TODO: check why not ALL occurrences are replaced. + thumbnail_prepared = thumbnail_prepared.replace("%2F", "/") + + # Step 2. Case c: Replace the dot after the parent identifier with a '/'. + if element_dict["parent_identifier"] is not None: + parent_identifier = element_dict["parent_identifier"] + subpath_position = thumbnail_prepared.find(parent_identifier) + len(parent_identifier) + if thumbnail_prepared[subpath_position] == ".": + thumbnail_prepared = thumbnail_prepared[:subpath_position] + "/" + thumbnail_prepared[subpath_position + 1:] + + element_dict["thumbnail"] = thumbnail_prepared + + return element_dict + From ee58ba439d1a0d3baecdeb070c171e3cb3e5ed8e Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Tue, 6 Oct 2020 16:43:51 +0200 Subject: [PATCH 013/590] SC-6861 - sources friendly names (#11) Added two more fields to allow for a "friendly" name to appear to users when getting results: 1. replicationsource_DISPLAYNAME: for simple sources, such as merlin_spider and mediothek_pixiothek_spider. 2. replicationsourceorigin_DISPLAYNAME: for compound cases, such as "oeh_spider", which include other sources internally. 
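
Roughly how the helper added below resolves these display names (illustrative call;
"unknown_spider" is just a placeholder, the real values come from each spider's
friendlyName attribute):

    from converter.spiders.utils.spider_name_converter import get_spider_friendly_name

    get_spider_friendly_name("merlin_spider")    # -> "Merlin"
    get_spider_friendly_name("unknown_spider")   # logged as missing, returned unchanged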
--- converter/es_connector.py | 3 ++ .../spiders/utils/spider_name_converter.py | 50 +++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 converter/spiders/utils/spider_name_converter.py diff --git a/converter/es_connector.py b/converter/es_connector.py index 17611cf3..5c7e15bf 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -12,6 +12,7 @@ from vobject.vcard import VCardBehavior from converter.constants import Constants +from converter.spiders.utils.spider_name_converter import get_spider_friendly_name from edu_sharing_client.api_client import ApiClient from edu_sharing_client.configuration import Configuration from edu_sharing_client.api.bulk_v1_api import BULKV1Api @@ -186,6 +187,7 @@ def mapLicense(self, spaces, license): def transformItem(self, uuid, spider, item): spaces = { "ccm:replicationsource": spider.name, + "ccm:replicationsource_DISPLAYNAME": get_spider_friendly_name(spider.name), "ccm:replicationsourceid": item["sourceId"], "ccm:replicationsourcehash": item["hash"], "ccm:objecttype": item["type"], @@ -199,6 +201,7 @@ def transformItem(self, uuid, spider, item): spaces["ccm:replicationsourceorigin"] = item[ "origin" ] # TODO currently not mapped in edu-sharing + spaces["ccm:replicationsourceorigin_DISPLAYNAME"] = get_spider_friendly_name(item["origin"]) self.mapLicense(spaces, item["license"]) if "description" in item["lom"]["general"]: diff --git a/converter/spiders/utils/spider_name_converter.py b/converter/spiders/utils/spider_name_converter.py new file mode 100644 index 00000000..35ba9d64 --- /dev/null +++ b/converter/spiders/utils/spider_name_converter.py @@ -0,0 +1,50 @@ +import logging + +spider_to_friendly_name = None + + +def load_friendly_spider_names(): + """ + Returns a dictionary which maps the Spider's name to its "friendly" name. + + e.g., merlin_spider --> Merlin, br_rss_spider --> Bayerischer Rundfunk + + Based on https://stackoverflow.com/questions/46871133/get-all-spiders-class-name-in-scrapy + + Author: Ioannis Koumarelas, ioannis.koumarelas@hpi.de, Schul-Cloud, Content team. + """ + from scrapy.utils import project + from scrapy import spiderloader + + settings = project.get_project_settings() + spider_loader = spiderloader.SpiderLoader.from_settings(settings) + + spider_names = spider_loader.list() + spider_classes = [spider_loader.load(name) for name in spider_names] + + spider_name_to_friendly_name = {} + for spider in spider_classes: + spider_name_to_friendly_name[spider.name] = spider.friendlyName + + return spider_name_to_friendly_name + + +def get_spider_friendly_name(spider_name): + """ + Given the spider's name, returns its friendly name. + """ + + global spider_to_friendly_name + if spider_to_friendly_name is None: + spider_to_friendly_name = load_friendly_spider_names() + + if spider_name in spider_to_friendly_name: + return spider_to_friendly_name[spider_name] + else: + if spider_name is not None: + logging.info("Friendly name for spider " + spider_name + " has not been found.") + return spider_name + + +if __name__ == '__main__': + load_friendly_spider_names() \ No newline at end of file From 0ff224c892263c257b8b5f4ff68668663888893e Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Thu, 8 Oct 2020 10:10:22 +0200 Subject: [PATCH 014/590] SC-6917 - Adapt Merlin permissions to allow private content. 
(#12) The media-centers have been removed, as they were never really needed, the Kreises are created as groups, and the groups have been adjusted to not include the Kreis 100, when more than 1 Kreis exists. --- converter/spiders/merlin_spider.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 68c3277b..6ae8bc25 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -72,11 +72,6 @@ def parse(self, response: scrapy.http.Response): and len(element_dict["kreis_id"]) > 0): continue - # If the content is private, skip it for now! - # TODO: remove this when the private is more clear!!! - if not(len(element_dict["kreis_id"]) == 1 and str(element_dict["kreis_id"][0]) == "merlin_spider_100"): - continue - # TODO: It's probably a pointless attribute. # del element_dict["data"]["score"] @@ -252,16 +247,30 @@ def getPermissions(self, response): permissions.replace_value("public", False) permissions.add_value("autoCreateGroups", True) - permissions.add_value("autoCreateMediacenters", True) + + groups = [] # If there is only one element and is the Kreis code 100, then it is public content. if len(element_dict["kreis_id"]) == 1 and str(element_dict["kreis_id"][0]) == "merlin_spider_100": - permissions.add_value("groups", ["LowerSaxony-public"]) + # Add to state-wide public group. + groups.append("LowerSaxony-public") + + # Add 1 group per Kreis-code, which in this case is just "100" (merlin_spider_100). + groups.extend(element_dict["kreis_id"]) else: - permissions.add_value("groups", ["LowerSaxony-private"]) + # Add to state-wide private group. + groups.append("LowerSaxony-private") + + kreis_ids = element_dict["kreis_id"] + + # If Kreis code 100 (country-wide) is included in the list, remove it. + if "merlin_spider_100" in kreis_ids: + kreis_ids.remove("merlin_spider_100") + + # Add 1 group per Kreis-code. + groups.extend(kreis_ids) - # Self-explained. 1 media center per Kreis-code in this case. - permissions.add_value('mediacenters', element_dict["kreis_id"]) + permissions.add_value("groups", groups) return permissions From eacdadcd8af169cb54629db0d2d46a5616f854a6 Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Mon, 16 Nov 2020 10:51:59 +0100 Subject: [PATCH 015/590] SC-7700 - Updating Merlin's group (states, counties) names to official (#14) * SC-7700 - Updating Merlin's group (states, counties) names to official County (Kreis) codes are now converted to the official format, being prefixed with "3". Extra things that are done: * Merlin variable names converted to English * Prepared proper state group values to be used in the future. * Extra code refinements. * Added mds_oeh_override.xml, so we can search using the groups field (ccm:ph_invited). 
* Changes in the Dockerfile to support the new mds_oeh_override.xml * SC-7700 - Associated OEH content with group 'public' --- converter/spiders/merlin_spider.py | 61 +++++++++++--------- converter/spiders/oeh_spider.py | 10 ++++ schulcloud/Dockerfile | 4 ++ schulcloud/metadatasets/mds_oeh_override.xml | 21 +++++++ 4 files changed, 69 insertions(+), 27 deletions(-) create mode 100644 schulcloud/metadatasets/mds_oeh_override.xml diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 6ae8bc25..3a10707e 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -66,10 +66,10 @@ def parse(self, response: scrapy.http.Response): # Preparing the values here helps for all following logic across the methods. self.prepare_element(element_dict) - # If there is no available Kreis code, then we do not want to deal with this element. - if not("kreis_id" in element_dict - and element_dict["kreis_id"] is not None - and len(element_dict["kreis_id"]) > 0): + # If there is no available county (Kreis) code, then we do not want to deal with this element. + if not("county_ids" in element_dict + and element_dict["county_ids"] is not None + and len(element_dict["county_ids"]) > 0): continue # TODO: It's probably a pointless attribute. @@ -177,8 +177,8 @@ def getLicense(self, response): # Element response as a Python dict. element_dict = response.meta["item"] - # If there is only one element and is the Kreis code 100, then it is public content. - if len(element_dict["kreis_id"]) == 1 and str(element_dict["kreis_id"][0]) == "merlin_spider_100": + # If there is only one element and is the County code 3100, then it is public content. + if len(element_dict["county_ids"]) == 1 and str(element_dict["county_ids"][0]) == "county-3100": license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # public else: license.replace_value('internal', Constants.LICENSE_NONPUBLIC) # private @@ -236,9 +236,9 @@ def getValuespaces(self, response): def getPermissions(self, response): """ - In case license information, in the form of Kreis codes, is available. This changes the permissions from + In case license information, in the form of counties (Kreis codes), is available. This changes the permissions from public to private and sets accordingly the groups and mediacenters. For more information regarding the available - Merlin kreis codes please consult 'http://merlin.nibis.de/index.php?action=kreise' + Merlin county (kreis) codes please consult 'http://merlin.nibis.de/index.php?action=kreise' """ permissions = LomBase.getPermissions(self, response) @@ -250,39 +250,46 @@ def getPermissions(self, response): groups = [] - # If there is only one element and is the Kreis code 100, then it is public content. - if len(element_dict["kreis_id"]) == 1 and str(element_dict["kreis_id"][0]) == "merlin_spider_100": + county_ids = element_dict["county_ids"] + public_county = "county-3100" + + # If there is only one element and is the County code 3100, then it is public content. + if len(county_ids) == 1 and str(county_ids[0]) == public_county: # Add to state-wide public group. + # groups.append("state-LowerSaxony-public") groups.append("LowerSaxony-public") - # Add 1 group per Kreis-code, which in this case is just "100" (merlin_spider_100). - groups.extend(element_dict["kreis_id"]) + # Add 1 group per County-code, which in this case is just "100" (3100). + groups.extend(county_ids) else: - # Add to state-wide private group. + # Add to state-wide private/licensed group. 
+ # groups.append("state-LowerSaxony-licensed") groups.append("LowerSaxony-private") - kreis_ids = element_dict["kreis_id"] - - # If Kreis code 100 (country-wide) is included in the list, remove it. - if "merlin_spider_100" in kreis_ids: - kreis_ids.remove("merlin_spider_100") + # If County code 100 (country-wide) is included in the list, remove it. + if public_county in county_ids: + county_ids.remove(public_county) - # Add 1 group per Kreis-code. - groups.extend(kreis_ids) + # Add 1 group per county. + groups.extend(county_ids) permissions.add_value("groups", groups) return permissions def prepare_element(self, element_dict): - # Step 1. Prepare Kreis codes. + # Step 1. Prepare county (Kreis) codes. if "kreis_id" in element_dict and element_dict["kreis_id"] is not None: - kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... - if not isinstance(kreis_ids, list): # one element - kreis_ids = [kreis_ids] - kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) - kreis_ids = [self.name + "_" + id for id in kreis_ids] # add prefix - element_dict["kreis_id"] = kreis_ids + county_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... + if not isinstance(county_ids, list): # one element + county_ids = [county_ids] + county_ids = sorted(county_ids, key=lambda x: int(x)) + + # Add prefix "3" to conform with nationally-assigned IDs: + # https://de.wikipedia.org/wiki/Liste_der_Landkreise_in_Deutschland + county_ids = ["3" + id for id in county_ids] + county_ids = ["county-" + x for x in county_ids] + element_dict["county_ids"] = county_ids # Step 2. Fix thumbnail URL. thumbnail_prepared = element_dict["thumbnail"] diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 54368426..2edd774c 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -1,6 +1,7 @@ import logging from converter.spiders.edu_sharing_base import EduSharingBase +from converter.spiders.lom_base import LomBase class OEHSpider(EduSharingBase): @@ -36,3 +37,12 @@ def shouldImport(self, response=None): ) return False return True + + def getPermissions(self, response): + permissions = LomBase.getPermissions(self, response) + + permissions.replace_value("public", False) + permissions.add_value("autoCreateGroups", True) + permissions.add_value("groups", ["public"]) + + return permissions diff --git a/schulcloud/Dockerfile b/schulcloud/Dockerfile index f36c299b..6ee1382d 100644 --- a/schulcloud/Dockerfile +++ b/schulcloud/Dockerfile @@ -9,6 +9,10 @@ COPY --chown=tomcat:tomcat \ metadatasets/mds_oeh_17_09_2020.xml \ /usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml/mds_oeh.xml +COPY --chown=tomcat:tomcat \ + metadatasets/mds_oeh_override.xml \ + /usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml/mds_oeh_override.xml + # Add the script that will wait and add the mds_oeh_24_06_2020.xml at the right time. ADD metadatasets/curl_metadatasetsV2.sh /root/curl_metadatasetsV2.sh diff --git a/schulcloud/metadatasets/mds_oeh_override.xml b/schulcloud/metadatasets/mds_oeh_override.xml new file mode 100644 index 00000000..3f43700c --- /dev/null +++ b/schulcloud/metadatasets/mds_oeh_override.xml @@ -0,0 +1,21 @@ + \ No newline at end of file From 4f05b4091787d0b6c7483a4db732fb0cca99d06b Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Thu, 3 Dec 2020 16:00:31 +0100 Subject: [PATCH 016/590] SC-7761 - Implement changes to support collections. 
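[Editorial sketch, not part of the commit] Based on the ticket description and the diffs below, a collection parent and one of its children are expected to carry the extra relation, aggregation level and searchable fields roughly as follows; the uuid values here are invented purely for illustration.

    # parent ("representative") element of a collection
    parent = {
        "uuid": "11111111-2222-3333-4444-555555555555",  # hypothetical ccm:replicationsourceuuid
        "searchable": 1,           # parents show up in search results
        "aggregation_level": 2,    # LOM aggregation level for collections
        "relation": [{"kind": "haspart",
                      "resource": {"identifier": ["66666666-7777-8888-9999-000000000000"]}}],
    }
    # child element belonging to that collection
    child = {
        "uuid": "66666666-7777-8888-9999-000000000000",
        "searchable": 0,           # children are hidden from search results
        "aggregation_level": 1,
        "relation": [{"kind": "ispartof",
                      "resource": {"identifier": [parent["uuid"]]}}],
    }
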
This ticket introduces the following new information: (a) relation, which defines the relationship between part and children elements of a collection, (b) aggregation level, which should be 2 for parent elements and 1 for children elements, and (c) searchable, which should be 1 for parent elements and 0 for children elements. In particular, the changes in this ticket: SC-8810 - Implement changes for the rest spiders (Merlin and OEH_spider). Adding aggregationLevel, searchable, and an empty relation attribute is provided by default (if we pass no value at all from the spiders). SC-8009: Changed spider version. SC-8009 - Implement changes to support collections in Mediothek. 1. Relation is now part of LomBase (items.py). 2. LomBase expects a method to include Relation information (lom_base.py). 3. The final transformation that is sent to Edu-Sharing now expects this extra information (relation, aggregation level, and searchable) and passes it (es_connector.py). 4. All the necessary groupings of items into collections and the according selection of parent elements (mediothek_pixiothek_spider.py). Special treatment is done to singular collection elements which describe a full collection. 5. Default values are set up for the rest spiders (merlin_spider.py and oeh_spider.py). --- converter/es_connector.py | 12 + converter/items.py | 22 ++ converter/spiders/lom_base.py | 4 + .../spiders/mediothek_pixiothek_spider.py | 364 +++++++++++++----- converter/spiders/merlin_spider.py | 6 + converter/spiders/oeh_spider.py | 10 +- 6 files changed, 319 insertions(+), 99 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 5c7e15bf..3f261ab4 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -196,6 +196,8 @@ def transformItem(self, uuid, spider, item): "ccm:wwwurl": item["lom"]["technical"]["location"], "cclom:location": item["lom"]["technical"]["location"], "cclom:title": item["lom"]["general"]["title"], + "cclom:general_aggregationlevel": item["lom"]["general"]["aggregationLevel"], + "ccm:searchable": item["searchable"], } if "origin" in item: spaces["ccm:replicationsourceorigin"] = item[ @@ -285,6 +287,16 @@ def transformItem(self, uuid, spider, item): if not type(spaces[key]) is list: spaces[key] = [spaces[key]] + # Relation information, according to the LOM-DE.doc#7 specifications: http://sodis.de/lom-de/LOM-DE.doc + if "relation" in item["lom"]: + spaces["cclom:relation"] = item["lom"]["relation"] + # Since Edu-Sharing has no further information about the schema of this attribute it is better to treat it + # as a list of strings and not as a JSON. + for i, element in enumerate(spaces["cclom:relation"]): + relation_value = str(element).replace("\n", "").replace("\r", "") + relation_value = ' '.join(relation_value.split()) + spaces["cclom:relation"][i] = relation_value + return spaces def createGroupsIfNotExists(self, groups, type: CreateGroupType): diff --git a/converter/items.py b/converter/items.py index e2814678..cb8792f1 100644 --- a/converter/items.py +++ b/converter/items.py @@ -101,6 +101,18 @@ class LomClassificationItem(Item): description = Field() keyword = Field() +class LomRelationResourceItem(Item): + identifier = Field(output_processor=JoinMultivalues()) + catalog = Field() + entry = Field() + description = Field() + +class LomRelationItem(Item): + """ + Following the LOM-DE.doc#7 (Relation) specifications: http://sodis.de/lom-de/LOM-DE.doc . 
+ """ + kind = Field() + resource = Field(serializer=LomRelationResourceItem) class LomBaseItem(Item): general = Field(serializer=LomGeneralItem) @@ -109,6 +121,7 @@ class LomBaseItem(Item): educational = Field(serializer=LomEducationalItem) # rights = Field(serializer=LomRightsItem) classification = Field(serializer=LomClassificationItem) + relation = Field(serializer=LomRelationItem, output_processor=JoinMultivalues()) class ResponseItem(Item): @@ -174,6 +187,7 @@ class BaseItem(Item): "permissions (access rights) for this entry" license = Field(serializer=LicenseItem) publisher = Field() + searchable = Field() class BaseItemLoader(ItemLoader): @@ -239,6 +253,14 @@ class LomClassificationItemLoader(ItemLoader): default_item_class = LomClassificationItem default_output_processor = TakeFirst() +class LomRelationResourceItemLoader(ItemLoader): + default_item_class = LomRelationResourceItem + default_output_processor = TakeFirst() + +class LomRelationItemLoader(ItemLoader): + default_item_class = LomRelationItem + default_output_processor = TakeFirst() + class PermissionItemLoader(ItemLoader): default_item_class = PermissionItem diff --git a/converter/spiders/lom_base.py b/converter/spiders/lom_base.py index b28c6cea..4b3b8444 100644 --- a/converter/spiders/lom_base.py +++ b/converter/spiders/lom_base.py @@ -152,6 +152,7 @@ def getLOM(self, response) -> LomBaseItemloader: lom.add_value("technical", self.getLOMTechnical(response).load_item()) lom.add_value("educational", self.getLOMEducational(response).load_item()) lom.add_value("classification", self.getLOMClassification(response).load_item()) + lom.add_value("relation", self.getLOMRelation(response).load_item()) return lom def getBase(self, response=None) -> BaseItemLoader: @@ -180,6 +181,9 @@ def getLicense(self, response=None) -> LicenseItemLoader: def getLOMClassification(self, response=None) -> LomClassificationItemLoader: return LomClassificationItemLoader(response=response) + def getLOMRelation(self, response=None) -> LomRelationItemLoader: + return LomRelationItemLoader(response=response) + def getPermissions(self, response=None) -> PermissionItemLoader: permissions = PermissionItemLoader(response=response) # default all materials to public, needs to be changed depending on the spider! diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index 86b906f3..942ef7c7 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -1,8 +1,11 @@ +import copy import json import time from datetime import datetime from scrapy.spiders import CrawlSpider + +from converter.es_connector import EduSharing from converter.items import * from converter.spiders.lom_base import LomBase from converter.constants import * @@ -10,42 +13,41 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): """ - This crawler fetches data from the Mediothek/Pixiothek. The API request sends all results in one page. The outcome is an JSON array which will be parsed to their elements. + This crawler fetches data from the Mediothek/Pixiothek. The API request sends all results in one page. The outcome + is an JSON array which will be parsed to their elements. - Author: Timur Yure, timur.yure@capgemini.com , Capgemini for Schul-Cloud, Content team. + Author: Ioannis Koumarelas, ioannis.koumarelas@gmail.com , Schul-Cloud, Content team. 
""" name = "mediothek_pixiothek_spider" url = "https://www.schulportal-thueringen.de/" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "MediothekPixiothek" # name as shown in the search ui - version = "0.1" # the version of your crawler, used to identify if a reimport is necessary + version = "0.2" # the version of your crawler, used to identify if a reimport is necessary start_urls = [ "https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" + # Alternatively, you can load the file from a local path + # "file://LOCAL_FILE_PATH" # e.g., file:///data/file.json ] def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) def parse(self, response: scrapy.http.Response): - - # Call Splash only once per page (that contains multiple XML elements). - data = self.getUrlData(response.url) - response.meta["rendered_data"] = data elements = json.loads(response.body_as_unicode()) + prepared_elements = [self.prepare_element(element_dict) for element_dict in elements] + + collection_elements = self.prepare_collections(prepared_elements) - # grouped_elements = self.group_elements_by_medium_id(elements) - grouped_elements = self.group_elements_by_sammlung(elements) + for i, element_dict in enumerate(collection_elements): - for i, element in enumerate(grouped_elements): copyResponse = response.copy() # Passing the dictionary for easier access to attributes. - copyResponse.meta["item"] = element + copyResponse.meta["item"] = element_dict # In case JSON string representation is preferred: - json_str = json.dumps(element, indent=4, sort_keys=True, ensure_ascii=False) + json_str = json.dumps(element_dict, indent=4, sort_keys=True, ensure_ascii=False) copyResponse._set_body(json_str) - print(json_str) if self.hasChanged(copyResponse): yield self.handleEntry(copyResponse) @@ -53,85 +55,6 @@ def parse(self, response: scrapy.http.Response): # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. LomBase.parse(self, copyResponse) - def group_elements_by_medium_id(self, elements): - """ - This method groups the corresponding elements based on their mediumId. This changes the logic so that every - element in the end maps to an educational element in the https://www.schulportal-thueringen.de. - """ - - medium_id_groups = {} - for idx, element in enumerate(elements): - medium_id = element["mediumId"] - - # The first element that has this mediumId creates the representative for this medium. - if medium_id not in medium_id_groups: - medium_id_groups[medium_id] = { - "id": medium_id, - "pts": self.get_or_default(element, "pts"), - "previewImageUrl": self.get_or_default(element, "previewImageUrl"), - "titel": self.get_or_default(element, "einzeltitel"), - "kurzinhalt": self.get_or_default(element, "kurzinhalt"), - "listeStichwort": self.get_or_default(element, "listeStichwort"), - "oeffentlich": self.get_or_default(element, "oeffentlich"), - "downloadUrl": "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" + str(medium_id) - } - - # TODO: Discuss when it makes sense to combine "serientitel" and "einzeltitel"! - # The first element to have a serientitel for this mediumId will save it. The rest will just skip it. 
- if "serientitel" in element and "serientitel" not in medium_id_groups[medium_id]: - medium_id_groups[medium_id]["titel"] = element["serientitel"] - medium_id_groups[medium_id]["serientitel"] = element["serientitel"] - if "einzeltitel" in element: - medium_id_groups[medium_id]["titel"] += " - " + element["einzeltitel"] - medium_id_groups[medium_id]["einzeltitel"] = element["einzeltitel"] - - - grouped_elements = [medium_id_groups[medium_id] for medium_id in medium_id_groups] - - return grouped_elements - - def group_elements_by_sammlung(self, elements): - """ - In this method we identify elements that have a keyword (Stichwort) ending in "collection" (sammlung). - These elements are parents of other elements that have a serienTitel same as the einzeltitel of these collection - items. Then, we remove these children from the elements and we only have collections or single items, not part - of any collection. - """ - - # Step 1 - Identify collection elements - collections_elements = set() - for idx, element in enumerate(elements): - keywords = element["listeStichwort"] - element_collections_keywords = set() - for keyword in keywords: - if keyword.endswith("sammlung"): - element_collections_keywords.add(keyword) - break - if len(element_collections_keywords) > 0: - collections_elements.add(idx) - - # Step 2 - Get a dictionary of "Einzeltitel" --> element index, for the collection elements. - # collections_einzeltitel = {elements[idx]["einzeltitel"]: idx for idx in collections_elements} - collections_einzeltitel = {} - for idx in collections_elements: - collection_einzeltitel = elements[idx]["einzeltitel"] - if collection_einzeltitel not in collections_einzeltitel: - collections_einzeltitel[collection_einzeltitel] = list() - collections_einzeltitel[collection_einzeltitel].append(elements[idx]) - # if "serientitel" in elements[idx]: - # collections_einzeltitel[collection_einzeltitel].append(elements[idx]["serientitel"]) - # else: - # collections_einzeltitel[collection_einzeltitel].append(None) - print("hi") - - - - - def get_or_default(self, element, attribute, default_value=""): - if attribute in element: - return element[attribute] - else: - return default_value def getId(self, response): # Element response as a Python dict. @@ -142,11 +65,13 @@ def getId(self, response): def getHash(self, response): # Element response as a Python dict. element_dict = response.meta["item"] - # presentation timestamp (PTS) id = element_dict["id"] + + # presentation timestamp (PTS) pts = element_dict["pts"] + # date_object = datetime.strptime(hash, "%Y-%m-%d %H:%M:%S.%f").date() - return id + pts + return hash(hash(id) + hash(pts)) def mapResponse(self, response): r = ResponseItemLoader(response=response) @@ -168,6 +93,8 @@ def getBase(self, response): # portal." base.add_value("thumbnail", element_dict["previewImageUrl"]) + base.add_value("searchable", element_dict.get("searchable", "0")) + return base def getLOMGeneral(self, response): @@ -176,9 +103,9 @@ def getLOMGeneral(self, response): # Element response as a Python dict. element_dict = response.meta["item"] - # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? 
- # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel - general.add_value("title", element_dict["titel"]) + general.add_value("title", element_dict["title"]) + + general.add_value("aggregationLevel", element_dict["aggregation_level"]) # self._if_exists_add(general, element_dict, "description", "kurzinhalt") if "kurzinhalt" in element_dict: @@ -241,4 +168,245 @@ def getPermissions(self, response): else: permissions.add_value('groups', ['Thuringia-public']) - return permissions \ No newline at end of file + return permissions + + + def getLOMRelation(self, response=None) -> LomRelationItemLoader: + """ + Helps implement collections using relations as described in the LOM-DE.doc#7 (Relation) specifications: + http://sodis.de/lom-de/LOM-DE.doc . + """ + relation = LomBase.getLOMRelation(self, response) + + # Element response as a Python dict. + element_dict = response.meta["item"] + + relation.add_value("kind", element_dict["relation"][0]["kind"]) + + resource = LomRelationResourceItem() + resource["identifier"] = element_dict["relation"][0]["resource"]["identifier"] + relation.add_value("resource", resource) + + return relation + + def prepare_collections(self, prepared_elements): + """ + Prepares Mediothek and Pixiothek collections according to their strategies. + """ + mediothek_elements = [] + pixiothek_elements = [] + for element_dict in prepared_elements: + if element_dict["pixiothek"] == "1": + pixiothek_elements.append(element_dict) + else: + mediothek_elements.append(element_dict) + + max_id = int(max(prepared_elements, key=lambda x: int(x["id"]))["id"]) + + pixiothek_elements_grouped, mediothek_elements, max_id = \ + self.group_pixiothek_elements(pixiothek_elements, mediothek_elements, max_id) + + mediothek_elements_grouped, max_id = self.group_mediothek_elements(mediothek_elements, max_id) + + collection_elements = [] + collection_elements.extend(pixiothek_elements_grouped) + collection_elements.extend(mediothek_elements_grouped) + + return collection_elements + + def group_by_elements(self, elements, group_by): + """ + This method groups the corresponding elements based on the provided group_by parameter. This changes the logic + so that every element in the end maps to an educational element in the https://www.schulportal-thueringen.de. + """ + groups = {} + for idx, element in enumerate(elements): + if group_by not in element: + logging.debug("Element " + str(element["id"]) + " does not contain information about " + group_by) + continue + group_by_value = element[group_by] + if group_by_value not in groups: + groups[group_by_value] = [] + groups[group_by_value].append(element) + + return groups + + def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements, max_id): + """ + Collection elements in Pixiothek have a "parent" (representative) Mediothek element that describes the whole + collection. Our task in this method is for every Pixiothek group to find its Mediothek element and add the + connections between it and the Pixiothek elements. These Mediothek elements will not be considered as children + of Mediothek collections. + + If we cannot find such a "parent" element among the Mediothek elements, then we select one of them as the + collection parent (representative element) and set some of its attributes accordingly. 
+ """ + + default_download_url = "https://www.schulportal-thueringen.de/html/images/" \ + "themes/tsp2/startseite/banner_phone_startseite.jpg?id=" + + mediothek_default_download_url = "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" + + pixiothek_elements_grouped_by = self.group_by_elements(pixiothek_elements, "serientitel") + + # Group Mediothek elements by einzeltitel. We are going to use this dictionary in the following loop to find + # Pixiothek items that have this value in their serientitel. + mediothek_elements_grouped_by_einzeltitel = self.group_by_elements(mediothek_elements, "einzeltitel") + + single_element_collection_serientitel = "Mediensammlungen zur freien Verwendung im Bildungsbereich" + + collection_elements = [] + + edusharing = EduSharing() + + # Keeping track of "parent" (representative) elements to remove them from the Mediothek elements. + parent_mediothek_elements = set() + + # Generate new "representative" (parent) element. + for group_by_key, group in pixiothek_elements_grouped_by.items(): + serientitel = None + if "serientitel" in group[0]: + serientitel = group[0]["serientitel"] + + # If a single Mediothek element exists with the same einzeltitel as this group's serientitel, then we shall use it + # as the parent element of this collection. + if serientitel in mediothek_elements_grouped_by_einzeltitel and \ + len(mediothek_elements_grouped_by_einzeltitel[serientitel]) == 1 and \ + mediothek_elements_grouped_by_einzeltitel[serientitel][0]["id"] not in parent_mediothek_elements: # Is not used as a parent of another collection. + + parent_element = copy.deepcopy(mediothek_elements_grouped_by_einzeltitel[serientitel][0]) + parent_mediothek_elements.add(parent_element["id"]) + parent_element["title"] = parent_element["einzeltitel"] + parent_element["downloadUrl"] = mediothek_default_download_url + str(parent_element["mediumId"]) + + # If the found Mediothek element has a serientitel equal to a predefined value, which indicates that + # this is a collection item (which should normally be a parent and not a single element), we treat + # specially and set the title equal to the einzeltitel, which already describes the collection. + if parent_element["serientitel"] == single_element_collection_serientitel: + group.append(copy.deepcopy(mediothek_elements_grouped_by_einzeltitel[serientitel][0])) + + # Else, we shall use any random element of this group as the parent element. + else: + parent_element = copy.deepcopy(group[0]) + + # We need to assign a new ID, different from the previous ones. + max_id += 1 + parent_element["id"] = str(max_id) + + # Assign a fake URL that we can still recognize if we ever want to allow the access of the collection + # content. + parent_element["downloadUrl"] = default_download_url + str(max_id) + parent_element["title"] = parent_element["serientitel"] + + parent_element["searchable"] = 1 + parent_element["aggregation_level"] = 2 + parent_element["uuid"] = edusharing.buildUUID(parent_element["downloadUrl"]) + + for element in group: + element["searchable"] = 0 + element["aggregation_level"] = 1 + element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) + + # Add connections from parent to children elements. + parent_element, group = self.relate_parent_with_children_elements(parent_element, group) + + collection_elements.append(parent_element) + collection_elements.extend(group) + + # Remove Mediothek elements which were used as parents. 
We go in reverse mode as only then the indices keep + # making sense as we keep deleting elements. The other way around, every time you delete an element the + # consequent indices are not valid anymore. + for i in reversed(range(len(mediothek_elements))): + if mediothek_elements[i]["id"] in parent_mediothek_elements: + del (mediothek_elements[i]) + + return collection_elements, mediothek_elements, max_id + + def group_mediothek_elements(self, mediothek_elements, max_id): + """ + Collection elements in Mediothek have no special element to represent them (a parent element). Therefore, we + select one of them as the collection representative (parent element) and set some of its attributes accordingly. + """ + mediothek_default_download_url = "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" + + mediothek_elements_grouped_by = self.group_by_elements(mediothek_elements, "mediumNummer") + + # Specifies a special case when a + single_element_collection_serientitel = "Mediensammlungen zur freien Verwendung im Bildungsbereich" + + collection_elements = [] + + edusharing = EduSharing() # Used to generate UUIDs. + + # Generate new "parent" (representative) element. + for group_by_key, group in mediothek_elements_grouped_by.items(): + parent_element = copy.deepcopy(group[0]) + + # We need to assign a new ID, different from the previous ones. + max_id += 1 + parent_element["id"] = str(max_id) + parent_element["downloadUrl"] = mediothek_default_download_url + str(parent_element["mediumId"]) + + # In case we only have a single element in the collection AND its value in the serientitel is equal to a + # predefined value, which indicates that this is a collection (parent and not a single element), we treat + # this case different and set the title equal to the einzeltitel, which already describes the collection. + if len(group) == 1 and "serientitel" in parent_element and \ + parent_element["serientitel"] == single_element_collection_serientitel: + parent_element["title"] = parent_element["einzeltitel"] + else: + if "serientitel" in parent_element: + parent_element["title"] = parent_element["serientitel"] + else: + parent_element["title"] = parent_element["einzeltitel"] + + parent_element["searchable"] = 1 + parent_element["aggregation_level"] = 2 + parent_element["uuid"] = edusharing.buildUUID(parent_element["downloadUrl"]) + + for element in group: + element["searchable"] = 0 + element["aggregation_level"] = 1 + element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) + + # Add connections from parent to children elements. + parent_element, group = self.relate_parent_with_children_elements(parent_element, group) + + collection_elements.append(parent_element) + collection_elements.extend(group) + + return collection_elements, max_id + + def relate_parent_with_children_elements(self, parent_element, children_elements): + # Add connections from "parent" to "children" elements. + parent_element["relation"] = [ + { + "kind": "haspart", + "resource": { + "identifier": [ + # Use the ccm:replicationsourceuuid to refer to the children elements. + element["uuid"] for element in children_elements + ] + } + } + ] + + # Add connections from "children" elements to "parent". + for element in children_elements: + element["relation"] = [ + { + "kind": "ispartof", + "resource": { + # Use the ccm:replicationsourceuuid to refer to the parent element. 
+ "identifier": [parent_element["uuid"]] + } + } + ] + return parent_element, children_elements + + def prepare_element(self, element_dict): + # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? + # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel + # Please keep in mind that we override this value for parent elements of collections. + element_dict["title"] = element_dict["einzeltitel"] + + return element_dict \ No newline at end of file diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 3a10707e..8a80de66 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -156,6 +156,9 @@ def getBase(self, response): base.add_value("defaultThumbnail", "https://merlin.nibis.de" + element_dict[default_thumbnail]) break + # Adding a default searchable value to constitute this element (node) as a valid-to-be-returned object. + base.add_value("searchable", "1") + return base def getLOMGeneral(self, response): @@ -165,6 +168,9 @@ def getLOMGeneral(self, response): "description", response.xpath("/data/beschreibung/text()").get() ) + # Adding a default aggregationLevel, which can be used during filtering queries. + general.add_value("aggregationLevel", "1") + return general def getUri(self, response): diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 2edd774c..bf8fdeed 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -18,8 +18,17 @@ def __init__(self, **kwargs): def getBase(self, response): base = EduSharingBase.getBase(self, response) base.replace_value("type", self.getProperty("ccm:objecttype", response)) + + # Adding a default searchable value to constitute this element (node) as a valid-to-be-returned object. + base.replace_value("searchable", "1") return base + def getLOMGeneral(self, response): + general = EduSharingBase.getLOMGeneral(self, response) + + # Adding a default aggregationLevel, which can be used during filtering queries. + general.replace_value("aggregationLevel", "1") + return general def getLOMTechnical(self, response): technical = EduSharingBase.getLOMTechnical(self, response) @@ -28,7 +37,6 @@ def getLOMTechnical(self, response): technical.replace_value("location", response.meta["item"]["properties"]["ccm:wwwurl"][0]) return technical - def shouldImport(self, response=None): if "ccm:collection_io_reference" in response.meta["item"]["aspects"]: logging.info( From 3e22a41fbaf53afcd949d26ca1383bb29f02d49a Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Tue, 8 Dec 2020 14:23:55 +0100 Subject: [PATCH 017/590] Fixes Mediothek request code after merging latest OEH code. 
--- converter/spiders/mediothek_pixiothek_spider.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index 942ef7c7..9a949b45 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -23,15 +23,19 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): url = "https://www.schulportal-thueringen.de/" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "MediothekPixiothek" # name as shown in the search ui version = "0.2" # the version of your crawler, used to identify if a reimport is necessary - start_urls = [ - "https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" - # Alternatively, you can load the file from a local path - # "file://LOCAL_FILE_PATH" # e.g., file:///data/file.json - ] + apiUrl = "https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" + # Alternatively, you can load the file from a local path + # "file://LOCAL_FILE_PATH" # e.g., file:///data/file.json def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) + def start_requests(self): + yield scrapy.Request( + url=self.apiUrl, + callback=self.parse, + ) + def parse(self, response: scrapy.http.Response): elements = json.loads(response.body_as_unicode()) prepared_elements = [self.prepare_element(element_dict) for element_dict in elements] From beb509e6571e493efac419920be9b6f61b3800ca Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Fri, 11 Dec 2020 09:31:35 +0100 Subject: [PATCH 018/590] SC-7761 - Changes to make extra fields known to Alfresco through ccmodel.xml --- converter/es_connector.py | 19 +++++++---- .../metadatasets/ccmodel_hpischulcloud.xml | 30 +++++++++++++++++ schulcloud/metadatasets/mds_oeh_override.xml | 32 +++++++++++++++++++ 3 files changed, 75 insertions(+), 6 deletions(-) create mode 100644 schulcloud/metadatasets/ccmodel_hpischulcloud.xml diff --git a/converter/es_connector.py b/converter/es_connector.py index a9057c95..ed8379f8 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -224,8 +224,8 @@ def transformItem(self, uuid, spider, item): "ccm:wwwurl": item["lom"]["technical"]["location"], "cclom:location": item["lom"]["technical"]["location"], "cclom:title": item["lom"]["general"]["title"], - "cclom:general_aggregationlevel": item["lom"]["general"]["aggregationLevel"], - "ccm:searchable": item["searchable"], + "ccm:hpi_lom_general_aggregationlevel": str(item["lom"]["general"]["aggregationLevel"]), + "ccm:hpi_searchable": str(item["searchable"]), } if "notes" in item: spaces["ccm:notes"] = item["notes"] @@ -325,13 +325,20 @@ def transformItem(self, uuid, spider, item): # Relation information, according to the LOM-DE.doc#7 specifications: http://sodis.de/lom-de/LOM-DE.doc if "relation" in item["lom"]: - spaces["cclom:relation"] = item["lom"]["relation"] + spaces["ccm:hpi_lom_relation"] = item["lom"]["relation"] # Since Edu-Sharing has no further information about the schema of this attribute it is better to treat it # as a list of strings and not as a JSON. - for i, element in enumerate(spaces["cclom:relation"]): - relation_value = str(element).replace("\n", "").replace("\r", "") + for i, element in enumerate(spaces["ccm:hpi_lom_relation"]): + # JSON expects double quotes. 
+ element_str = str(element).replace("\'", "\"") + # JSON to Python dictionary + element_dict = json.loads(element_str) + + # We expect and prefer single quotes in the result. + relation_value = json.dumps(element_dict, sort_keys=True).replace("\"", "\'") + # Remove redundant white spaces. relation_value = ' '.join(relation_value.split()) - spaces["cclom:relation"][i] = relation_value + spaces["ccm:hpi_lom_relation"][i] = relation_value return spaces diff --git a/schulcloud/metadatasets/ccmodel_hpischulcloud.xml b/schulcloud/metadatasets/ccmodel_hpischulcloud.xml new file mode 100644 index 00000000..fe64d073 --- /dev/null +++ b/schulcloud/metadatasets/ccmodel_hpischulcloud.xml @@ -0,0 +1,30 @@ + + + + aspect for hpi + + + d:text + true + + false + + + + + d:text + true + + false + + + + + d:text + true + + false + + + + \ No newline at end of file diff --git a/schulcloud/metadatasets/mds_oeh_override.xml b/schulcloud/metadatasets/mds_oeh_override.xml index 3f43700c..4d6db93b 100644 --- a/schulcloud/metadatasets/mds_oeh_override.xml +++ b/schulcloud/metadatasets/mds_oeh_override.xml @@ -8,6 +8,22 @@ true OR + + + true + OR + + + + true + OR + + + + true + OR + + @@ -16,6 +32,22 @@ true OR + + + true + OR + + + + true + OR + + + + true + OR + + \ No newline at end of file From ba2603769226695b99f292695cb3f6094c9700ce Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Thu, 7 Jan 2021 09:03:57 +0100 Subject: [PATCH 019/590] =?UTF-8?q?SC-8346=20-=20Replaces=20Mediothek=20co?= =?UTF-8?q?llection=20elements'=20title=20to=20their=20file=E2=80=A6=20(#1?= =?UTF-8?q?9)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * SC-8346 - Replaces Mediothek collection elements' title to their filename From Einzeltitel we now use the filename, without the extension, as the title for Mediothek collection elements. * SC-8346 - Changing serientitel to einzeltitel for Mediothek collection parent elements. * SC-8346 - Remove underscores. * SC-8346 - Consistent ID assignment for generated collection parents Co-authored-by: Ioannis Koumarelas --- .../spiders/mediothek_pixiothek_spider.py | 65 ++++++++++--------- 1 file changed, 34 insertions(+), 31 deletions(-) diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index 9a949b45..52850558 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -1,7 +1,6 @@ import copy import json -import time -from datetime import datetime +import os from scrapy.spiders import CrawlSpider @@ -205,12 +204,10 @@ def prepare_collections(self, prepared_elements): else: mediothek_elements.append(element_dict) - max_id = int(max(prepared_elements, key=lambda x: int(x["id"]))["id"]) + pixiothek_elements_grouped, mediothek_elements = \ + self.group_pixiothek_elements(pixiothek_elements, mediothek_elements) - pixiothek_elements_grouped, mediothek_elements, max_id = \ - self.group_pixiothek_elements(pixiothek_elements, mediothek_elements, max_id) - - mediothek_elements_grouped, max_id = self.group_mediothek_elements(mediothek_elements, max_id) + mediothek_elements_grouped = self.group_mediothek_elements(mediothek_elements) collection_elements = [] collection_elements.extend(pixiothek_elements_grouped) @@ -233,9 +230,13 @@ def group_by_elements(self, elements, group_by): groups[group_by_value] = [] groups[group_by_value].append(element) + # For consistency sort all values per key. 
+ for key in groups.keys(): + groups[key] = sorted(groups[key], key=lambda x: int(x["id"])) + return groups - def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements, max_id): + def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements): """ Collection elements in Pixiothek have a "parent" (representative) Mediothek element that describes the whole collection. Our task in this method is for every Pixiothek group to find its Mediothek element and add the @@ -267,7 +268,8 @@ def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements, max_i parent_mediothek_elements = set() # Generate new "representative" (parent) element. - for group_by_key, group in pixiothek_elements_grouped_by.items(): + for group_by_key in sorted(pixiothek_elements_grouped_by.keys()): + group = pixiothek_elements_grouped_by[group_by_key] serientitel = None if "serientitel" in group[0]: serientitel = group[0]["serientitel"] @@ -293,13 +295,15 @@ def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements, max_i else: parent_element = copy.deepcopy(group[0]) - # We need to assign a new ID, different from the previous ones. - max_id += 1 - parent_element["id"] = str(max_id) + # We need to assign a new ID, different from the previous ones. For this purpose, we decide to modify + # the ID of the existing element and add some suffix to note that this is an artificial element. + # Clearly, such a big number for an ID will have no collisions with existing real elements. + artificial_element_suffix = "000000" + parent_element["id"] = parent_element["id"] + artificial_element_suffix # Assign a fake URL that we can still recognize if we ever want to allow the access of the collection # content. - parent_element["downloadUrl"] = default_download_url + str(max_id) + parent_element["downloadUrl"] = default_download_url + parent_element["id"] parent_element["title"] = parent_element["serientitel"] parent_element["searchable"] = 1 @@ -324,9 +328,9 @@ def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements, max_i if mediothek_elements[i]["id"] in parent_mediothek_elements: del (mediothek_elements[i]) - return collection_elements, mediothek_elements, max_id + return collection_elements, mediothek_elements - def group_mediothek_elements(self, mediothek_elements, max_id): + def group_mediothek_elements(self, mediothek_elements): """ Collection elements in Mediothek have no special element to represent them (a parent element). Therefore, we select one of them as the collection representative (parent element) and set some of its attributes accordingly. @@ -343,25 +347,19 @@ def group_mediothek_elements(self, mediothek_elements, max_id): edusharing = EduSharing() # Used to generate UUIDs. # Generate new "parent" (representative) element. - for group_by_key, group in mediothek_elements_grouped_by.items(): + for group_by_key in sorted(mediothek_elements_grouped_by.keys()): + group = mediothek_elements_grouped_by[group_by_key] parent_element = copy.deepcopy(group[0]) - # We need to assign a new ID, different from the previous ones. - max_id += 1 - parent_element["id"] = str(max_id) + # We need to assign a new ID, different from the previous ones. For this purpose, we decide to modify + # the ID of the existing element and add some suffix to note that this is an artificial element. + # Clearly, such a big number for an ID will have no collisions with existing real elements. 
+ artificial_element_suffix = "000000" + parent_element["id"] = parent_element["id"] + artificial_element_suffix + parent_element["downloadUrl"] = mediothek_default_download_url + str(parent_element["mediumId"]) - # In case we only have a single element in the collection AND its value in the serientitel is equal to a - # predefined value, which indicates that this is a collection (parent and not a single element), we treat - # this case different and set the title equal to the einzeltitel, which already describes the collection. - if len(group) == 1 and "serientitel" in parent_element and \ - parent_element["serientitel"] == single_element_collection_serientitel: - parent_element["title"] = parent_element["einzeltitel"] - else: - if "serientitel" in parent_element: - parent_element["title"] = parent_element["serientitel"] - else: - parent_element["title"] = parent_element["einzeltitel"] + parent_element["title"] = parent_element["einzeltitel"] parent_element["searchable"] = 1 parent_element["aggregation_level"] = 2 @@ -372,13 +370,18 @@ def group_mediothek_elements(self, mediothek_elements, max_id): element["aggregation_level"] = 1 element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) + if "dateiName" in element: + # Remove the file extension + filename, file_extension = os.path.splitext(element["dateiName"]) + element["title"] = filename.replace("_", " ") + # Add connections from parent to children elements. parent_element, group = self.relate_parent_with_children_elements(parent_element, group) collection_elements.append(parent_element) collection_elements.extend(group) - return collection_elements, max_id + return collection_elements def relate_parent_with_children_elements(self, parent_element, children_elements): # Add connections from "parent" to "children" elements. From 857b64944df2568b7fcffe5a5e9081b524eca678 Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Mon, 11 Jan 2021 11:50:20 +0100 Subject: [PATCH 020/590] SC-8375 - Changes for Mediothek collection elements titles using dateiBezeichnung (#20) Co-authored-by: Ioannis Koumarelas --- converter/spiders/mediothek_pixiothek_spider.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index 52850558..d139b6be 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -315,6 +315,8 @@ def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements): element["aggregation_level"] = 1 element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) + element["title"] = element["dateiBezeichnung"] + # Add connections from parent to children elements. parent_element, group = self.relate_parent_with_children_elements(parent_element, group) @@ -370,10 +372,7 @@ def group_mediothek_elements(self, mediothek_elements): element["aggregation_level"] = 1 element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) - if "dateiName" in element: - # Remove the file extension - filename, file_extension = os.path.splitext(element["dateiName"]) - element["title"] = filename.replace("_", " ") + element["title"] = element["dateiBezeichnung"] # Add connections from parent to children elements. 
parent_element, group = self.relate_parent_with_children_elements(parent_element, group) From 8cfe96ba9b508a8a234c894233a845c929596656 Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Fri, 12 Feb 2021 10:45:02 +0100 Subject: [PATCH 021/590] SC-8573 cron execution spiders (#23) * SC-8573 - Cron execution of spiders: Modified crawl_schulcloud.sh to suppress output. Replacing .env.X (for a specific X) with .env Passing environment (dev, prod) as an argument Mailing reports spider executions Using env var $mailx_recipients to send reports. Adding documentation at the top of the script. Fixed hardcoded spider's name Also added some variables to control whether we want to suppress the output or not, execute using nohup (as a background task), etc. Co-authored-by: Ioannis Koumarelas --- crawl_schulcloud.sh | 111 +++++++++++++++++++++++++++++--------------- 1 file changed, 73 insertions(+), 38 deletions(-) diff --git a/crawl_schulcloud.sh b/crawl_schulcloud.sh index a216849b..9bd17241 100644 --- a/crawl_schulcloud.sh +++ b/crawl_schulcloud.sh @@ -1,36 +1,48 @@ #!/bin/bash +# Please execute this script in the following way: +# env mailx_recipients="e-mail1 e-mail2 ... e-mailN" bash crawl_schulcloud.sh +# TIP: This is how you could include it in a cronjob as well. -# This script is used to execute the spiders, while storing their output to log files. +working_dir=/root/oeh-search-etl-branches/master_cron/oeh-search-etl +cd $working_dir +source .venv/bin/activate -# First we store all spiders in an array variable. spiders=( - "br_rss" - "digitallearninglab" - "geogebra" - "irights" - "leifi" - "mediothek_pixiothek" - "memucho" - "merlin" - "oai_sodis" - "planet_schule" - "rlp" - "serlo" - "wirlernenonline" - "wirlernenonline_gsheet" - "zdf_rss" - "zoerr" - "zum" + "mediothek_pixiothek_spider" + "merlin_spider" + "oeh_spider" ) -# Print the spiders that wil be executed (for debugging purposes). -#echo ${spiders[@]} +print_logo=false +show_spider_output=false + +# dev, prod | WARNING: It assumes the existence of .env.dev and .env.prod in the converter/ directory. Please refer to +# .env.example for reference environmental variables. +environment="dev" +if [[ $# -eq 0 ]] ; then + echo 'No environment specified as an argument, defaulting to dev.' +else + environment=$1 + echo "The environment ${environment} was specified." +fi +if ! test -f "converter/.env.$environment"; then + echo "converter/.env.$environment does not exist. Exiting..." + exit 2 +else + echo "Copying converter/.env.$environment to converter/.env" + cp "converter/.env.$environment" "converter/.env" +fi + +# Set to true only when $show_spider_output = false. Please prefer to keep to false, at least for crawlings against the +# production machine. (It causes the execution to run in the background and, thus, multiple spiders will run.) +use_nohup=false # Make the directory "nohups" if it does not already exist. mkdir -p nohups -echo -' +################################### +if [ "$print_logo" = true ] ; then + echo ' ( ) ( @@ -45,25 +57,48 @@ echo (\ |) '---' (| /) ` (| |) ` \) (/ - ____ ________ __ _ __ + ____ ________ __ _ __ / __ \/ ____/ / / / _________ (_)___/ /__ __________ / / / / __/ / /_/ / / ___/ __ \/ / __ / _ \/ ___/ ___/ -/ /_/ / /___/ __ / (__ ) /_/ / / /_/ / __/ / (__ ) -\____/_____/_/ /_/ /____/ .___/_/\__,_/\___/_/ /____/ - /_/ -' +/ /_/ / /___/ __ / (__ ) /_/ / / /_/ / __/ / (__ ) +\____/_____/_/ /_/ /____/ .___/_/\__,_/\___/_/ /____/ + /_/' +fi + # Execute the spiders. 
for spider in ${spiders[@]} do - echo "Executing $spider spider." - - # Execute the spider and save its output to two files: "nohup_SPIDER.out" (individual log) and "nohup.out" (collective logs). - #nohup scrapy crawl ${spider}_spider -a resetVersion=true | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null 2>&1 & - #nohup scrapy crawl ${spider}_spider -a cleanrun=true | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null 2>&1 & - nohup scrapy crawl ${spider}_spider | tee -a nohups/nohup_${spider}.out nohups/nohup.out >/dev/null & 2>&1 - - # Execute the spider in the background. - #scrapy crawl ${spider}_spider & + echo "Executing $spider spider." + + # Execute the spider + if [ "$show_spider_output" = true ] ; then + # ... , save its output to "nohup_SPIDER.out", AND print stdout and stderr. + scrapy crawl ${spider} -a resetVersion=true | tee -a nohups/nohup_${spider}.out + elif [ "$show_spider_output" = false ] && [ "$use_nohup" = true ]; then + # Execute the spider and save its output to two files: "nohup_SPIDER.out" (individual log) and "nohup.out" + # (collective logs). + nohup scrapy crawl ${spider} -a resetVersion=true | tee -a nohups/nohup_${spider}.out \ + nohups/nohup.out >/dev/null 2>&1 & + else # elif [ "$show_spider_output" = false ] && [ "use_nohup" = false ]; then + # ... and save its output to "nohup_SPIDER.out". + scrapy crawl ${spider} -a resetVersion=true &> nohups/nohup_${spider}.out + fi + + echo "Finished execution of $spider spider" + + # If the env var $mailx_recipients is set, please send the report to it. (Could be multiple addresses separated + # via a white spaces). e.g., export mailx_recipients="mail1@hpi.de mail2@hpi.de ... mailN@hpi.de" + if [ ! -z ${mailx_recipients+x} ]; then + echo "Gathering report for $spider spider" + + spider_output=$(tail -n 40 nohups/nohup_${spider}.out) + # Remove everything before and including the string 'INFO: Closing spider (finished)' + spider_output_statistics="*** Report for ${spider} crawling ***"${spider_output#*"INFO: Closing spider (finished)"} + echo "$spider_output_statistics" | mailx -s "${spider} has just finished crawling." ${mailx_recipients} + + echo "Report sent for $spider spider" + fi done -echo "Happy crawling! :-)" + +echo "Finished with all spiders! :-)" \ No newline at end of file From 3306fa4910278ae1af2a27005318637c6da6a152 Mon Sep 17 00:00:00 2001 From: Ioannis Koumarelas Date: Mon, 1 Mar 2021 10:26:18 +0100 Subject: [PATCH 022/590] SC-8701 - Allow passing spider as argument to crawl_schulcloud.sh. (#24) Co-authored-by: Ioannis Koumarelas --- crawl_schulcloud.sh | 84 ++++++++++++++++++++++++++----------------- schulcloud/Dockerfile | 17 ++++----- 2 files changed, 60 insertions(+), 41 deletions(-) diff --git a/crawl_schulcloud.sh b/crawl_schulcloud.sh index 9bd17241..86a0f012 100644 --- a/crawl_schulcloud.sh +++ b/crawl_schulcloud.sh @@ -1,11 +1,19 @@ #!/bin/bash # Please execute this script in the following way: -# env mailx_recipients="e-mail1 e-mail2 ... e-mailN" bash crawl_schulcloud.sh +# bash crawl_schulcloud.sh --arg1 val1 --arg2 "val2 val3" # TIP: This is how you could include it in a cronjob as well. +############################## +# STEP 1: Declaring variables. +print_logo=false +show_spider_output=false + +# Set to true only when $show_spider_output = false. Please prefer to keep to false, at least for crawlings against the +# production machine. (It causes the execution to run in the background and, thus, multiple spiders will run.) 
+use_nohup=false + +## Main variables working_dir=/root/oeh-search-etl-branches/master_cron/oeh-search-etl -cd $working_dir -source .venv/bin/activate spiders=( "mediothek_pixiothek_spider" @@ -13,34 +21,24 @@ spiders=( "oeh_spider" ) -print_logo=false -show_spider_output=false - # dev, prod | WARNING: It assumes the existence of .env.dev and .env.prod in the converter/ directory. Please refer to # .env.example for reference environmental variables. environment="dev" -if [[ $# -eq 0 ]] ; then - echo 'No environment specified as an argument, defaulting to dev.' -else - environment=$1 - echo "The environment ${environment} was specified." -fi -if ! test -f "converter/.env.$environment"; then - echo "converter/.env.$environment does not exist. Exiting..." - exit 2 -else - echo "Copying converter/.env.$environment to converter/.env" - cp "converter/.env.$environment" "converter/.env" -fi -# Set to true only when $show_spider_output = false. Please prefer to keep to false, at least for crawlings against the -# production machine. (It causes the execution to run in the background and, thus, multiple spiders will run.) -use_nohup=false +############################ +# STEP 2: Parsing arguments. +while [[ "$#" -gt 0 ]]; do + case $1 in + -e|--environment) environment="$2"; shift ;; + -s|--spiders) spiders_str=("$2"); spiders=($spiders_str); shift ;; # Convert a double quoted value to an array. + -m|--mailx_recipients) mailx_recipients=("$2"); shift ;; + -w|--working_dir) working_dir="$2"; shift;; -# Make the directory "nohups" if it does not already exist. -mkdir -p nohups + *) echo "Unknown parameter passed: $1"; exit 1 ;; + esac + shift +done -################################### if [ "$print_logo" = true ] ; then echo ' ( @@ -65,23 +63,43 @@ if [ "$print_logo" = true ] ; then /_/' fi +echo "working_dir=$working_dir"; +echo "environment=$environment"; +echo "spiders=${spiders[@]}"; +echo "mailx_recipients=$mailx_recipients"; + +############################## +# STEP 3: Prepare environment. +cd $working_dir +source .venv/bin/activate + +if ! test -f "converter/.env.$environment"; then + echo "converter/.env.$environment does not exist. Exiting..." + exit 2 +else + echo "Copying converter/.env.$environment to converter/.env" + cp "converter/.env.$environment" "converter/.env" +fi + +# Make the directory "nohups" if it does not already exist. +mkdir -p nohups -# Execute the spiders. +############################## +# STEP 4: Execute the spiders. for spider in ${spiders[@]} do echo "Executing $spider spider." # Execute the spider if [ "$show_spider_output" = true ] ; then - # ... , save its output to "nohup_SPIDER.out", AND print stdout and stderr. + # Save its output to "nohup_SPIDER.out" AND print stdout and stderr. scrapy crawl ${spider} -a resetVersion=true | tee -a nohups/nohup_${spider}.out elif [ "$show_spider_output" = false ] && [ "$use_nohup" = true ]; then - # Execute the spider and save its output to two files: "nohup_SPIDER.out" (individual log) and "nohup.out" - # (collective logs). + # Save its output to "nohup_SPIDER.out" (individual log) and "nohup.out". (collective logs) nohup scrapy crawl ${spider} -a resetVersion=true | tee -a nohups/nohup_${spider}.out \ nohups/nohup.out >/dev/null 2>&1 & else # elif [ "$show_spider_output" = false ] && [ "use_nohup" = false ]; then - # ... and save its output to "nohup_SPIDER.out". + # Save its output to "nohup_SPIDER.out". scrapy crawl ${spider} -a resetVersion=true &> nohups/nohup_${spider}.out fi @@ -92,10 +110,10 @@ do if [ ! 
-z ${mailx_recipients+x} ]; then echo "Gathering report for $spider spider" - spider_output=$(tail -n 40 nohups/nohup_${spider}.out) + spider_output=$(tail -n 500 nohups/nohup_${spider}.out) # Remove everything before and including the string 'INFO: Closing spider (finished)' - spider_output_statistics="*** Report for ${spider} crawling ***"${spider_output#*"INFO: Closing spider (finished)"} - echo "$spider_output_statistics" | mailx -s "${spider} has just finished crawling." ${mailx_recipients} + spider_output_statistics="*** Report for ${spider} crawling in ${environment} environment ***"${spider_output#*"INFO: Closing spider (finished)"} + echo "$spider_output_statistics" | mailx -s "${spider} has just finished crawling in ${environment}." ${mailx_recipients} echo "Report sent for $spider spider" fi diff --git a/schulcloud/Dockerfile b/schulcloud/Dockerfile index 6ee1382d..cf6d56d9 100644 --- a/schulcloud/Dockerfile +++ b/schulcloud/Dockerfile @@ -1,17 +1,18 @@ - FROM edusharing/repo-rs-moodle:sc-latest +# FROM edusharing/repo-rs-moodle:sc-latest + FROM edusharing/repo-rs-moodle:6.0-dev # Tested on FROM edusharing/repo-rs-moodle:sc-2a81f4d31 # # Production version, on mv-repo, (0f18b0ce2) not available in Docker Hub: # https://hub.docker.com/r/edusharing/repo-rs-moodle/tags -# Copy the metadatasets file to the container, while changing the permissions to the correct user:group. -COPY --chown=tomcat:tomcat \ - metadatasets/mds_oeh_17_09_2020.xml \ - /usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml/mds_oeh.xml +# # Copy the metadatasets file to the container, while changing the permissions to the correct user:group. +# COPY --chown=tomcat:tomcat \ +# metadatasets/mds_oeh_17_09_2020.xml \ +# /usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml/mds_oeh.xml -COPY --chown=tomcat:tomcat \ - metadatasets/mds_oeh_override.xml \ - /usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml/mds_oeh_override.xml +# COPY --chown=tomcat:tomcat \ +# metadatasets/mds_oeh_override.xml \ +# /usr/local/tomcat/shared/classes/org/edu_sharing/metadataset/v2/xml/mds_oeh_override.xml # Add the script that will wait and add the mds_oeh_24_06_2020.xml at the right time. ADD metadatasets/curl_metadatasetsV2.sh /root/curl_metadatasetsV2.sh From df170766130463e278d674522bec51eed6a33931 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Oct 2021 15:06:47 +0200 Subject: [PATCH 023/590] add umwelt_im_unterricht_spider.py v0.0.1 (WIP!) 
- works in local 'json'-mode - rough first draft; still missing a bunch of metadata-fields (see ToDos) --- .../spiders/umwelt_im_unterricht_spider.py | 277 ++++++++++++++++++ 1 file changed, 277 insertions(+) create mode 100644 converter/spiders/umwelt_im_unterricht_spider.py diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py new file mode 100644 index 00000000..db625b09 --- /dev/null +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -0,0 +1,277 @@ +import logging + +import scrapy +import w3lib.html +from scrapy.spiders import CrawlSpider + +from converter.constants import Constants +from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, \ + PermissionItemLoader +from converter.spiders.base_classes import LomBase + + +class UmweltImUnterrichtSpider(CrawlSpider, LomBase): + """ + Crawler for Umwelt-im-Unterricht.de + (Bundesministerium für Umwelt, Naturschutz und nukleare Sicherheit) + """ + name = "umwelt_im_unterricht_spider" + friendlyName = "Umwelt im Unterricht" + start_urls = [ + # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Atopics", + # # Typ: Thema der Woche + # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons", + # # Typ: Unterrichtsvorschlag + # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Acontexts", + # # Typ: Hintergrund (Kontext) + # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials", + # # Typ: Arbeitsmaterial + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_video", + # Typ: Video + # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images", + # # Typ: Bilderserie + ] + version = "0.0.1" # last update: 2021-10-07 + topic_urls = set() # urls that need to be parsed will be added here + topic_urls_already_parsed = set() # this set is used for 'checking off' already parsed urls + + EDUCATIONAL_CONTEXT_MAPPING: dict = { + # There's only 2 "Zielgruppen": 'Grundschule' and 'Sekundarstufe' + # ToDo: either map Sekundarstufe to both or neither + 'Sekundarstufe': ['Sekundarstufe I', 'Sekundarstufe II'] + } + DISCIPLINE_MAPPING: dict = { + 'Arbeit, Wirtschaft, Technik': 'Arbeitslehre', + 'Ethik, Philosophie, Religion': ['Ethik', 'Philosophie', 'Religion'], + # 'Fächerübergreifend', # ToDo: no mapping available + 'Politik, SoWi, Gesellschaft': ['Politik', 'Sozialkunde', 'Gesellschaftskunde'], + # 'Verbraucherbildung' # ToDo: no mapping available + } + + def getId(self, response=None) -> str: + return response.url + + def getHash(self, response=None) -> str: + date_raw = response.xpath('//div[@class="b-cpsuiu-show-info"]/span/text()').get() + date_cleaned_up = w3lib.html.strip_html5_whitespace(date_raw) + hash_temp = str(date_cleaned_up + self.version) + return hash_temp + + def parse_start_url(self, response, **kwargs): + for url in self.start_urls: + yield scrapy.Request(url=url, callback=self.parse_category_overview_for_individual_topic_urls) + + def parse_category_overview_for_individual_topic_urls(self, response, **kwargs): + # logging.debug(f"INSIDE PARSE CATEGORY METHOD: {response.url}") + topic_urls_raw: list = response.xpath('//a[@class="internal-link readmore"]/@href').getall() + # logging.debug(f"TOPIC URLS (RAW) 
={topic_urls_raw}") + + for url_ending in topic_urls_raw: + self.topic_urls.add(response.urljoin(url_ending)) + # logging.debug(f"TOPIC URLS ({len(self.topic_urls)}) = {self.topic_urls}") + + # if there's a "Letzte"-Button in the overview, there's more topic_urls to be gathered than the initially + # displayed 10 elements + last_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last last"]/a/@href').get() + if last_page_button_url is not None: + last_page_button_url = response.urljoin(last_page_button_url) + # Using the "next page"-button until we reach the last page: + if last_page_button_url != response.url: + next_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last next"]/a/@href').get() + if next_page_button_url is not None: + next_url_to_parse = response.urljoin(next_page_button_url) + yield scrapy.Request(url=next_url_to_parse, + callback=self.parse_category_overview_for_individual_topic_urls) + # if last_page_button_url == response.url: + # logging.debug(f"Reached the last page: {response.url}") + # logging.debug(f"{len(self.topic_urls)} individual topic_urls were found: {self.topic_urls}") + for url in self.topic_urls: + # making sure that we don't accidentally crawl individual pages more than once + if url not in self.topic_urls_already_parsed: + yield scrapy.Request(url=url, callback=self.parse) + self.topic_urls_already_parsed.add(url) + # logging.debug(f"topic_urls after yielding them: {len(self.topic_urls)} --- " + # f"topic_urls_already_parsed: {len(self.topic_urls_already_parsed)}") + + def parse(self, response, **kwargs): + base = BaseItemLoader() + # ALL possible keys for the different Item and ItemLoader-classes can be found inside converter/items.py + + # TODO: fill "base"-keys with values for + # - thumbnail recommended (let splash handle it) + # - publisher optional + base.add_value('sourceId', response.url) + date_raw = response.xpath('//div[@class="b-cpsuiu-show-info"]/span/text()').get() + date_cleaned_up = w3lib.html.strip_html5_whitespace(date_raw) + base.add_value('lastModified', date_cleaned_up) + base.add_value('type', Constants.TYPE_MATERIAL) + # base.add_value('thumbnail', thumbnail_url) + + lom = LomBaseItemloader() + + general = LomGeneralItemloader() + # TODO: fill "general"-keys with values for + # - coverage optional + # - structure optional + # - aggregationLevel optional + general.add_value('identifier', response.url) + title = response.xpath('//div[@class="tx-cps-uiu"]/article/h1/text()').get() + general.add_value('title', title) + keywords = response.xpath('//div[@class="b-cpsuiu-show-keywords"]/ul/li/a/text()').getall() + if len(keywords) >= 1: + general.add_value('keyword', keywords) + description = response.xpath('/html/head/meta[@name="description"]/@content').get() + general.add_value('description', description) + general.add_value('language', 'de') + + lom.add_value('general', general.load_item()) + + technical = LomTechnicalItemLoader() + # TODO: fill "technical"-keys with values for + # - size optional + # - requirement optional + # - installationRemarks optional + # - otherPlatformRequirements optional + technical.add_value('format', 'text/html') + technical.add_value('location', response.url) + lom.add_value('technical', technical.load_item()) + + lifecycle = LomLifecycleItemloader() + # TODO: fill "lifecycle"-keys with values for + # - url recommended + # - email optional + # - uuid optional + lifecycle.add_value('role', 'publisher') + lifecycle.add_value('date', date_cleaned_up) + 
lifecycle.add_value('organization', 'Bundesministerium für Umwelt, Naturschutz und nukleare Sicherheit (BMU)') + lom.add_value('lifecycle', lifecycle.load_item()) + + educational = LomEducationalItemLoader() + # TODO: fill "educational"-keys with values for + # - description recommended (= "Comments on how this learning object is to be used") + # - interactivityType optional + # - interactivityLevel optional + # - semanticDensity optional + # - typicalAgeRange optional + # - difficulty optional + # - typicalLearningTime optional + educational.add_value('language', 'de') + lom.add_value('educational', educational.load_item()) + + # once you've filled "general", "technical", "lifecycle" and "educational" with values, + # the LomBaseItem is loaded into the "base"-BaseItemLoader + base.add_value('lom', lom.load_item()) + + vs = ValuespaceItemLoader() + # for possible values, either consult https://vocabs.openeduhub.de + # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs + # TODO: fill "valuespaces"-keys with values for + # - discipline recommended + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl) + # - intendedEndUserRole recommended + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) + # - learningResourceType recommended + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) + # - conditionsOfAccess recommended + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/conditionsOfAccess.ttl) + # - containsAdvertisement recommended + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/containsAdvertisement.ttl) + # - price recommended + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/price.ttl) + # - educationalContext optional + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/educationalContext.ttl) + # - sourceContentType optional + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/sourceContentType.ttl) + # - toolCategory optional + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/toolCategory.ttl) + # - accessibilitySummary optional + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/accessibilitySummary.ttl) + # - dataProtectionConformity optional + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/dataProtectionConformity.ttl) + # - fskRating optional + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/fskRating.ttl) + # - oer optional + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/oer.ttl) + disciplines_raw = response.xpath('//div[@class="b-cpsuiu-show-subjects"]/ul/li/a/text()').getall() + if len(disciplines_raw) >= 1: + disciplines = list() + for discipline_value in disciplines_raw: + # self.debug_discipline_values.add(discipline_value) + if discipline_value in self.DISCIPLINE_MAPPING.keys(): + discipline_value = self.DISCIPLINE_MAPPING.get(discipline_value) + if type(discipline_value) is list: + disciplines.extend(discipline_value) + else: + disciplines.append(discipline_value) + if len(disciplines) >= 1: + vs.add_value('discipline', disciplines) + + educational_context_raw = response.xpath('//div[@class="b-cpsuiu-show-targets"]/ul/li/a/text()').getall() + if len(educational_context_raw) >= 1: + educational_context = list() + for educational_context_value in educational_context_raw: + # 
self.debug_educational_context_values.add(educational_context_value) + if educational_context_value in self.EDUCATIONAL_CONTEXT_MAPPING.keys(): + educational_context_value = self.EDUCATIONAL_CONTEXT_MAPPING.get(educational_context_value) + if type(educational_context_value) is list: + educational_context.extend(educational_context_value) + else: + educational_context.append(educational_context_value) + if len(educational_context) >= 1: + vs.add_value('educationalContext', educational_context) + + base.add_value('valuespaces', vs.load_item()) + + lic = LicenseItemLoader() + # TODO: fill "license"-keys with values for + # - oer recommended ('oer' is automatically set if the 'url'-field above + # is recognized in LICENSE_MAPPINGS: for possible url-mapping values, please take a look at + # LICENSE_MAPPINGS in converter/constants.py) + # - author recommended + # - internal optional + # - expirationDate optional (for content that expires, e.g. ÖR-Mediatheken) + license_url = response.xpath('//div[@class="cc-licence-info"]/p/a[@rel="license"]/@href').get() + if license_url is not None: + lic.add_value('url', license_url) + + license_description_raw = response.xpath('//div[@class="cc-licence-info"]').get() + if license_description_raw is not None: + license_description_raw = w3lib.html.remove_tags(license_description_raw) + license_description_raw = w3lib.html.replace_escape_chars(license_description_raw, which_ones="\n", + replace_by=" ") + license_description_raw = w3lib.html.replace_escape_chars(license_description_raw) + license_description = " ".join(license_description_raw.split()) + lic.add_value('description', license_description) + base.add_value('license', lic.load_item()) + + # Either fill the PermissionItemLoader manually (not necessary most of the times) + permissions = PermissionItemLoader() + # or (preferably) call the inherited getPermissions(response)-method + # from converter/spiders/base_classes/lom_base.py by using super().: + # permissions = super().getPermissions(response) + # TODO: if necessary, add/replace values for the following "permissions"-keys + # - public optional + # - groups optional + # - mediacenters optional + # - autoCreateGroups optional + # - autoCreateMediacenters optional + base.add_value('permissions', permissions.load_item()) + + # Either fill the ResponseItemLoader manually (not necessary most of the time) + # response_loader = ResponseItemLoader() + # or (preferably) call the inherited mapResponse(response)-method + # from converter/spiders/base_classes/lom_base.py by using super().: + response_loader = super().mapResponse(response) + # TODO: if necessary, add/replace values for the following "response"-keys + # - url required + # - status optional + # - html optional + # - text optional + # - headers optional + # - cookies optional + # - har optional + base.add_value('response', response_loader.load_item()) + + # once all scrapy.Item are loaded into our "base", we yield the BaseItem by calling the .load_item() method + yield base.load_item() From ea688d56d7b78129c9e533655a940ea54bea26a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Oct 2021 19:45:55 +0200 Subject: [PATCH 024/590] add LomClassificationItem to sample_spider_alternative.py - initially forgot to add 'classification' to the spider blueprint since it was never used anywhere --- converter/spiders/sample_spider_alternative.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git 
a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 8b1cd07f..5eee54ec 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -4,7 +4,7 @@ from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, \ - PermissionItemLoader + PermissionItemLoader, LomClassificationItemLoader from converter.spiders.base_classes import LomBase @@ -70,6 +70,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - LomTechnicalItem required # - LomLifeCycleItem required (multiple possible) # - LomEducationalItem required + # - LomClassificationItem optional general = LomGeneralItemloader() # TODO: fill "general"-keys with values for @@ -132,6 +133,15 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - typicalLearningTime optional lom.add_value('educational', educational.load_item()) + classification = LomClassificationItemLoader() + # TODO: fill "classification"-keys with values for + # - cost optional + # - purpose optional + # - taxonPath optional + # - description optional + # - keyword optional + lom.add_value('classification', classification.load_item()) + # once you've filled "general", "technical", "lifecycle" and "educational" with values, # the LomBaseItem is loaded into the "base"-BaseItemLoader base.add_value('lom', lom.load_item()) From 251dd53aa711ac3d9ca83d99d0c6cc5499a6ce1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Oct 2021 21:03:55 +0200 Subject: [PATCH 025/590] umwelt_im_unterricht_spider.py (WIP!) 
- fill up most of the remaining metadata-fields -- ToDo: doublecheck the remaining questionable fields - add classification.description (for classification.purpose:'competency' and purpose:'educational objective') --- .../spiders/umwelt_im_unterricht_spider.py | 145 ++++++++---------- 1 file changed, 61 insertions(+), 84 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index db625b09..40ca2d15 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -1,13 +1,11 @@ -import logging - import scrapy import w3lib.html from scrapy.spiders import CrawlSpider from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, \ - PermissionItemLoader + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, \ + LomClassificationItemLoader from converter.spiders.base_classes import LomBase @@ -21,24 +19,22 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): start_urls = [ # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Atopics", # # Typ: Thema der Woche - # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons", - # # Typ: Unterrichtsvorschlag + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons", + # Typ: Unterrichtsvorschlag # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Acontexts", # # Typ: Hintergrund (Kontext) # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials", # # Typ: Arbeitsmaterial - "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_video", - # Typ: Video + # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_video", + # # Typ: Video # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images", # # Typ: Bilderserie ] - version = "0.0.1" # last update: 2021-10-07 + version = "0.0.1" # last update: 2021-10-07 topic_urls = set() # urls that need to be parsed will be added here - topic_urls_already_parsed = set() # this set is used for 'checking off' already parsed urls + topic_urls_already_parsed = set() # this set is used for 'checking off' already parsed urls EDUCATIONAL_CONTEXT_MAPPING: dict = { - # There's only 2 "Zielgruppen": 'Grundschule' and 'Sekundarstufe' - # ToDo: either map Sekundarstufe to both or neither 'Sekundarstufe': ['Sekundarstufe I', 'Sekundarstufe II'] } DISCIPLINE_MAPPING: dict = { @@ -50,19 +46,16 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): } def getId(self, response=None) -> str: - return response.url + pass def getHash(self, response=None) -> str: - date_raw = response.xpath('//div[@class="b-cpsuiu-show-info"]/span/text()').get() - date_cleaned_up = w3lib.html.strip_html5_whitespace(date_raw) - hash_temp = str(date_cleaned_up + self.version) - return hash_temp + pass def parse_start_url(self, response, **kwargs): for url in self.start_urls: yield scrapy.Request(url=url, callback=self.parse_category_overview_for_individual_topic_urls) - def parse_category_overview_for_individual_topic_urls(self, response, **kwargs): + def parse_category_overview_for_individual_topic_urls(self, response): # 
logging.debug(f"INSIDE PARSE CATEGORY METHOD: {response.url}") topic_urls_raw: list = response.xpath('//a[@class="internal-link readmore"]/@href').getall() # logging.debug(f"TOPIC URLS (RAW) ={topic_urls_raw}") @@ -95,15 +88,14 @@ def parse_category_overview_for_individual_topic_urls(self, response, **kwargs): # f"topic_urls_already_parsed: {len(self.topic_urls_already_parsed)}") def parse(self, response, **kwargs): + current_url: str = response.url base = BaseItemLoader() - # ALL possible keys for the different Item and ItemLoader-classes can be found inside converter/items.py - # TODO: fill "base"-keys with values for - # - thumbnail recommended (let splash handle it) - # - publisher optional base.add_value('sourceId', response.url) date_raw = response.xpath('//div[@class="b-cpsuiu-show-info"]/span/text()').get() date_cleaned_up = w3lib.html.strip_html5_whitespace(date_raw) + hash_temp = str(date_cleaned_up + self.version) + base.add_value('hash', hash_temp) base.add_value('lastModified', date_cleaned_up) base.add_value('type', Constants.TYPE_MATERIAL) # base.add_value('thumbnail', thumbnail_url) @@ -112,7 +104,6 @@ def parse(self, response, **kwargs): general = LomGeneralItemloader() # TODO: fill "general"-keys with values for - # - coverage optional # - structure optional # - aggregationLevel optional general.add_value('identifier', response.url) @@ -128,22 +119,14 @@ def parse(self, response, **kwargs): lom.add_value('general', general.load_item()) technical = LomTechnicalItemLoader() - # TODO: fill "technical"-keys with values for - # - size optional - # - requirement optional - # - installationRemarks optional - # - otherPlatformRequirements optional technical.add_value('format', 'text/html') technical.add_value('location', response.url) lom.add_value('technical', technical.load_item()) lifecycle = LomLifecycleItemloader() - # TODO: fill "lifecycle"-keys with values for - # - url recommended - # - email optional - # - uuid optional lifecycle.add_value('role', 'publisher') lifecycle.add_value('date', date_cleaned_up) + lifecycle.add_value('url', "https://www.umwelt-im-unterricht.de/impressum/") lifecycle.add_value('organization', 'Bundesministerium für Umwelt, Naturschutz und nukleare Sicherheit (BMU)') lom.add_value('lifecycle', lifecycle.load_item()) @@ -159,40 +142,56 @@ def parse(self, response, **kwargs): educational.add_value('language', 'de') lom.add_value('educational', educational.load_item()) - # once you've filled "general", "technical", "lifecycle" and "educational" with values, - # the LomBaseItem is loaded into the "base"-BaseItemLoader + # ToDo: didactic_comment / competencies + classification = LomClassificationItemLoader() + + if "/wochenthemen/" in current_url: + classification.add_value('purpose', 'educational objective') + # didactic comments are only part of "Thema der Woche" + didactic_comment = response.xpath('//div[@class="c-collapse-content js-collapse-content"]').get() + if didactic_comment is not None: + didactic_comment = w3lib.html.remove_tags(didactic_comment) + # didactic_comment = w3lib.html.replace_escape_chars(didactic_comment, which_ones='\t', replace_by=" ") + # didactic_comment = w3lib.html.replace_escape_chars(didactic_comment) + didactic_comment = " ".join(didactic_comment.split()) + if didactic_comment.endswith(".mehr lesenweniger lesen"): + didactic_comment = didactic_comment.replace("mehr lesenweniger lesen", "") + # ToDo: make sure which string format looks best in edu-sharing (cleaned up <-> with escape chars) + 
classification.add_value('description', didactic_comment) + + if "/unterrichtsvorschlaege/" in current_url: + classification.add_value('purpose', 'competency') + competency_description: list = response.xpath('//div[@class="b-cpsuiu-show-description"]/*[not(' + '@class="cc-licence-info")]').getall() + # competency_description will grab the whole div-element, but EXCLUDE the "license"-container + if len(competency_description) >= 1: + # only if the list of strings is not empty, we'll try to type-convert it to a string (and clean its + # formatting up) + competency_description: str = " ".join(competency_description) + competency_description = w3lib.html.remove_tags(competency_description) + classification.add_value('description', competency_description) + + lom.add_value('classification', classification.load_item()) + base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() # for possible values, either consult https://vocabs.openeduhub.de # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs # TODO: fill "valuespaces"-keys with values for - # - discipline recommended - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl) - # - intendedEndUserRole recommended - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) # - learningResourceType recommended # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) - # - conditionsOfAccess recommended - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/conditionsOfAccess.ttl) - # - containsAdvertisement recommended - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/containsAdvertisement.ttl) - # - price recommended - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/price.ttl) - # - educationalContext optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/educationalContext.ttl) - # - sourceContentType optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/sourceContentType.ttl) - # - toolCategory optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/toolCategory.ttl) - # - accessibilitySummary optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/accessibilitySummary.ttl) - # - dataProtectionConformity optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/dataProtectionConformity.ttl) - # - fskRating optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/fskRating.ttl) - # - oer optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/oer.ttl) + vs.add_value('price', 'no') + vs.add_value('containsAdvertisement', 'no') + vs.add_value('conditionsOfAccess', 'no login') + vs.add_value('intendedEndUserRole', 'teacher') + vs.add_value('sourceContentType', 'Unterrichtsmaterial- und Aufgaben-Sammlung') + vs.add_value('accessibilitySummary', 'Not tested') # ToDo: check if the accessibility has changed + # see: https://www.umwelt-im-unterricht.de/erklaerung-zur-barrierefreiheit/ + vs.add_value('dataProtectionConformity', 'Sensible data collection') # ToDo: DSGVO-konform? + # see: https://www.umwelt-im-unterricht.de/datenschutz/ + vs.add_value('oer', 'partly OER') # ToDo: alles OER? nur teils? wie setzen? 
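+        # (English gloss of the ToDo above: is all of the content OER, only partly, and how should this flag be set?)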
+ # see: https://www.umwelt-im-unterricht.de/ueber-umwelt-im-unterricht/ disciplines_raw = response.xpath('//div[@class="b-cpsuiu-show-subjects"]/ul/li/a/text()').getall() if len(disciplines_raw) >= 1: disciplines = list() @@ -228,9 +227,6 @@ def parse(self, response, **kwargs): # - oer recommended ('oer' is automatically set if the 'url'-field above # is recognized in LICENSE_MAPPINGS: for possible url-mapping values, please take a look at # LICENSE_MAPPINGS in converter/constants.py) - # - author recommended - # - internal optional - # - expirationDate optional (for content that expires, e.g. ÖR-Mediatheken) license_url = response.xpath('//div[@class="cc-licence-info"]/p/a[@rel="license"]/@href').get() if license_url is not None: lic.add_value('url', license_url) @@ -240,37 +236,18 @@ def parse(self, response, **kwargs): license_description_raw = w3lib.html.remove_tags(license_description_raw) license_description_raw = w3lib.html.replace_escape_chars(license_description_raw, which_ones="\n", replace_by=" ") + # if we would replace_escape_chars() straight away, there would be words stuck together that don't belong + # together. just replacing \n with a whitespace is enough to keep the structure of the string intact. license_description_raw = w3lib.html.replace_escape_chars(license_description_raw) license_description = " ".join(license_description_raw.split()) + # making sure that there's only 1 whitespace between words, not 4+ when the original string had serveral \t lic.add_value('description', license_description) base.add_value('license', lic.load_item()) - # Either fill the PermissionItemLoader manually (not necessary most of the times) - permissions = PermissionItemLoader() - # or (preferably) call the inherited getPermissions(response)-method - # from converter/spiders/base_classes/lom_base.py by using super().: - # permissions = super().getPermissions(response) - # TODO: if necessary, add/replace values for the following "permissions"-keys - # - public optional - # - groups optional - # - mediacenters optional - # - autoCreateGroups optional - # - autoCreateMediacenters optional + permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - # Either fill the ResponseItemLoader manually (not necessary most of the time) - # response_loader = ResponseItemLoader() - # or (preferably) call the inherited mapResponse(response)-method - # from converter/spiders/base_classes/lom_base.py by using super().: response_loader = super().mapResponse(response) - # TODO: if necessary, add/replace values for the following "response"-keys - # - url required - # - status optional - # - html optional - # - text optional - # - headers optional - # - cookies optional - # - har optional base.add_value('response', response_loader.load_item()) # once all scrapy.Item are loaded into our "base", we yield the BaseItem by calling the .load_item() method From 798b80723e925cb5bec032a1be3eecf689b8068d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Oct 2021 11:46:14 +0200 Subject: [PATCH 026/590] umwelt_im_unterricht_spider.py (WIP!) 
- move 'didactic_comment' to educational.description - after feedback: clarified the remaining metadata questions from the ToDo-list - next ToDo: set learningResourceType depending on which material-type is currently getting crawled --- .../spiders/umwelt_im_unterricht_spider.py | 53 ++++++------------- 1 file changed, 16 insertions(+), 37 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index 40ca2d15..abdfeb9d 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -19,8 +19,8 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): start_urls = [ # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Atopics", # # Typ: Thema der Woche - "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons", - # Typ: Unterrichtsvorschlag + # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons", + # # Typ: Unterrichtsvorschlag # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Acontexts", # # Typ: Hintergrund (Kontext) # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials", @@ -40,9 +40,8 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): DISCIPLINE_MAPPING: dict = { 'Arbeit, Wirtschaft, Technik': 'Arbeitslehre', 'Ethik, Philosophie, Religion': ['Ethik', 'Philosophie', 'Religion'], - # 'Fächerübergreifend', # ToDo: no mapping available - 'Politik, SoWi, Gesellschaft': ['Politik', 'Sozialkunde', 'Gesellschaftskunde'], - # 'Verbraucherbildung' # ToDo: no mapping available + 'Fächerübergreifend': 'Allgemein', # ToDo: no mapping available + 'Politik, SoWi, Gesellschaft': ['Politik', 'Sozialkunde', 'Gesellschaftskunde'] } def getId(self, response=None) -> str: @@ -103,9 +102,6 @@ def parse(self, response, **kwargs): lom = LomBaseItemloader() general = LomGeneralItemloader() - # TODO: fill "general"-keys with values for - # - structure optional - # - aggregationLevel optional general.add_value('identifier', response.url) title = response.xpath('//div[@class="tx-cps-uiu"]/article/h1/text()').get() general.add_value('title', title) @@ -131,22 +127,12 @@ def parse(self, response, **kwargs): lom.add_value('lifecycle', lifecycle.load_item()) educational = LomEducationalItemLoader() - # TODO: fill "educational"-keys with values for - # - description recommended (= "Comments on how this learning object is to be used") - # - interactivityType optional - # - interactivityLevel optional - # - semanticDensity optional - # - typicalAgeRange optional - # - difficulty optional - # - typicalLearningTime optional educational.add_value('language', 'de') - lom.add_value('educational', educational.load_item()) - - # ToDo: didactic_comment / competencies - classification = LomClassificationItemLoader() + # TODO: didactic comment could be either one of these: + # - educational.description + # - classification.description (with classification.purpose set to 'educational objective') if "/wochenthemen/" in current_url: - classification.add_value('purpose', 'educational objective') # didactic comments are only part of "Thema der Woche" didactic_comment = response.xpath('//div[@class="c-collapse-content js-collapse-content"]').get() if didactic_comment is not None: @@ -154,11 +140,15 @@ def parse(self, response, **kwargs): # didactic_comment = w3lib.html.replace_escape_chars(didactic_comment, which_ones='\t', replace_by=" ") # 
didactic_comment = w3lib.html.replace_escape_chars(didactic_comment) didactic_comment = " ".join(didactic_comment.split()) - if didactic_comment.endswith(".mehr lesenweniger lesen"): + if didactic_comment.endswith("mehr lesenweniger lesen"): + # the button-description of the expandable info-box ends up in the string, therefore removing it: didactic_comment = didactic_comment.replace("mehr lesenweniger lesen", "") # ToDo: make sure which string format looks best in edu-sharing (cleaned up <-> with escape chars) - classification.add_value('description', didactic_comment) + educational.add_value('description', didactic_comment) + lom.add_value('educational', educational.load_item()) + + classification = LomClassificationItemLoader() if "/unterrichtsvorschlaege/" in current_url: classification.add_value('purpose', 'competency') competency_description: list = response.xpath('//div[@class="b-cpsuiu-show-description"]/*[not(' @@ -172,25 +162,18 @@ def parse(self, response, **kwargs): classification.add_value('description', competency_description) lom.add_value('classification', classification.load_item()) - base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() - # for possible values, either consult https://vocabs.openeduhub.de - # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs - # TODO: fill "valuespaces"-keys with values for - # - learningResourceType recommended - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) + # ToDo: Set 'learningResourceType' depending on the material that's being crawled, recognize it by url vs.add_value('price', 'no') vs.add_value('containsAdvertisement', 'no') vs.add_value('conditionsOfAccess', 'no login') vs.add_value('intendedEndUserRole', 'teacher') - vs.add_value('sourceContentType', 'Unterrichtsmaterial- und Aufgaben-Sammlung') - vs.add_value('accessibilitySummary', 'Not tested') # ToDo: check if the accessibility has changed + vs.add_value('accessibilitySummary', 'Not tested') # see: https://www.umwelt-im-unterricht.de/erklaerung-zur-barrierefreiheit/ - vs.add_value('dataProtectionConformity', 'Sensible data collection') # ToDo: DSGVO-konform? + vs.add_value('dataProtectionConformity', 'Sensible data collection') # see: https://www.umwelt-im-unterricht.de/datenschutz/ - vs.add_value('oer', 'partly OER') # ToDo: alles OER? nur teils? wie setzen? # see: https://www.umwelt-im-unterricht.de/ueber-umwelt-im-unterricht/ disciplines_raw = response.xpath('//div[@class="b-cpsuiu-show-subjects"]/ul/li/a/text()').getall() if len(disciplines_raw) >= 1: @@ -223,10 +206,6 @@ def parse(self, response, **kwargs): base.add_value('valuespaces', vs.load_item()) lic = LicenseItemLoader() - # TODO: fill "license"-keys with values for - # - oer recommended ('oer' is automatically set if the 'url'-field above - # is recognized in LICENSE_MAPPINGS: for possible url-mapping values, please take a look at - # LICENSE_MAPPINGS in converter/constants.py) license_url = response.xpath('//div[@class="cc-licence-info"]/p/a[@rel="license"]/@href').get() if license_url is not None: lic.add_value('url', license_url) From bf227ecb29d76c9d47ea763a023b8b5a5c141e5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Oct 2021 12:41:26 +0200 Subject: [PATCH 027/590] umwelt_im_unterricht_spider.py (WIP!) 
- set learningResourceType depending on the URL-structure --- .../spiders/umwelt_im_unterricht_spider.py | 43 +++++++++++++------ 1 file changed, 29 insertions(+), 14 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index abdfeb9d..5ec0cf85 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -17,18 +17,18 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): name = "umwelt_im_unterricht_spider" friendlyName = "Umwelt im Unterricht" start_urls = [ - # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Atopics", - # # Typ: Thema der Woche - # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons", - # # Typ: Unterrichtsvorschlag - # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Acontexts", - # # Typ: Hintergrund (Kontext) - # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials", - # # Typ: Arbeitsmaterial - # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_video", - # # Typ: Video - # "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images", - # # Typ: Bilderserie + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Atopics", + # Typ: Thema der Woche + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons", + # Typ: Unterrichtsvorschlag + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Acontexts", + # Typ: Hintergrund (Kontext) + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials", + # Typ: Arbeitsmaterial + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_video", + # Typ: Video + "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images", + # Typ: Bilderserie ] version = "0.0.1" # last update: 2021-10-07 topic_urls = set() # urls that need to be parsed will be added here @@ -129,7 +129,7 @@ def parse(self, response, **kwargs): educational = LomEducationalItemLoader() educational.add_value('language', 'de') - # TODO: didactic comment could be either one of these: + # TODO: a didactic comment could fit into either one of these: # - educational.description # - classification.description (with classification.purpose set to 'educational objective') if "/wochenthemen/" in current_url: @@ -165,7 +165,22 @@ def parse(self, response, **kwargs): base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() - # ToDo: Set 'learningResourceType' depending on the material that's being crawled, recognize it by url + + # depending on the website-category, we need to set a specific learningResourceType + # because the value 'website' for all crawled items would not be helpful enough + if "/wochenthemen/" in current_url or "/unterrichtsvorschlaege/" in current_url: + vs.add_value('learningResourceType', 'lesson plan') + if "/hintergrund/" in current_url: + vs.add_value('learningResourceType', 'Text') + if "/medien/dateien/" in current_url: + # topics categorized as "Arbeitsmaterial" offer customizable worksheets to teachers + vs.add_value('learningResourceType', 'worksheet') + if "/medien/videos/" in current_url: + vs.add_value('learningResourceType', 'video') + if "/medien/bilder/" in current_url: + # topics categorized as "Bilderserie" hold several images in a 
gallery (with individual licenses) + vs.add_value('learningResourceType', 'image') + vs.add_value('price', 'no') vs.add_value('containsAdvertisement', 'no') vs.add_value('conditionsOfAccess', 'no login') From b57cb0640a89bef787c876dd16de8a8ab66ebabc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Oct 2021 13:20:27 +0200 Subject: [PATCH 028/590] umwelt_im_unterricht_spider.py v0.0.2 - fix license_url (replace "http://" by "https://") --- converter/spiders/umwelt_im_unterricht_spider.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index 5ec0cf85..976c83f3 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -30,7 +30,7 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images", # Typ: Bilderserie ] - version = "0.0.1" # last update: 2021-10-07 + version = "0.0.2" # last update: 2021-10-08 topic_urls = set() # urls that need to be parsed will be added here topic_urls_already_parsed = set() # this set is used for 'checking off' already parsed urls @@ -221,8 +221,10 @@ def parse(self, response, **kwargs): base.add_value('valuespaces', vs.load_item()) lic = LicenseItemLoader() - license_url = response.xpath('//div[@class="cc-licence-info"]/p/a[@rel="license"]/@href').get() + license_url: str = response.xpath('//div[@class="cc-licence-info"]/p/a[@rel="license"]/@href').get() if license_url is not None: + if license_url.startswith("http://"): + license_url = license_url.replace("http://", "https://") lic.add_value('url', license_url) license_description_raw = response.xpath('//div[@class="cc-licence-info"]').get() From 5b3a69209d4eaa075445c5664d54fcd064a431d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Mon, 11 Oct 2021 13:23:24 +0200 Subject: [PATCH 029/590] add documentation / scrapy contracts --- .../spiders/umwelt_im_unterricht_spider.py | 32 ++++++++++++++++--- 1 file changed, 27 insertions(+), 5 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index 976c83f3..015d859f 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -40,7 +40,7 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): DISCIPLINE_MAPPING: dict = { 'Arbeit, Wirtschaft, Technik': 'Arbeitslehre', 'Ethik, Philosophie, Religion': ['Ethik', 'Philosophie', 'Religion'], - 'Fächerübergreifend': 'Allgemein', # ToDo: no mapping available + 'Fächerübergreifend': 'Allgemein', 'Politik, SoWi, Gesellschaft': ['Politik', 'Sozialkunde', 'Gesellschaftskunde'] } @@ -55,6 +55,12 @@ def parse_start_url(self, response, **kwargs): yield scrapy.Request(url=url, callback=self.parse_category_overview_for_individual_topic_urls) def parse_category_overview_for_individual_topic_urls(self, response): + """ + + Scrapy Contracts: + @url https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons + @returns requests 10 + """ # logging.debug(f"INSIDE PARSE CATEGORY METHOD: {response.url}") topic_urls_raw: list = response.xpath('//a[@class="internal-link readmore"]/@href').getall() # logging.debug(f"TOPIC URLS (RAW) ={topic_urls_raw}") @@ -68,10 +74,12 @@ def 
parse_category_overview_for_individual_topic_urls(self, response): last_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last last"]/a/@href').get() if last_page_button_url is not None: last_page_button_url = response.urljoin(last_page_button_url) - # Using the "next page"-button until we reach the last page: + # Using the "next page"-button to navigate through all individual topics until we reach the last page: if last_page_button_url != response.url: next_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last next"]/a/@href').get() if next_page_button_url is not None: + # ToDo: optimize the page navigation by making it independent of the 'next'-button + # (by manually 'building' the url_strings from 1 to "last-page" with RegEx) next_url_to_parse = response.urljoin(next_page_button_url) yield scrapy.Request(url=next_url_to_parse, callback=self.parse_category_overview_for_individual_topic_urls) @@ -87,6 +95,12 @@ def parse_category_overview_for_individual_topic_urls(self, response): # f"topic_urls_already_parsed: {len(self.topic_urls_already_parsed)}") def parse(self, response, **kwargs): + """ + + Scrapy Contracts: + @url https://www.umwelt-im-unterricht.de/hintergrund/generationengerechtigkeit-klimaschutz-und-eine-lebenswerte-zukunft/ + @returns item 1 + """ current_url: str = response.url base = BaseItemLoader() @@ -107,6 +121,7 @@ def parse(self, response, **kwargs): general.add_value('title', title) keywords = response.xpath('//div[@class="b-cpsuiu-show-keywords"]/ul/li/a/text()').getall() if len(keywords) >= 1: + # only add keywords if the list isn't empty general.add_value('keyword', keywords) description = response.xpath('/html/head/meta[@name="description"]/@content').get() general.add_value('description', description) @@ -141,8 +156,10 @@ def parse(self, response, **kwargs): # didactic_comment = w3lib.html.replace_escape_chars(didactic_comment) didactic_comment = " ".join(didactic_comment.split()) if didactic_comment.endswith("mehr lesenweniger lesen"): - # the button-description of the expandable info-box ends up in the string, therefore removing it: + # the button-description of the expandable info-box ends up in the string, + # therefore we are manually removing it: didactic_comment = didactic_comment.replace("mehr lesenweniger lesen", "") + # since there's currently no way to confirm how the string looks in the web-interface: # ToDo: make sure which string format looks best in edu-sharing (cleaned up <-> with escape chars) educational.add_value('description', didactic_comment) @@ -153,7 +170,8 @@ def parse(self, response, **kwargs): classification.add_value('purpose', 'competency') competency_description: list = response.xpath('//div[@class="b-cpsuiu-show-description"]/*[not(' '@class="cc-licence-info")]').getall() - # competency_description will grab the whole div-element, but EXCLUDE the "license"-container + # the xpath-expression for competency_description will grab the whole div-element, + # but EXCLUDE the "license"-container (if the license-description exists, it's always part of the same div) if len(competency_description) >= 1: # only if the list of strings is not empty, we'll try to type-convert it to a string (and clean its # formatting up) @@ -185,11 +203,12 @@ def parse(self, response, **kwargs): vs.add_value('containsAdvertisement', 'no') vs.add_value('conditionsOfAccess', 'no login') vs.add_value('intendedEndUserRole', 'teacher') + # see: https://www.umwelt-im-unterricht.de/ueber-umwelt-im-unterricht/ vs.add_value('accessibilitySummary', 
'Not tested') # see: https://www.umwelt-im-unterricht.de/erklaerung-zur-barrierefreiheit/ vs.add_value('dataProtectionConformity', 'Sensible data collection') # see: https://www.umwelt-im-unterricht.de/datenschutz/ - # see: https://www.umwelt-im-unterricht.de/ueber-umwelt-im-unterricht/ + disciplines_raw = response.xpath('//div[@class="b-cpsuiu-show-subjects"]/ul/li/a/text()').getall() if len(disciplines_raw) >= 1: disciplines = list() @@ -197,6 +216,8 @@ def parse(self, response, **kwargs): # self.debug_discipline_values.add(discipline_value) if discipline_value in self.DISCIPLINE_MAPPING.keys(): discipline_value = self.DISCIPLINE_MAPPING.get(discipline_value) + # since the mapping value can either be a single string OR a list of strings, we need to make sure that + # our 'disciplines'-list is a list of strings (not a list with nested lists): if type(discipline_value) is list: disciplines.extend(discipline_value) else: @@ -206,6 +227,7 @@ def parse(self, response, **kwargs): educational_context_raw = response.xpath('//div[@class="b-cpsuiu-show-targets"]/ul/li/a/text()').getall() if len(educational_context_raw) >= 1: + # the educationalContext-mapping is only done when there's at least one educational_context found educational_context = list() for educational_context_value in educational_context_raw: # self.debug_educational_context_values.add(educational_context_value) From 9319b5764629f7f65275dbcf0b46a287dbf4f93b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Mon, 11 Oct 2021 20:48:24 +0200 Subject: [PATCH 030/590] rework crawler navigation through overview_urls - no longer uses the "next page"-button, but instead builds the url-list by splitting the "last page"-button up into a URL and its page-parameter (int) -- this makes sure that we don't lose several pages and topics at once if scrapy gets only a timeout as a response from one overview subpage --- .../spiders/umwelt_im_unterricht_spider.py | 46 +++++++++++-------- 1 file changed, 26 insertions(+), 20 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index 015d859f..2810f78d 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -1,3 +1,5 @@ +import re + import scrapy import w3lib.html from scrapy.spiders import CrawlSpider @@ -32,7 +34,8 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): ] version = "0.0.2" # last update: 2021-10-08 topic_urls = set() # urls that need to be parsed will be added here - topic_urls_already_parsed = set() # this set is used for 'checking off' already parsed urls + topic_urls_parsed = set() # this set is used for 'checking off' already parsed urls + overview_urls_already_parsed = set() # this set is used for 'checking off' already parsed overview_pages EDUCATIONAL_CONTEXT_MAPPING: dict = { 'Sekundarstufe': ['Sekundarstufe I', 'Sekundarstufe II'] @@ -52,9 +55,9 @@ def getHash(self, response=None) -> str: def parse_start_url(self, response, **kwargs): for url in self.start_urls: - yield scrapy.Request(url=url, callback=self.parse_category_overview_for_individual_topic_urls) + yield scrapy.Request(url=url, callback=self.parse_category_overview_for_topics_and_subpages) - def parse_category_overview_for_individual_topic_urls(self, response): + def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http.Response): """ Scrapy Contracts: @@ -72,27 +75,30 @@ def 
parse_category_overview_for_individual_topic_urls(self, response): # if there's a "Letzte"-Button in the overview, there's more topic_urls to be gathered than the initially # displayed 10 elements last_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last last"]/a/@href').get() + page_number_regex = re.compile(r'(?P.*&tx_solr%5Bpage%5D=)(?P\d+)') + overview_urls_parsed: set = set() + if last_page_button_url is not None: - last_page_button_url = response.urljoin(last_page_button_url) - # Using the "next page"-button to navigate through all individual topics until we reach the last page: - if last_page_button_url != response.url: - next_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last next"]/a/@href').get() - if next_page_button_url is not None: - # ToDo: optimize the page navigation by making it independent of the 'next'-button - # (by manually 'building' the url_strings from 1 to "last-page" with RegEx) - next_url_to_parse = response.urljoin(next_page_button_url) - yield scrapy.Request(url=next_url_to_parse, - callback=self.parse_category_overview_for_individual_topic_urls) - # if last_page_button_url == response.url: - # logging.debug(f"Reached the last page: {response.url}") - # logging.debug(f"{len(self.topic_urls)} individual topic_urls were found: {self.topic_urls}") + page_number_dict: dict = page_number_regex.search(last_page_button_url).groupdict() + url_without_page_parameter = response.urljoin(page_number_dict.get('url_with_parameters')) + last_page_number = int(page_number_dict.get('nr')) + for i in range(2, last_page_number + 1): + # since the initial url in start_urls already counts as page 1, + # we're iterating from page 2 to the last page + next_overview_subpage_to_crawl = str(url_without_page_parameter + str(i)) + if next_overview_subpage_to_crawl not in self.overview_urls_already_parsed: + yield scrapy.Request(url=next_overview_subpage_to_crawl, + callback=self.parse_category_overview_for_topics_and_subpages) + overview_urls_parsed.add(next_overview_subpage_to_crawl) + self.overview_urls_already_parsed.update(overview_urls_parsed) + + parsed_urls: set = set() for url in self.topic_urls: # making sure that we don't accidentally crawl individual pages more than once - if url not in self.topic_urls_already_parsed: + if url not in self.topic_urls_parsed: yield scrapy.Request(url=url, callback=self.parse) - self.topic_urls_already_parsed.add(url) - # logging.debug(f"topic_urls after yielding them: {len(self.topic_urls)} --- " - # f"topic_urls_already_parsed: {len(self.topic_urls_already_parsed)}") + parsed_urls.add(url) + self.topic_urls_parsed.update(parsed_urls) def parse(self, response, **kwargs): """ From 23817ecad2040037b7504fe961b4389d7b1be51b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Oct 2021 11:40:34 +0200 Subject: [PATCH 031/590] minimal code cleanup, add documentation --- .../spiders/umwelt_im_unterricht_spider.py | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index 2810f78d..c8b90789 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -34,7 +34,7 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): ] version = "0.0.2" # last update: 2021-10-08 topic_urls = set() # urls that need to be parsed will be added here - topic_urls_parsed = set() # this set 
is used for 'checking off' already parsed urls + topic_urls_parsed = set() # this set is used for 'checking off' already parsed (individual) topic urls overview_urls_already_parsed = set() # this set is used for 'checking off' already parsed overview_pages EDUCATIONAL_CONTEXT_MAPPING: dict = { @@ -64,38 +64,37 @@ def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http. @url https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons @returns requests 10 """ - # logging.debug(f"INSIDE PARSE CATEGORY METHOD: {response.url}") topic_urls_raw: list = response.xpath('//a[@class="internal-link readmore"]/@href').getall() - # logging.debug(f"TOPIC URLS (RAW) ={topic_urls_raw}") for url_ending in topic_urls_raw: self.topic_urls.add(response.urljoin(url_ending)) - # logging.debug(f"TOPIC URLS ({len(self.topic_urls)}) = {self.topic_urls}") # if there's a "Letzte"-Button in the overview, there's more topic_urls to be gathered than the initially # displayed 10 elements last_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last last"]/a/@href').get() + # the string last_page_button_url typically looks like this: + # "/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images&tx_solr%5Bpage%5D=8" page_number_regex = re.compile(r'(?P.*&tx_solr%5Bpage%5D=)(?P\d+)') - overview_urls_parsed: set = set() + overview_urls_parsed: set = set() # temporary set used for checking off already visited URLs if last_page_button_url is not None: page_number_dict: dict = page_number_regex.search(last_page_button_url).groupdict() url_without_page_parameter = response.urljoin(page_number_dict.get('url_with_parameters')) last_page_number = int(page_number_dict.get('nr')) for i in range(2, last_page_number + 1): - # since the initial url in start_urls already counts as page 1, - # we're iterating from page 2 to the last page + # the initial url from start_urls already counts as page 1, therefore we're iterating + # from page 2 to the last page next_overview_subpage_to_crawl = str(url_without_page_parameter + str(i)) if next_overview_subpage_to_crawl not in self.overview_urls_already_parsed: yield scrapy.Request(url=next_overview_subpage_to_crawl, callback=self.parse_category_overview_for_topics_and_subpages) overview_urls_parsed.add(next_overview_subpage_to_crawl) - self.overview_urls_already_parsed.update(overview_urls_parsed) + self.overview_urls_already_parsed.update(overview_urls_parsed) # checking off the (10) URLs that we yielded - parsed_urls: set = set() + parsed_urls: set = set() # temporary set used for checking off already visited topics for url in self.topic_urls: - # making sure that we don't accidentally crawl individual pages more than once if url not in self.topic_urls_parsed: + # making sure that we don't accidentally crawl individual pages more than once yield scrapy.Request(url=url, callback=self.parse) parsed_urls.add(url) self.topic_urls_parsed.update(parsed_urls) @@ -252,6 +251,7 @@ def parse(self, response, **kwargs): license_url: str = response.xpath('//div[@class="cc-licence-info"]/p/a[@rel="license"]/@href').get() if license_url is not None: if license_url.startswith("http://"): + # the license-mapper expects urls that are in https:// format, but UIU uses http:// links to CC-licenses license_url = license_url.replace("http://", "https://") lic.add_value('url', license_url) @@ -264,7 +264,7 @@ def parse(self, response, **kwargs): # together. just replacing \n with a whitespace is enough to keep the structure of the string intact. 
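+            # the second replace_escape_chars() call below (default arguments) then strips the remaining
+            # escape characters such as \t and \r, and the split()/join() at the end collapses repeated
+            # whitespace into single spaces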
license_description_raw = w3lib.html.replace_escape_chars(license_description_raw) license_description = " ".join(license_description_raw.split()) - # making sure that there's only 1 whitespace between words, not 4+ when the original string had serveral \t + # making sure that there's only 1 whitespace between words lic.add_value('description', license_description) base.add_value('license', lic.load_item()) @@ -274,5 +274,4 @@ def parse(self, response, **kwargs): response_loader = super().mapResponse(response) base.add_value('response', response_loader.load_item()) - # once all scrapy.Item are loaded into our "base", we yield the BaseItem by calling the .load_item() method yield base.load_item() From 32dc5645ea5f8c34ac894438e4561a60bf0c9fdb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Oct 2021 12:22:07 +0200 Subject: [PATCH 032/590] add type hinting, descriptions --- .../spiders/umwelt_im_unterricht_spider.py | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index c8b90789..4dc5f0f8 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -59,6 +59,9 @@ def parse_start_url(self, response, **kwargs): def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http.Response): """ + Crawls an overview page of a "type"-category (e.g. "Hintergrund", "Bilderserie" etc.) for subpages and topics. + If the overview has subpages, it will recursively yield additional scrapy.Requests to the overview-subpages. + Afterwards it yields the (10) individual topic_urls (per overview page) to the parse()-method. Scrapy Contracts: @url https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Alessons @@ -71,7 +74,7 @@ def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http. # if there's a "Letzte"-Button in the overview, there's more topic_urls to be gathered than the initially # displayed 10 elements - last_page_button_url = response.xpath('//li[@class="tx-pagebrowse-last last"]/a/@href').get() + last_page_button_url: str = response.xpath('//li[@class="tx-pagebrowse-last last"]/a/@href').get() # the string last_page_button_url typically looks like this: # "/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images&tx_solr%5Bpage%5D=8" page_number_regex = re.compile(r'(?P.*&tx_solr%5Bpage%5D=)(?P\d+)') @@ -79,7 +82,7 @@ def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http. overview_urls_parsed: set = set() # temporary set used for checking off already visited URLs if last_page_button_url is not None: page_number_dict: dict = page_number_regex.search(last_page_button_url).groupdict() - url_without_page_parameter = response.urljoin(page_number_dict.get('url_with_parameters')) + url_without_page_parameter: str = response.urljoin(page_number_dict.get('url_with_parameters')) last_page_number = int(page_number_dict.get('nr')) for i in range(2, last_page_number + 1): # the initial url from start_urls already counts as page 1, therefore we're iterating @@ -99,8 +102,9 @@ def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http. 
parsed_urls.add(url) self.topic_urls_parsed.update(parsed_urls) - def parse(self, response, **kwargs): + def parse(self, response: scrapy.http.Response, **kwargs): """ + Parses an individual topic url for metadata and yields a BaseItem. Scrapy Contracts: @url https://www.umwelt-im-unterricht.de/hintergrund/generationengerechtigkeit-klimaschutz-und-eine-lebenswerte-zukunft/ @@ -110,8 +114,8 @@ def parse(self, response, **kwargs): base = BaseItemLoader() base.add_value('sourceId', response.url) - date_raw = response.xpath('//div[@class="b-cpsuiu-show-info"]/span/text()').get() - date_cleaned_up = w3lib.html.strip_html5_whitespace(date_raw) + date_raw: str = response.xpath('//div[@class="b-cpsuiu-show-info"]/span/text()').get() + date_cleaned_up: str = w3lib.html.strip_html5_whitespace(date_raw) hash_temp = str(date_cleaned_up + self.version) base.add_value('hash', hash_temp) base.add_value('lastModified', date_cleaned_up) @@ -122,13 +126,13 @@ def parse(self, response, **kwargs): general = LomGeneralItemloader() general.add_value('identifier', response.url) - title = response.xpath('//div[@class="tx-cps-uiu"]/article/h1/text()').get() + title: str = response.xpath('//div[@class="tx-cps-uiu"]/article/h1/text()').get() general.add_value('title', title) - keywords = response.xpath('//div[@class="b-cpsuiu-show-keywords"]/ul/li/a/text()').getall() + keywords: list = response.xpath('//div[@class="b-cpsuiu-show-keywords"]/ul/li/a/text()').getall() if len(keywords) >= 1: # only add keywords if the list isn't empty general.add_value('keyword', keywords) - description = response.xpath('/html/head/meta[@name="description"]/@content').get() + description: str = response.xpath('/html/head/meta[@name="description"]/@content').get() general.add_value('description', description) general.add_value('language', 'de') @@ -214,7 +218,7 @@ def parse(self, response, **kwargs): vs.add_value('dataProtectionConformity', 'Sensible data collection') # see: https://www.umwelt-im-unterricht.de/datenschutz/ - disciplines_raw = response.xpath('//div[@class="b-cpsuiu-show-subjects"]/ul/li/a/text()').getall() + disciplines_raw: list = response.xpath('//div[@class="b-cpsuiu-show-subjects"]/ul/li/a/text()').getall() if len(disciplines_raw) >= 1: disciplines = list() for discipline_value in disciplines_raw: @@ -255,7 +259,7 @@ def parse(self, response, **kwargs): license_url = license_url.replace("http://", "https://") lic.add_value('url', license_url) - license_description_raw = response.xpath('//div[@class="cc-licence-info"]').get() + license_description_raw: str = response.xpath('//div[@class="cc-licence-info"]').get() if license_description_raw is not None: license_description_raw = w3lib.html.remove_tags(license_description_raw) license_description_raw = w3lib.html.replace_escape_chars(license_description_raw, which_ones="\n", From 2ce8d8f5dcb3c9cefac74727ca5f1925329cc511 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 28 Oct 2021 13:12:49 +0200 Subject: [PATCH 033/590] fix:edu sharing base search via asterisk --- converter/spiders/base_classes/edu_sharing_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 80c60c9b..56ed6796 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -34,7 +34,7 @@ def buildUrl(self, offset=0): def search(self, offset=0): return JsonRequest( url=self.buildUrl(offset), - data={"criterias": 
[{"property": "ngsearchword", "values": [""]}]}, + data={"criterias": [{"property": "ngsearchword", "values": ["*"]}]}, callback=self.parse, ) From f961b71bbaffddbde2b71a23baa7dfc162591978 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 12 Nov 2021 12:26:32 +0100 Subject: [PATCH 034/590] youtube new sources list --- csv/youtube.csv | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/csv/youtube.csv b/csv/youtube.csv index 6a28b31f..c6607463 100644 --- a/csv/youtube.csv +++ b/csv/youtube.csv @@ -196,8 +196,12 @@ https://www.youtube.com/channel/UCrMePiHCWG4Vwqv3t7W9EFg,SciShow ,video,4003; 4 https://www.youtube.com/channel/UCrMePiHCWG4Vwqv3t7W9EFg,SciShow,video,28010; 4003,Primarstufe; Sekundarstufe 1,"learner, teacher",3,99,en,, https://www.youtube.com/channel/UCRFIPG2u1DxKLNuE3y2SjHA,CrashCourse ,video,28010; 4003,Primarstufe; Sekundarstufe 1,"learner, teacher",3,99,en,, https://www.youtube.com/channel/UCotQdo7hOWcKl_hNMvuL79A,Forsche mit uns! NaWi mit GUB e.V.,video,28010,Primarstufe,"learner, teacher",3,12,de,, -https://www.youtube.com/channel/UCPAfRcqYTpbQxC-i9C6HK_g/featured,Frau Schimpf,video,060,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",12,99,de,, -https://www.youtube.com/channel/UCekU1TP424ZV9DnNjdivaDQ/videos,Bernd-H. Brand,video,100,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, +https://www.youtube.com/channel/UCPAfRcqYTpbQxC-i9C6HK_g,Frau Schimpf,video,060,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",12,99,de,, +https://www.youtube.com/channel/UCekU1TP424ZV9DnNjdivaDQ,Bernd-H. Brand,video,100,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, https://www.youtube.com/channel/UCIntY47wLCbBNItj7-2FwXQ,LEIFIphysik,video,460,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, -https://www.youtube.com/channel/UC5EyFAt0bLnC6YzBvEXgF8A/videos,the artinspector,video,060,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, -https://www.youtube.com/channel/UCwRH985XgMYXQ6NxXDo8npw/featured,Dinge erklärt - Kurzgesagt,video,,"Sekundarstufe1, Sekundarstufe 2","learner, teacher",,,de,, \ No newline at end of file +https://www.youtube.com/channel/UC5EyFAt0bLnC6YzBvEXgF8A,the artinspector,video,060,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, +https://www.youtube.com/channel/UCwRH985XgMYXQ6NxXDo8npw,Dinge erklärt - Kurzgesagt,video,,"Sekundarstufe1, Sekundarstufe 2","learner, teacher",,,de,, +https://www.youtube.com/channel/UCKjJ1nCoMFTHzQlUtHHBBsw,Akademie für Lerncoaching,video,720,Primarstufe; Sekundarstufe1; Sekundarstufe 2,teacher,,,de,, +https://www.youtube.com/channel/UCFSS2FtaFNKMei4jGQOVL3w,Chemie und Bio in der Schule,video,100; 080,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, +https://www.youtube.com/channel/UCk0aUAhu9RxfOX1iMXAJ-2g,Chemistry Kicksass,video,100,Sekundarstufe 1; Sekundarstufe 2,learner,,,de,, +https://www.youtube.com/channel/UCWNvo3l-K-X6CPSBcP9NCNg,Chemie - simpleclub,video,100,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,,,de,, \ No newline at end of file From beb533a287e347ddbfc0bc086aa0f5e64fc24397 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 12 Nov 2021 17:08:03 +0100 Subject: [PATCH 035/590] materialnetzwerk parse page with web engine --- converter/spiders/materialnetzwerk_spider.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/converter/spiders/materialnetzwerk_spider.py b/converter/spiders/materialnetzwerk_spider.py index e145e260..b013f4dc 100644 --- a/converter/spiders/materialnetzwerk_spider.py +++ 
b/converter/spiders/materialnetzwerk_spider.py @@ -1,4 +1,5 @@ import json +import logging from typing import Optional import scrapy.http @@ -9,6 +10,7 @@ LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase from converter.valuespace_helper import ValuespaceHelper +from converter.web_tools import WebTools, WebEngine class MaterialNetzwerkSpider(CrawlSpider, LomBase): @@ -85,10 +87,14 @@ def parse_bundle_overview(self, response: scrapy.http.Response): :return: yields a scrapy.Request for the first worksheet """ + ## render the web page to execute js and copy to the response + body = WebTools.getUrlData(response.url, WebEngine.Pyppeteer) + response = response.replace(body = body['html']) + # a typical bundle_overview looks like this: https://editor.mnweg.org/mnw/sammlung/das-menschliche-skelett-m-78 # there's minimal metadata to be found, but we can grab the descriptions of each worksheet and use the # accumulated strings as our description for the bundle page - bundle_title = response.xpath('//div[@class="l-container content"]/h2/text()').get() + bundle_title = response.xpath('//*/div[@class="l-container content"]/h2/text()').get() bundle_description = response.xpath('/html/head/meta[@property="description"]/@content').get() # div class tutoryMark holds the same content as the description in the header # bundle_tutory_mark = response.xpath('//div[@class="tutoryMark"]/text()').getall() @@ -164,6 +170,7 @@ def parse_bundle_overview(self, response: scrapy.http.Response): 'bundle_thumbnail': first_worksheet_thumbnail } if first_worksheet_url is not None: + logging.debug(first_worksheet_url) yield scrapy.Request(url=first_worksheet_url, callback=self.parse, cb_kwargs=bundle_dict) # print(debug_disciplines_sorted) pass From 29b5cbd12bab665ead706855e5b44dafe5a1d8ac Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 14 Apr 2022 11:11:51 +0200 Subject: [PATCH 036/590] fix:alt labels not transformed to valuespaces --- converter/pipelines.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 1523a7d7..e5b6da26 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -272,9 +272,7 @@ def process_item(self, raw_item, spider): for v in valuespace: labels = list(v["prefLabel"].values()) if "altLabel" in v: - labels = labels + list( - [x for y in list(v["altLabel"].values()) for x in y] - ) + labels = labels + list(v["altLabel"].values()) labels = list(map(lambda x: x.casefold(), labels)) if v["id"].endswith(entry) or entry.casefold() in labels: _id = v["id"] From 4042ed52d823db38156490209cb6040366a40d9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Wed, 2 Feb 2022 00:25:40 +0100 Subject: [PATCH 037/590] materialnetzwerk_spider v0.0.6 - replace old learningResourceType value ("teaching module") with new_lrt values ("Unterrichtsbaustein" & "Arbeitsblatt") -- change: new_lrt uses uuids - add discipline mapping for "Erdkunde, Gemeinschaftskunde, Geschichte" - ToDo: figure out why not all bundles are getting scraped (should be 75 total, currently <50% get scraped with pyppeteer) --- converter/spiders/materialnetzwerk_spider.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/converter/spiders/materialnetzwerk_spider.py b/converter/spiders/materialnetzwerk_spider.py index b013f4dc..be1f0ff5 100644 --- 
a/converter/spiders/materialnetzwerk_spider.py +++ b/converter/spiders/materialnetzwerk_spider.py @@ -16,7 +16,7 @@ class MaterialNetzwerkSpider(CrawlSpider, LomBase): name = "materialnetzwerk_spider" friendlyName = "Materialnetzwerk.org" - version = "0.0.5" # last update: 2021-09-29 + version = "0.0.6" # last update: 2022-04-14 start_urls = [ # 'https://editor.mnweg.org/?p=1&materialType=bundle', # this doesn't list any materials since they're loaded dynamically @@ -32,6 +32,8 @@ class MaterialNetzwerkSpider(CrawlSpider, LomBase): } discipline_mapping = { 'AES': "Ernährung und Hauswirtschaft", # Ernährung und Hauswirtschaft + 'Erdkunde, Gemeinschaftskunde, Geschichte': ['Erdkunde', 'Gesellschaftskunde', 'Sozialkunde', 'Geschichte'], + # Gemeinschaftskunde can be either "Gesellschaftskunde" or "Sozialkunde" (depending on the county) } # debug_disciplines = set() @@ -87,9 +89,9 @@ def parse_bundle_overview(self, response: scrapy.http.Response): :return: yields a scrapy.Request for the first worksheet """ - ## render the web page to execute js and copy to the response + # render the web page to execute js and copy to the response body = WebTools.getUrlData(response.url, WebEngine.Pyppeteer) - response = response.replace(body = body['html']) + response = response.replace(body=body['html']) # a typical bundle_overview looks like this: https://editor.mnweg.org/mnw/sammlung/das-menschliche-skelett-m-78 # there's minimal metadata to be found, but we can grab the descriptions of each worksheet and use the @@ -251,7 +253,12 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() - vs.add_value('learningResourceType', 'teaching module') + # ToDo: learningResourceType ("teaching module") -> new_lrt "Unterrichtsbaustein" & "Arbeitsblatt"? 
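# Illustrative sketch by the editor, not part of this patch: the updated discipline_mapping can map
# one raw subject string either to a single value or to a list of values, so a lookup may need to be
# flattened before it is handed to the ValuespaceItemLoader. The helper name below is hypothetical.
def map_discipline(raw_subject: str, mapping: dict) -> list:
    mapped = mapping.get(raw_subject, raw_subject)
    return mapped if isinstance(mapped, list) else [mapped]

# map_discipline('Erdkunde, Gemeinschaftskunde, Geschichte', MaterialNetzwerkSpider.discipline_mapping)
# -> ['Erdkunde', 'Gesellschaftskunde', 'Sozialkunde', 'Geschichte']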
+ # vs.add_value('learningResourceType', 'teaching module') + vs.add_value('new_lrt', ["5098cf0b-1c12-4a1b-a6d3-b3f29621e11d", + "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9" + "36e68792-6159-481d-a97b-2c00901f4f78"]) + # "Unterrichtsbaustein", "Webseite und Portal (stabil)", "Arbeitsblatt bundle_discipline = kwargs.get('bundle_discipline') if bundle_discipline is not None: if self.discipline_mapping.get(bundle_discipline) is not None: From cb42b6f4f1f2027dbe14989d61eb15fcf6571c75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Mon, 18 Oct 2021 16:53:57 +0200 Subject: [PATCH 038/590] zum_klexikon_spider v0.1.2 (squashed) - add "new_lrt"-field to the crawler - ToDo: Confirm this is working as intended after new_lrt has been implemented into the item pipeline - rename zum_klexikon_spider class to be in line with the rest of the spider class-names zum_klexikon.py v0.1.1 - fix crawler navigation (crawler was stuck at page 1 because it looked for a json object that didn't exist (anymore?)) - implement workaround for general.description -- use first paragraph of fulltext to manually create a short-description (since the API doesn't offer us any description) - remove non-functional @scrapes from scrapy contracts since they always return "OK" --- converter/spiders/zum_klexikon.py | 75 ++++++++++++++++++++++++------- 1 file changed, 59 insertions(+), 16 deletions(-) diff --git a/converter/spiders/zum_klexikon.py b/converter/spiders/zum_klexikon.py index 9b0038c6..bb215787 100644 --- a/converter/spiders/zum_klexikon.py +++ b/converter/spiders/zum_klexikon.py @@ -1,22 +1,26 @@ -from converter.items import LomTechnicalItem, LicenseItem, LomGeneralItem, ValuespaceItem -from .base_classes.mediawiki_base import MediaWikiBase, jmes_pageids -import scrapy import json + import jmespath -from ..constants import Constants +import scrapy +import w3lib.html +from scrapy import Selector -jmes_continue = jmespath.compile('"query-continue".allpages') +from converter.items import LomTechnicalItem, LicenseItem, LomGeneralItemloader, ValuespaceItemLoader +from .base_classes.mediawiki_base import MediaWikiBase, jmes_pageids, jmes_title, jmes_links, jmes_continue +from ..constants import Constants -class ZUMSpider(MediaWikiBase, scrapy.Spider): +class ZUMKlexikonSpider(MediaWikiBase, scrapy.Spider): name = "zum_klexikon_spider" friendlyName = "ZUM-Klexikon" url = "https://klexikon.zum.de/" - version = "0.1.0" + version = "0.1.2" # last update: 2022-02-16 license = Constants.LICENSE_CC_BY_SA_30 def parse_page_query(self, response: scrapy.http.Response): """ + + Scrapy Contracts: @url https://klexikon.zum.de/api.php?format=json&action=query&list=allpages&aplimit=100&apfilterredir=nonredirects @returns requests 101 101 """ @@ -29,7 +33,7 @@ def parse_page_query(self, response: scrapy.http.Response): callback=self.parse_page_data, cb_kwargs={"extra": {'pageid': str(pageid)}} ) - if 'query-continue' not in data: + if 'continue' not in data: return yield self.query_for_pages(jmes_continue.search(data)) @@ -38,32 +42,71 @@ def getId(self, response=None): def technical_item(self, response) -> LomTechnicalItem: """ + + Scrapy Contracts: @url https://klexikon.zum.de/api.php?format=json&action=parse&pageid=10031&prop=text|langlinks|categories|links|templates|images|externallinks|sections|revid|displaytitle|iwlinks|properties - @scrapes format location """ response.meta['item'] = json.loads(response.body) return self.getLOMTechnical(response).load_item() def 
license_item(self, response) -> LicenseItem: """ + + Scrapy Contracts: @url https://klexikon.zum.de/api.php?format=json&action=parse&pageid=10031&prop=text|langlinks|categories|links|templates|images|externallinks|sections|revid|displaytitle|iwlinks|properties - @scrapes url """ response.meta['item'] = json.loads(response.body) return self.getLicense(response).load_item() - def general_item(self, response) -> LomGeneralItem: + # def general_item(self, response) -> LomGeneralItem: + def getLOMGeneral(self, response=None) -> LomGeneralItemloader: """ + Gathers title, keyword and (short-)description and returns the LomGeneralItemloader afterwards. + + Scrapy Contracts: @url https://klexikon.zum.de/api.php?format=json&action=parse&pageid=4937&prop=text|langlinks|categories|links|templates|images|externallinks|sections|revid|displaytitle|iwlinks|properties - @scrapes title keyword description """ + # old implementation with missing 'description'-value: response.meta['item'] = json.loads(response.body) - return self.getLOMGeneral(response).load_item() + # return self.getLOMGeneral(response).load_item() + general = LomGeneralItemloader() + data = json.loads(response.body) + general.replace_value('title', jmes_title.search(data)) + general.replace_value('keyword', jmes_links.search(data)) + + jmes_text = jmespath.compile('parse.text."*"') + fulltext = jmes_text.search(data) + first_paragraph = Selector(text=fulltext).xpath('//p').get() + # grabbing the first
 <p>
-Element as a workaround for the missing short-description + if first_paragraph is not None: + first_paragraph = w3lib.html.remove_tags(first_paragraph) + general.add_value('description', first_paragraph) + else: + # if for some reason the first paragraph is not found, this is a fallback solution to manually split the + # fulltext by its first newline (since we don't want to copypaste the "fulltext" into our description + fulltext = w3lib.html.remove_tags(fulltext) + first_paragraph = fulltext.split("\n")[0] + first_paragraph = first_paragraph.strip() + general.add_value('description', first_paragraph) + return general - def valuespace_item(self, response) -> ValuespaceItem: + def getValuespaces(self, response) -> ValuespaceItemLoader: """ + Scrapy Contracts: @url https://klexikon.zum.de/api.php?format=json&action=parse&pageid=10031&prop=text|langlinks|categories|links|templates|images|externallinks|sections|revid|displaytitle|iwlinks|properties - @scrapes discipline educationalContext intendedEndUserRole """ response.meta['item'] = json.loads(response.body) - return self.getValuespaces(response).load_item() + vs = ValuespaceItemLoader() + data = response.meta['item'] + # this jmespath expression doesn't distinguish between values for + # 'discipline', 'educationalContext' or 'intendedEndUserRole' + # it tries to fit the values into each metadata-field + # and the non-fitting values get dropped by the valuespaces pipeline + jmes_categories = jmespath.compile('parse.categories[]."*"') + categories = jmes_categories.search(data) # ['Ethik', 'Sekundarstufe_1'] + if categories is not None: + vs.add_value("discipline", categories) + vs.add_value("educationalContext", categories) + vs.add_value("intendedEndUserRole", categories) + vs.add_value("new_lrt", "Wiki (dynamisch)") + return vs From 90d792cba12f885286e2337100c0d9c908be84d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Fri, 11 Feb 2022 12:50:48 +0100 Subject: [PATCH 039/590] add Valuespaces field for "new_lrt" --- converter/items.py | 1 + 1 file changed, 1 insertion(+) diff --git a/converter/items.py b/converter/items.py index 41dd58a6..c0b306d7 100644 --- a/converter/items.py +++ b/converter/items.py @@ -127,6 +127,7 @@ class ValuespaceItem(Item): discipline = Field(output_processor=JoinMultivalues()) educationalContext = Field(output_processor=JoinMultivalues()) learningResourceType = Field(output_processor=JoinMultivalues()) + new_lrt = Field(output_processor=JoinMultivalues()) sourceContentType = Field(output_processor=JoinMultivalues()) toolCategory = Field(output_processor=JoinMultivalues()) From 3987d935b7de74459c40c1833c4e3647dd1b2a64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Wed, 16 Feb 2022 16:27:30 +0100 Subject: [PATCH 040/590] sample_spider_alternative: binary / new lrt fields (squashed) - add binary field explanation - add "new_lrt" explanation --- converter/spiders/sample_spider_alternative.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 5eee54ec..58beca53 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -51,6 +51,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - type recommended # - thumbnail recommended # - publisher optional + # - binary optional (only needed if you're 
working with binary files (e.g. .pdf-files), + # if you want to see an example, check out "niedersachsen_abi_spider.py") base.add_value('sourceId', response.url) # if the source doesn't have a "datePublished" or "lastModified"-value in its header or JSON_LD, # you might have to help yourself with a unique string consisting of the datetime of the crawl + self.version @@ -156,6 +158,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) # - learningResourceType recommended # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) + # - new_lrt recommended + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/new_lrt.ttl) # - conditionsOfAccess recommended # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/conditionsOfAccess.ttl) # - containsAdvertisement recommended From f710eb04aa221816741c81fcc31a9a9e66edc51d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Wed, 16 Feb 2022 16:51:42 +0100 Subject: [PATCH 041/590] planet_schule_spider v0.1.3 - add new_lrt -- change: new_lrt uses uuid - disable old learningResourceType --- converter/spiders/planet_schule_spider.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/converter/spiders/planet_schule_spider.py b/converter/spiders/planet_schule_spider.py index d8ce24e4..ac53bac6 100644 --- a/converter/spiders/planet_schule_spider.py +++ b/converter/spiders/planet_schule_spider.py @@ -19,7 +19,7 @@ class PlanetSchuleSpider(RSSBase): start_urls = [ "https://www.planet-schule.de/data/planet-schule-vodcast-komplett.rss" ] - version = "0.1.2" + version = "0.1.3" # last update: 2022-02-16 # Planet Schule allows us to crawl their site, therefore ignore the robots.txt directions, but don't hammer the # site while debugging custom_settings = { @@ -99,11 +99,17 @@ def getValuespaces(self, response): '//div[@class="sen_info_v2"]//p[contains(text(),"Fächer")]/parent::*/parent::*/div[last()]/p/a//text()' ).getall() valuespaces.add_value("discipline", discipline) - lrt = ValuespaceHelper.mimetypeToLearningResourceType( - response.meta["item"].xpath("enclosure/@type").get() - ) - if lrt: - valuespaces.add_value("learningResourceType", lrt) + + # # ToDo: remove old learningResourceType after crawler version 0.1.4 + # # since the old learningResourceType is getting phased out -> it is replaced by new_lrt + # lrt = ValuespaceHelper.mimetypeToLearningResourceType( + # response.meta["item"].xpath("enclosure/@type").get() + # ) + # if lrt: + # valuespaces.add_value("learningResourceType", lrt) + + valuespaces.add_value('new_lrt', "3616febb-8cf8-4503-8f80-ebc552d85506") + # "TV-Sendung und Video-Podcast" return valuespaces @staticmethod From f80ec1823c4961ec15f15c0b7ebbe05ad99585af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Thu, 17 Feb 2022 14:57:40 +0100 Subject: [PATCH 042/590] kindoergarten_spider.py v0.1.4 (squashed): - change: use uuid-value for new_lrt - add: detection for different kinds of new_lrt values (e.g. 
"Arbeitsblatt" by url path) kindoergarten_spider v0.1.3: - add rudimentary scrapy contract - add new_lrt - The API-structure slightly changed, therefore updated the sitemap example code --- converter/spiders/kindoergarten_spider.py | 34 +++++++++++++++-------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/converter/spiders/kindoergarten_spider.py b/converter/spiders/kindoergarten_spider.py index 993ec94c..a6e76535 100644 --- a/converter/spiders/kindoergarten_spider.py +++ b/converter/spiders/kindoergarten_spider.py @@ -7,7 +7,7 @@ from converter.constants import Constants from converter.items import LomGeneralItemloader, LomBaseItemloader, LomTechnicalItemLoader, \ - LicenseItemLoader, ResponseItemLoader, LomEducationalItemLoader, ValuespaceItemLoader + LicenseItemLoader, ResponseItemLoader, LomEducationalItemLoader, ValuespaceItemLoader, BaseItem from converter.spiders.base_classes import LomBase from converter.util.sitemap import SitemapEntry, from_xml_response @@ -21,9 +21,7 @@ class KindoergartenSpider(scrapy.Spider, LomBase): start_urls = ['https://kindoergarten.wordpress.com/sitemap.xml'] name = 'kindoergarten_spider' - version = '0.1.2' - # TODO: - # do additional urls need to be skipped? + version = '0.1.4' # last update: 2022-04-14 skip_these_urls = [ '/impressum/', '/nutzungsbedingungen/', @@ -43,18 +41,21 @@ def parse(self, response: scrapy.http.XmlResponse, **kwargs): """ parse XML sitemap for items and afterwards follow individual items by their item.loc (url) with a callback to parse_site + + Scrapy Contracts: + @url https://kindoergarten.wordpress.com/sitemap.xml + @returns requests 105 """ # one url element usually looks like this: # - # https://kindoergarten.wordpress.com/2017/10/20/wuerfelblatt-trauben-bis-3-0047/ - # + # https://kindoergarten.wordpress.com/2018/07/30/buchstabe-k-0095/ # - # https://kindoergarten.files.wordpress.com/2017/08/ankuendigung-wuerfelblatt_trauben_bis3.jpg - # Ankuendigung-Wuerfelblatt_Trauben_bis3 + # https://kindoergarten.files.wordpress.com/2018/05/ankucc88ndigung_buchstabenk.jpg + # ankündigung_buchstabenK # - # 2018-05-29T20:47:12+00:00 + # 2018-05-29T21:12:41+00:00 # monthly - # + # items = from_xml_response(response) for item in items: @@ -71,7 +72,7 @@ def parse(self, response: scrapy.http.XmlResponse, **kwargs): if self.hasChanged(response) and skip_check is False: yield response.follow(item.loc, callback=self.parse_site, cb_kwargs={'sitemap_entry': item}) - def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None): + def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None) -> BaseItem: """ parses metadata from an individual item both by its HtmlResponse and its sitemap tags @@ -144,7 +145,16 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE vs.add_value('discipline', 'Allgemein') vs.add_value('educationalContext', 'Elementarbereich') # vs.add_value('toolCategory', 'noGeneralDataProtectionRegulation') - vs.add_value('learningResourceType', 'other_asset_type') + # ToDo: remove old learningResourceType code when reaching crawler v0.1.4 + # vs.add_value('learningResourceType', 'other_asset_type') + vs.add_value('new_lrt', "65330f23-2802-4789-86ee-c21f9afe74b1") + # default for all scrapy items: "Frühkindliches Bildungsangebot und KITA", "Lehr- und Lernmaterial" + if "arbeitsblatt" in response.url: + vs.add_value('new_lrt', "36e68792-6159-481d-a97b-2c00901f4f78") # "Arbeitsblatt" + if "spiel" in response.url or 
"wuerfelblatt" in response.url: + vs.add_value('new_lrt', "b0495f44-b05d-4bde-9dc5-34d7b5234d76") # "Lernspiel" + if "malvorlage" in response.url: + vs.add_value('new_lrt', "39db0dbd-cb6f-4153-910f-9f11177b48f2") # "Mal- und Bastelvorlage" base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() From 62f67a4914791c64326d9e8ea6a8f0bb37423bd9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Mon, 21 Feb 2022 14:51:15 +0100 Subject: [PATCH 043/590] fix rss_base / update irights_spider.py v0.1.1 (squashed) irights_spider v0.1.1: - add missing start_requests()-method since the crawler wasn't starting up properly (it previously collected 0 items and quit) - add new_lrt values -- change: new_lrt uses uuids -- change: new_lrt to: "Webseite und Portal (stabil)" rss_base.py: - fix: oversight regarding lom.technical.duration -- not all RSS-Feeds are videos or podcasts, therefore the LOM.technical.duration field only needs to be set if it's available in the XML --- converter/spiders/base_classes/rss_base.py | 5 ++++- converter/spiders/irights_spider.py | 11 ++++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/converter/spiders/base_classes/rss_base.py b/converter/spiders/base_classes/rss_base.py index c4c370db..64952fef 100644 --- a/converter/spiders/base_classes/rss_base.py +++ b/converter/spiders/base_classes/rss_base.py @@ -74,7 +74,10 @@ def getLOMTechnical(self, response): # technical.add_value('size', item.xpath('enclosure/@length').get()) # technical.add_value('location', item.xpath('enclosure/@url').get()) technical.add_value("format", "text/html") - technical.add_value("duration", response.meta["item"].xpath("duration//text()").get().strip()) + if response.meta["item"].xpath("duration//text()").get() is not None: + # not all RSS-Feeds hold a "duration"-field (e.g. 
text-based article-feeds don't) + # therefore we need to make sure that duration is only set where it's appropriate + technical.add_value("duration", response.meta["item"].xpath("duration//text()").get().strip()) technical.add_value( "location", response.meta["item"].xpath("link//text()").get() ) diff --git a/converter/spiders/irights_spider.py b/converter/spiders/irights_spider.py index ed0fe7e8..9c8da06d 100644 --- a/converter/spiders/irights_spider.py +++ b/converter/spiders/irights_spider.py @@ -1,17 +1,20 @@ -from .base_classes import LomBase, RSSBase +from scrapy import Request +from .base_classes import LomBase, RSSBase -# Spider to fetch RSS from planet schule class IRightsSpider(RSSBase): name = "irights_spider" friendlyName = "iRights.info" start_urls = ["https://irights.info/feed"] - version = "0.1.0" + version = "0.1.1" # last update: 2022-02-21 def __init__(self, **kwargs): RSSBase.__init__(self, **kwargs) + def start_requests(self): + yield Request(url=self.start_urls[0], callback=self.parse) + def getLOMGeneral(self, response): general = RSSBase.getLOMGeneral(self, response) general.add_value( @@ -35,4 +38,6 @@ def getValuespaces(self, response): valuespaces.add_value("educationalContext", "erwachsenenbildung") valuespaces.add_value("discipline", "700") # Wirtschaftskunde valuespaces.add_value("discipline", "48005") # Gesellschaftskunde + # ToDo: confirm new_lrt values + valuespaces.add_value("new_lrt", "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9") # "Webseite und Portal (stabil)" return valuespaces From fc4fbdf190b4530f82313cf15ca8523b2d05b967 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Mon, 21 Feb 2022 17:32:31 +0100 Subject: [PATCH 044/590] mediothek_pixiothek_spider v0.1.1 - fix: Crawler was not starting anymore due to missing start_requests() method - rework parse()-method to make use of scrapy.http.TextResponse json()-method - minor code cleanup -- rename variable shadows --- .../spiders/mediothek_pixiothek_spider.py | 68 ++++++++----------- 1 file changed, 30 insertions(+), 38 deletions(-) diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index bfe81832..e7ddda53 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -1,10 +1,11 @@ -import json - +import scrapy +from scrapy import Request from scrapy.spiders import CrawlSpider + +from converter.constants import * from converter.items import * from .base_classes import LomBase -from converter.constants import * -import scrapy + class MediothekPixiothekSpider(CrawlSpider, LomBase): """ @@ -16,7 +17,7 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): name = "mediothek_pixiothek_spider" url = "https://www.schulportal-thueringen.de/" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "MediothekPixiothek" # name as shown in the search ui - version = "0.1" # the version of your crawler, used to identify if a reimport is necessary + version = "0.1.1" # last update: 2022-02-21 start_urls = [ "https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" ] @@ -24,47 +25,40 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) - def parse(self, response: scrapy.http.Response): + def start_requests(self): + for url in self.start_urls: + yield Request(url=url, 
callback=self.parse) - # Call Splash only once per page (that contains multiple XML elements). + def parse(self, response: scrapy.http.TextResponse, **kwargs): data = self.getUrlData(response.url) response.meta["rendered_data"] = data - elements = json.loads(response.body_as_unicode()) - for i, element in enumerate(elements): - copyResponse = response.copy() - - # Passing the dictionary for easier access to attributes. - copyResponse.meta["item"] = element - - # In case JSON string representation is preferred: - json_str = json.dumps(element, indent=4, sort_keys=True, ensure_ascii=False) - copyResponse._set_body(json_str) - print(json_str) - - if self.hasChanged(copyResponse): - yield self.handleEntry(copyResponse) - - # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. - LomBase.parse(self, copyResponse) + # as of Scrapy 2.2 the JSON of a TextResponse can be loaded like this, + # see: https://doc.scrapy.org/en/latest/topics/request-response.html#scrapy.http.TextResponse.json + elements = response.json() + for element in elements: + copy_response = response.copy() + # Passing the dictionary for easier access to its attributes. + copy_response.meta["item"] = element + yield LomBase.parse(self, response=copy_response) # def _if_exists_add(self, edu_dict: dict, element_dict: dict, edu_attr: str, element_attr: str): # if element_attr in element_dict: # edu_dict[edu_attr] = element_dict[element_attr] - def getId(self, response): + def getId(self, response) -> str: # Element response as a Python dict. - element_dict = response.meta["item"] - - return element_dict["id"] + element_dict: dict = response.meta["item"] + element_id: str = element_dict["id"] + return element_id def getHash(self, response): # Element response as a Python dict. element_dict = response.meta["item"] + element_id = element_dict["id"] + element_timestamp = element_dict["pts"] # presentation timestamp (PTS) - id = element_dict["id"] - pts = element_dict["pts"] # date_object = datetime.strptime(hash, "%Y-%m-%d %H:%M:%S.%f").date() - return id + pts + return element_id + element_timestamp def mapResponse(self, response): r = ResponseItemLoader(response=response) @@ -73,9 +67,6 @@ def mapResponse(self, response): r.add_value("url", self.getUri(response)) return r - def handleEntry(self, response): - return LomBase.parse(self, response) - def getBase(self, response): base = LomBase.getBase(self, response) @@ -116,18 +107,18 @@ def getUri(self, response): return element_dict["downloadUrl"] def getLicense(self, response): - license = LomBase.getLicense(self, response) + license_loader = LomBase.getLicense(self, response) # Element response as a Python dict. element_dict = response.meta["item"] - license.replace_value( + license_loader.replace_value( "internal", Constants.LICENSE_NONPUBLIC if element_dict["oeffentlich"] == "1" else Constants.LICENSE_COPYRIGHT_LAW, ) - return license + return license_loader def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) @@ -138,7 +129,8 @@ def getLOMTechnical(self, response): return technical - def is_public(self, element_dict) -> bool: + @staticmethod + def is_public(element_dict) -> bool: """ Temporary solution to check whether the content is public and only save it if this holds. 
""" From 5c86470e19651f10fdbc616603d1187008aa508f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Tue, 22 Feb 2022 17:57:44 +0100 Subject: [PATCH 045/590] biologie_lernprogramme_spider v0.1.1 - inherit from CrawlSpider - add: start_requests()-method that was missing - expected items after successful crawl: 23 --- converter/spiders/biologie_lernprogramme_spider.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/converter/spiders/biologie_lernprogramme_spider.py b/converter/spiders/biologie_lernprogramme_spider.py index 6b0fd7bd..975784a9 100644 --- a/converter/spiders/biologie_lernprogramme_spider.py +++ b/converter/spiders/biologie_lernprogramme_spider.py @@ -1,13 +1,17 @@ import re +from scrapy import Request +from scrapy.spiders import CrawlSpider + from converter.constants import Constants from .base_classes import LernprogrammeSpiderBase -class BiologieLernprogrammeSpider(LernprogrammeSpiderBase): +class BiologieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): name = "biologie_lernprogramme_spider" friendlyName = "Biologie-Lernprogramme" url = "https://biologie-lernprogramme.de/" + version = "0.1.1" # last update: 2022-02-22 static_values = { "author": { @@ -69,3 +73,7 @@ class BiologieLernprogrammeSpider(LernprogrammeSpiderBase): } ), } + + def start_requests(self): + for url in self.start_urls: + yield Request(url=url, callback=self.parse) From 88ac1673205f2f599941c6150f96ef1f61e6888f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Tue, 22 Feb 2022 18:06:18 +0100 Subject: [PATCH 046/590] chemie_lernprogramme_spider v0.1.1 - inherit from CrawlSpider - add: start_requests()-method that was missing - expected items after successful crawl: 95 --- converter/spiders/chemie_lernprogramme_spider.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/converter/spiders/chemie_lernprogramme_spider.py b/converter/spiders/chemie_lernprogramme_spider.py index 72aa87d5..d5338233 100644 --- a/converter/spiders/chemie_lernprogramme_spider.py +++ b/converter/spiders/chemie_lernprogramme_spider.py @@ -1,13 +1,17 @@ import re +from scrapy import Request +from scrapy.spiders import CrawlSpider + from converter.constants import Constants from .base_classes import LernprogrammeSpiderBase -class ChemieLernprogrammeSpider(LernprogrammeSpiderBase): +class ChemieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): name = "chemie_lernprogramme_spider" friendlyName = "Chemie-Lernprogramme" url = "https://chemie-lernprogramme.de/" + version = "0.1.1" # last update: 2022-02-22 static_values = { "author": { @@ -69,3 +73,7 @@ class ChemieLernprogrammeSpider(LernprogrammeSpiderBase): } ), } + + def start_requests(self): + for url in self.start_urls: + yield Request(url=url, callback=self.parse) From e23af477d5ae5651abe8d742910be2f08f6d00d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Tue, 22 Feb 2022 18:14:35 +0100 Subject: [PATCH 047/590] quizdidaktik_spider v0.1.1 - inherit from CrawlSpider - add: start_requests()-method that was missing - expected items after successful crawl: 29 --- converter/spiders/quizdidaktik_spider.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/converter/spiders/quizdidaktik_spider.py b/converter/spiders/quizdidaktik_spider.py index 7febafdc..ebc9cc29 100644 --- 
a/converter/spiders/quizdidaktik_spider.py +++ b/converter/spiders/quizdidaktik_spider.py @@ -1,11 +1,15 @@ +from scrapy import Request +from scrapy.spiders import CrawlSpider + from converter.constants import Constants from .base_classes import LernprogrammeSpiderBase -class QuizdidaktikSpider(LernprogrammeSpiderBase): +class QuizdidaktikSpider(LernprogrammeSpiderBase, CrawlSpider): name = "quizdidaktik_spider" friendlyName = "Quizdidaktik" url = "https://quizdidaktik.de/" + version = "0.1.1" # last update: 2022-02-22 static_values = { "author": { @@ -50,3 +54,7 @@ def map_row(self, row: dict) -> dict: row["thumbnail"] ), } + + def start_requests(self): + for url in self.start_urls: + yield Request(url=url, callback=self.parse) From d1bb667e65f74b856492386dd57be0df7b6d0d3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Thu, 24 Feb 2022 21:04:11 +0100 Subject: [PATCH 048/590] grundschulkoenig_spider.py v0.0.3 (squashed) - fix: start_urls -- the sitemap address changed sometime in the last 2 months - change: use uuids for new_lrt - (items_scraped after a full crawl: 575) grundschulkoenig_spider v0.0.2 - initial work on the crawler was done by "-1" in the "docs_and_tests"-branch a year ago. -- basically rebuilt the crawler on top of that structure and simplified parts of it - rework: crawler navigation -- now using url exclusion lists to filter the unwanted sub-pages out before crawling them - add: new_lrt - add: competency_description (classification.description) - items_scraped after a full crawl: 571 --- converter/spiders/grundschulkoenig_spider.py | 185 +++++++++++++++++++ 1 file changed, 185 insertions(+) create mode 100644 converter/spiders/grundschulkoenig_spider.py diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py new file mode 100644 index 00000000..ed49e1b0 --- /dev/null +++ b/converter/spiders/grundschulkoenig_spider.py @@ -0,0 +1,185 @@ +import re + +import scrapy +from scrapy import Request + +from converter.constants import Constants +from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ + LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, PermissionItemLoader, ResponseItemLoader, \ + LomLifecycleItemloader, LomClassificationItemLoader +from converter.spiders.base_classes import LomBase +from converter.util.sitemap import from_xml_response, SitemapEntry + + +class GrundSchulKoenigSpider(scrapy.Spider, LomBase): + """ + scrapes the Grundschulkönig website. 
+ """ + + start_urls = ['https://www.grundschulkoenig.de/sitemap.xml?sitemap=pages&cHash=b8e1a6633393d69093d0ebe93a3d2616'] + name = 'grundschulkoenig_spider' + version = "0.0.3" # last update: 2022-04-14 + excluded_url_paths = ["/blog/", + "/rechtliches/", + "/footer-bottom/", + "/suche/", + "/404-page-not-found/"] + excluded_overview_pages = [ + "https://www.grundschulkoenig.de/deutsch/", + "https://www.grundschulkoenig.de/englisch/", + "https://www.grundschulkoenig.de/hsu-sachkunde/", + "https://www.grundschulkoenig.de/mehr/jahreskreis/" + "https://www.grundschulkoenig.de/mathe/", + "https://www.grundschulkoenig.de/musikkunst/kunst/", + "https://www.grundschulkoenig.de/musikkunst/musik/", + "https://www.grundschulkoenig.de/religion/", + "https://www.grundschulkoenig.de/weitere-faecher/", + "https://www.grundschulkoenig.de/vorschule/", + "https://www.grundschulkoenig.de/", + "https://www.grundschulkoenig.de/links/", + "https://www.grundschulkoenig.de/suchergebnisse/", + "https://www.grundschulkoenig.de/landing/", + "https://www.grundschulkoenig.de/globale-elemente/", + "" + ] + + def start_requests(self): + for url in self.start_urls: + yield Request(url=url, callback=self.parse_sitemap) + pass + + def getHash(self, response=None) -> str: + pass + + def getId(self, response=None) -> str: + pass + + def parse_sitemap(self, response: scrapy.http.XmlResponse): + """ + one url element usually looks like this:: + + + https://www.grundschulkoenig.de/mathe/1-klasse/zahlenraum-10/ + 2021-02-03T11:44:34+01:00 + 0.5 + + + Scrapy Contracts: + @url https://www.grundschulkoenig.de/sitemap.xml?sitemap=pages&cHash=06e4f67db47c88d09df2534dfa2ab810 + @returns requests 100 + """ + + items = from_xml_response(response) + for item in items: + response = response.copy() + response.meta['sitemap_entry'] = item + skip_url = False + for full_url in self.excluded_overview_pages: + # We don't want to parse the overview pages, but only the specific topic-pages + full_url_regex = re.compile(full_url) + if full_url_regex.fullmatch(item.loc) is not None: + skip_url = True + for url_pattern in self.excluded_url_paths: + current_page_regex = re.compile(url_pattern) + if current_page_regex.search(item.loc) is not None: + skip_url = True + if self.hasChanged(response) and skip_url is False: + yield response.follow(item.loc, callback=self.parse, cb_kwargs={'sitemap_entry': item}) + + def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None): + title = response.xpath('//span[@class="nav__crumb nav__crumb--current"]/span/text()').get() + # content = response.xpath('//div[@class="page__content"]') + # Worksheets are grouped, sometimes several worksheet-containers per page exist + # worksheet_containers = response.xpath('//div[@class="module-worksheet"]') + # the worksheet_containers hold the links to invididual worksheet .pdf files + + base = BaseItemLoader(response=response) + base.add_value("sourceId", response.url) + hash_temp = str(sitemap_entry.lastmod + self.version) + base.add_value("hash", hash_temp) + base.add_value("type", Constants.TYPE_MATERIAL) + thumbnail_url = response.xpath('//meta[@property="og:image"]/@content').get() + if thumbnail_url is not None: + base.add_value('thumbnail', thumbnail_url) + base.add_value('lastModified', sitemap_entry.lastmod) + lom = LomBaseItemloader() + general = LomGeneralItemloader(response=response) + general.add_value('title', title) + description: str = response.xpath('//meta[@name="description"]/@content').get() + general.add_value('description', 
description) + # ToDo: check if "keywords" are available at the source when the next crawler update becomes necessary + lom.add_value("general", general.load_item()) + + technical = LomTechnicalItemLoader() + technical.add_value('format', 'text/html') + technical.add_value('location', sitemap_entry.loc) + lom.add_value("technical", technical.load_item()) + + lifecycle = LomLifecycleItemloader() + lifecycle.add_value('role', "publisher") + lifecycle.add_value('url', "https://www.grundschulkoenig.de/rechtliches/impressum/") + lifecycle.add_value('email', "kontakt@grundschulkoenig.de") + lifecycle.add_value('organization', "Grundschulkönig GmbH") + lom.add_value("lifecycle", lifecycle.load_item()) + + edu = LomEducationalItemLoader() + lom.add_value("educational", edu.load_item()) + + classification = LomClassificationItemLoader() + # competency description covers "Lernziele" and "Aufgaben" of the individual materials, + # not necessarily available for every crawled item + competency_description: list = response.xpath( + '//div[@class="aims__aim"]/span[@class="aim__bodytext"]/ul/li/text()').getall() + if len(competency_description) > 0: + # if there's no competency_description available, don't bother saving the empty list + classification.add_value('description', competency_description) + lom.add_value("classification", classification.load_item()) + + base.add_value("lom", lom.load_item()) + + vs = ValuespaceItemLoader() + vs.add_value('conditionsOfAccess', 'no_login') + vs.add_value('containsAdvertisement', 'yes') + vs.add_value('price', "yes_for_additional") + vs.add_value('accessibilitySummary', 'none') + # Datenschutzerklaerung -> https://www.grundschulkoenig.de/rechtliches/datenschutzerklaerung/ + vs.add_value('dataProtectionConformity', 'noGeneralDataProtectionRegulation') + vs.add_value('intendedEndUserRole', ["teacher", "learner", "parent"]) + if "/deutsch/" in response.url: + vs.add_value('discipline', 'Deutsch') + if "/englisch/" in response.url: + vs.add_value('discipline', 'Englisch') + if "/hsu-sachkunde/" in response.url: + vs.add_value('discipline', 'Sachunterricht') + if "/mathe/" in response.url: + vs.add_value('discipline', "Mathematik") + if "/musikkunst/musik/" in response.url: + vs.add_value('discipline', "Musik") + if "/musikkunst/kunst/" in response.url: + vs.add_value('discipline', "Kunst") + if "/religion/" in response.url: + vs.add_value('discipline', "Religionsunterricht") + vs.add_value('discipline', 'Allgemein') + vs.add_value('educationalContext', 'Primarstufe') + vs.add_value('sourceContentType', "Unterrichtsmaterial- und Aufgaben-Sammlung") + vs.add_value('learningResourceType', 'other_asset_type') + # ToDo: new_lrt + if "/vorschule/" in response.url: + vs.add_value('educationalContext', "Elementarbereich") + vs.add_value('new_lrt', "65330f23-2802-4789-86ee-c21f9afe74b1") # "Frühkindliches Bildungsangebot und KITA" + vs.add_value('new_lrt', ["5098cf0b-1c12-4a1b-a6d3-b3f29621e11d", "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9"]) + # "Unterrichtsbaustein", "Webseite und Portal (stabil) + base.add_value("valuespaces", vs.load_item()) + + lic = LicenseItemLoader() + lic.add_value('url', Constants.LICENSE_COPYRIGHT_LAW) + base.add_value("license", lic.load_item()) + + permissions = PermissionItemLoader(response=response) + base.add_value("permissions", permissions.load_item()) + + response_loader = ResponseItemLoader() + response_loader.add_value('url', response.url) + base.add_value("response", response_loader.load_item()) + + yield base.load_item() From 
e710681b4362dd1215e248ebf9a7460478bf2f20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Mon, 28 Feb 2022 18:20:10 +0100 Subject: [PATCH 049/590] memucho_spider v0.1.1 - implement missing start_requests()-method - improve keyword list (by cleaning up the placeholder values and using a set to filter out duplicates) - change: description is now gathered from the header instead of text from the body - use "DateModified" for hash instead of time of crawl - add new_lrt -- change: new_lrt uses uuid - optimize imports --- converter/spiders/memucho_spider.py | 147 ++++++++++++++++++---------- 1 file changed, 96 insertions(+), 51 deletions(-) diff --git a/converter/spiders/memucho_spider.py b/converter/spiders/memucho_spider.py index 4f124fd1..bc040de3 100644 --- a/converter/spiders/memucho_spider.py +++ b/converter/spiders/memucho_spider.py @@ -1,87 +1,126 @@ +import logging + +import scrapy +from scrapy import Request +from scrapy.exceptions import DropItem from scrapy.spiders import CrawlSpider -from converter.items import * -import time -from .base_classes import LomBase, JSONBase -import json + from converter.constants import Constants +from .base_classes import LomBase, JSONBase + -# Spider to fetch RSS from planet schule class MemuchoSpider(CrawlSpider, LomBase, JSONBase): name = "memucho_spider" friendlyName = "memucho" url = "https://memucho.de" start_urls = ["https://memucho.de/api/edusharing/search?pageSize=999999"] - version = "0.1" + version = "0.1.1" # last update: 2022-02-28 + + # The crawler uses the memucho API with the following item structure (example): + # { + # "TopicId": 199, + # "Name": "UNESCO-Weltkulturerbe", + # "ImageUrl": "https://memucho.de/Images/Categories/199_350s.jpg?t=20161105081125", + # "ItemUrl": "https://memucho.de/UNESCO-Weltkulturerbe/199", + # "Licence": "CC_BY", + # "Author": "Christof", + # "DateModified": 1643303499 + # } + # Since there are lots of deadlinks (Error 404, 500) in the API, the dupefilter will show a lot of dropped items def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) + def start_requests(self): + """ + + :return: + """ + for url in self.start_urls: + yield Request(url=url, callback=self.parse_sitemap) + pass + def mapResponse(self, response): return LomBase.mapResponse(self, response) def getId(self, response): - return response.meta["item"].get("TopicId") + return response.meta["item"].get("ItemUrl") def getHash(self, response): - # @TODO: Api currently does not seem to have a hash value - return time.time() + date_modified = response.meta["item"].get("DateModified") + hash_temp: str = str(date_modified) + self.version + # return time.time() + return hash_temp - def parse(self, response): - data = json.loads(response.body_as_unicode()) + def parse_sitemap(self, response): + json_items = response.json() - for item in data.get("Items"): - copyResponse = response.copy() - copyResponse.meta["item"] = item - if self.hasChanged(copyResponse): + for item in json_items.get("Items"): + copy_response = response.copy() + copy_response.meta["item"] = item + if self.hasChanged(copy_response): yield scrapy.Request( url=item.get("ItemUrl"), - callback=self.handleLink, + callback=self.parse, meta={"item": item}, ) - def handleLink(self, response): + def parse(self, response): return LomBase.parse(self, response) # thumbnail is always the same, do not use the one from rss def getBase(self, response): + + if response.url == "https://memucho.de/Fehler/404" or response.url == 
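# Illustrative sketch by the editor, not part of this patch: the same two memucho error URLs are
# checked here for the entry itself and further below for the thumbnail, so they could also live in
# one place (constant and helper names below are assumptions, not existing project code):
MEMUCHO_ERROR_PAGES = ("https://memucho.de/Fehler/404", "https://memucho.de/Fehler/500")

def is_memucho_error_page(url: str) -> bool:
    return url in MEMUCHO_ERROR_PAGES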
"https://memucho.de/Fehler/500": + # the API lists dozens of (personal) Wikis that are no longer maintained and forward to deadlinks, + # most of them get filtered by Scrapy's dupefilter, but some might slip through - so we drop those as well + raise DropItem(f"Deadlink found for entry {response.url}") + base = LomBase.getBase(self, response) - thumb = response.xpath('//meta[@property="og:image"]//@content').get() - if thumb: - base.add_value("thumbnail", self.url + thumb.replace("_350", "_1000")) - # base.add_value('thumbnail', self.url + '/Images/Categories/' + str(self.getId(response)) + '_1000.jpg') + thumbnail_url = response.meta["item"].get("ImageUrl") + # the API holds urls to thumbnails, but some thumbnails forward to Deadlinks - we try to filter them out: + if thumbnail_url == "https://memucho.de/Fehler/404" or thumbnail_url == "https://memucho.de/Fehler/500": + logging.warning(f"Deadlink found for thumbnail_url: {thumbnail_url}") + if thumbnail_url is not None: + base.add_value("thumbnail", thumbnail_url) + # alternative Thumbnail (just in case it's needed in the future): + # thumb = response.xpath('//meta[@property="og:image"]//@content').get() return base def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"].get("Name").strip()) - general.add_value( - "keyword", - list( - filter( - lambda x: x, - map( - lambda x: x.strip(), - response.xpath( - '//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()' - ).getall(), - ), - ) - ), - ) - description = "\n".join( - list( - filter( - lambda x: x, - map( - lambda x: x.strip(), - response.xpath( - '//*[@id="ContentModuleApp"]//*[@content-module-type="inlinetext"]//p//text()' - ).getall(), - ), - ) + # ToDo confirm if we keep these keywords or not, this has always been a workaround-solution + # keywords are grabbed from the link-descriptions of "Untergeordnete Themen" + # keywords = response.xpath('//*[@class="topic-name"]//text()').getall() + keyword_set = set( + filter( + lambda x: x, + map( + lambda x: x.strip(), + response.xpath( + '//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()' + ).getall(), + ), ) - ).strip() - general.add_value("description", description) + ) + keyword_set.remove("{{category.Name}}") # remove placeholders + general.add_value("keyword", list(keyword_set)) + description_from_header = response.xpath('//meta[@name="description"]/@content').getall() + # description_from_body = "\n".join( + # list( + # filter( + # lambda x: x, + # map( + # lambda x: x.strip(), + # response.xpath( + # '//*[@id="ContentModuleApp"]//*[@content-module-type="inlinetext"]//p//text()' + # ).getall(), + # ), + # ) + # ) + # ).strip() + general.add_value("description", description_from_header) return general def getLOMTechnical(self, response): @@ -91,10 +130,16 @@ def getLOMTechnical(self, response): return technical def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value("url", Constants.LICENSE_CC_BY_40) - return license + license_loader = LomBase.getLicense(self, response) + license_loader.add_value("url", Constants.LICENSE_CC_BY_40) + author = response.meta["item"].get("Author") + # the author information comes straight from the memucho API and consists of memucho usernames + # ToDo: confirm if memucho usernames are desired values for author information + if author is not None: + license_loader.add_value("author", author) + return license_loader def getValuespaces(self, response): valuespaces = 
LomBase.getValuespaces(self, response) + valuespaces.add_value("new_lrt", "6b9748e4-fb3b-4082-ae08-c7a11c717256") # "Wiki (dynamisch)" return valuespaces From a54207bb22a5273b61f08039aec59a3595858e4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Fri, 4 Mar 2022 17:21:30 +0100 Subject: [PATCH 050/590] leifi_spider v0.1.1 - fix: the crawler used outdated HTMLParser methods, which are now within Python's "html"-library - add: explanations in regards to local debugging/testing of the crawler - add: version declaration - rename variable to avoid variable shadowing --- converter/spiders/leifi_spider.py | 65 +++++++++++++++++++------------ 1 file changed, 40 insertions(+), 25 deletions(-) diff --git a/converter/spiders/leifi_spider.py b/converter/spiders/leifi_spider.py index 96c29f31..8dc0d1eb 100644 --- a/converter/spiders/leifi_spider.py +++ b/converter/spiders/leifi_spider.py @@ -1,17 +1,34 @@ -from converter.items import * -from .base_classes import LomBase -from converter.valuespace_helper import Valuespaces -import requests import html -from converter.constants import Constants +import os +import pathlib + +import requests import scrapy -# LEIFIphysik spider for xml data file +from converter.constants import Constants +from converter.items import * +from converter.valuespace_helper import Valuespaces +from .base_classes import LomBase + + class LeifiSpider(scrapy.Spider, LomBase): + """ + LeifiSpider uses a local .xml file (which contains the RSS feed of leifiphysik.de) to crawl its elements. + + This crawler can only be run or locally debugged if you have the "leifi_feed_rss.xml" file + in the correct directory, either locally or on your HTTP-Server in "/sources/leifi_feed_rss.xml". + """ name = "leifi_spider" friendlyName = "LEIFIphysik" url = "https://www.leifiphysik.de/" - rssUrl = "http://localhost/sources/leifi_feed_rss.xml" + version = "0.1.1" # last update: 2022-03-04 + # ToDo: enable the localhost rssUrl + # rssUrl = "http://localhost/sources/leifi_feed_rss.xml" + + # For local testing/debugging ONLY: + # first create a folder in this project root folder called 'sources' and add the 'leifi_feed_rss.xml' + # ToDo: don't forget to enable the localhost rssUrl before commiting your changes! 
+ rssUrl = pathlib.Path(os.path.abspath('sources/leifi_feed_rss.xml')).as_uri() def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) @@ -21,17 +38,17 @@ def getUri(self, response): return response.meta["item"].xpath("url_datensatz//text()").get() def start_requests(self): - yield scrapy.Request(url=self.rssUrl, callback=self.parseList) + yield scrapy.Request(url=self.rssUrl, callback=self.parse_xml) - def parseList(self, response): + def parse_xml(self, response): ids = [] for item in response.xpath("//elixier/datensatz"): - id = item.xpath("id_local//text()").get() - if not id in ids: - ids.append(id) - copyResponse = response.copy() - copyResponse.meta["item"] = item - yield self.parse(copyResponse) + item_id = item.xpath("id_local//text()").get() + if item_id not in ids: + ids.append(item_id) + copy_response = response.copy() + copy_response.meta["item"] = item + yield self.parse(copy_response) def parse(self, response): return LomBase.parse(self, response) @@ -73,22 +90,20 @@ def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) general.add_value( "title", - HTMLParser().unescape(response.meta["item"].xpath("titel//text()").get()), + html.unescape(response.meta["item"].xpath("titel//text()").get()), ) general.add_value( "language", response.meta["item"].xpath("sprache//text()").get() ) general.add_value( "keyword", - HTMLParser() - .unescape(response.meta["item"].xpath("schlagwort//text()").get()) - .split("; "), + html.unescape(response.meta["item"].xpath("schlagwort//text()").get()).split("; "), ) desc = response.meta["item"].xpath("beschreibung//text()").get().strip() # dirty cleaning of invalid descriptions # not perfect yet, these objects also appear inside the content if not desc.startswith("swiffyobject_"): - general.add_value("description", HTMLParser().unescape(desc)) + general.add_value("description", html.unescape(desc)) return general def getLOMTechnical(self, response): @@ -100,10 +115,10 @@ def getLOMTechnical(self, response): return technical def getLicense(self, response): - license = LomBase.getLicense(self, response) + license_loader = LomBase.getLicense(self, response) if ( - response.meta["item"].xpath("rechte//text()").get() - == "Keine Angabe, es gilt die gesetzliche Regelung" + response.meta["item"].xpath("rechte//text()").get() + == "Keine Angabe, es gilt die gesetzliche Regelung" ): - license.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) - return license + license_loader.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) + return license_loader From 02928e82ea929b8a21a3cf4074731434ba0617b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Tue, 15 Mar 2022 18:18:36 +0100 Subject: [PATCH 051/590] rpi_virtuell_spider v0.0.6 - due to changes in rpi-virtuell's "wp-json"-API, the crawler needed several adjustments -- the initial API response is a proper dictionary again (instead of a weirdly structured list that we needed to manually clean up) -- the "json_ld"-script seems to be completely missing from the individual DOM - add: try to grab author name from header if the wp-json item doesn't hold any author names - fix: dateModified, datePublished - remove: lifecycle.organization info (since the data is no longer supplied by a json_ld, we can't reliably fetch it anymore) --- converter/spiders/rpi_virtuell_spider.py | 136 +++++++---------------- 1 file changed, 40 insertions(+), 96 deletions(-) diff --git a/converter/spiders/rpi_virtuell_spider.py 
b/converter/spiders/rpi_virtuell_spider.py index ac4d9889..1bd9f442 100644 --- a/converter/spiders/rpi_virtuell_spider.py +++ b/converter/spiders/rpi_virtuell_spider.py @@ -1,5 +1,4 @@ import html -import json import re from typing import Optional @@ -31,7 +30,7 @@ class RpiVirtuellSpider(CrawlSpider, LomBase): # 'DUPEFILTER_DEBUG': True } wp_json_pagination_parameters = { - # wp-json API returns up to 100 records per request, with the amount of pages total depending on the chosen + # wp-json API returns up to 100 records per request, with the amount of pages in total depending on the chosen # pagination parameters, see https://developer.wordpress.org/rest-api/using-the-rest-api/pagination/ 'start_page_number': 0, # number of records that should be returned per request: @@ -53,7 +52,7 @@ class RpiVirtuellSpider(CrawlSpider, LomBase): 'Sekundarstufe': "http://w3id.org/openeduhub/vocabs/educationalContext/sekundarstufe_1", 'Unterrichtende': "" } # copyright is only available as a String (description) on the material_review_url itself, this debug list could be - # deleted once its confirmed with rpi-virtuell which OER model they actually use here: + # deleted once it's confirmed with rpi-virtuell which OER model they actually use here: copyright_debug_list = { 'Zur Wiederverwendung und Veränderung gekennzeichnet': "", 'Zur Wiederverwendung und Veränderung gekennzeichnet\t \t \t\t frei zugänglich': "", @@ -88,7 +87,8 @@ class RpiVirtuellSpider(CrawlSpider, LomBase): 'Gebet/Lied': "", 'Gottesdienstentwurf': "", 'Internetportal': "web page", - 'Lernorte': "", 'Lernstationen': "", + 'Lernorte': "", + 'Lernstationen': "", 'Lokale Einrichtung': "", 'Medien': "audiovisual medium", 'Online Lesson': "", @@ -123,7 +123,7 @@ def start_requests(self): Before starting the actual parsing this method determines in which format the url in start_urls was provided. If "?page="-query-parameters are missing, it attaches these via urljoin before parsing. """ - # typically we want to iterate through all pages, starting at 1: + # typically, we want to iterate through all pages, starting at 1: # https://material.rpi-virtuell.de/wp-json/mymaterial/v1/material/?page=1&per_page=100 # the following method checks if the urls listed in start_urls are in a format that we can use, e.g. 
either ends # with [...]/material/ @@ -152,8 +152,8 @@ def parse(self, response, **kwargs): first_page = int(self.get_first_page_parameter()) last_page = int(self.get_total_pages(response)) - print("LAST PAGE will be: ", last_page) - # first_run_page_number helps avoiding duplicate requests + # logging.debug(f"LAST PAGE will be: {last_page}") + # first_run_page_number helps avoid duplicate requests first_run_page_number = self.get_current_page_number(response) for i in range(first_page, (last_page + 1)): if i == first_run_page_number: @@ -175,9 +175,9 @@ def iterate_through_pages_slowly(self, current_url, response): yield response.follow(current_url, callback=self.parse_page) next_page_number = current_page_number + 1 if current_page_number < last_page: - print("Next Page #: ", next_page_number) + # logging.debug(f"Next Page #: {next_page_number}") next_url = response.urljoin(f'?page={next_page_number}&per_page={self.get_per_page_parameter()}') - print("Next URL will be: ", next_url) + # logging.debug(f"Next URL will be: {next_url}") yield response.follow(next_url, callback=self.parse) def get_first_page_parameter(self) -> int: @@ -214,7 +214,7 @@ def get_current_page_number(response) -> int: @staticmethod def get_total_pages(response) -> str: """ - the number of total_pages that are returned by the "wp_json"-API are dependant on which + the number of total_pages that are returned by the "wp_json"-API are dependent on which "?per_page"-query-parameter was used during a GET-Request. This method grabs "X-WP-TotalPages" from the header to determine how many "wp_json"-pages need to be parsed in @@ -224,53 +224,33 @@ def get_total_pages(response) -> str: :return: the amount of pages that can be returned by the API """ - # the number of total_pages is dependant on how many elements per_page are served during a GET-Request + # the number of total_pages is dependent on how many elements per_page are served during a GET-Request if response.headers.get("X-Wp-TotalPages") is not None: # X-WP-TotalPages is returned as a byte, therefore we need to decode it first total_pages = response.headers.get("X-Wp-TotalPages").decode() # logging.debug("Total Pages: ", total_pages) return total_pages - def parse_page(self, response: scrapy.http.Response = None): + def parse_page(self, response: scrapy.http.TextResponse = None): """ - Parses a "wp_json"-page for individual json items. After fetching an json-item, a dictionary consisting of the + Parses a "wp_json"-page for individual json items. After fetching a json-item, a dictionary consisting of the "material_review_url" and a copy of the json item is passed on to the "get_metadata_from_review_url"-method. :param response: the current "wp_json"-page that needs to be parsed for individual json items """ - print("REACHED PARSE_PAGE") - current_page_json: dict = json.loads(response.body) - # on 2021-09-01: the rpi-virtuell API response format changed - # since, for some reason, the API returns a JSON with keys ranging from ("0", "1", ... "99" and adds - # "width": 1000, - # "height": 700, - # "html": "" - # right before the final closing }, we now have to make sure we're actually parsing an actual element - # and not these last 3 strings. - # ATTENTION: The numbers are strings, so current_page_json[0] won't access a value, - # but current_page_json["0"] will. 
- - # First step is cleaning up the list of valid keys: - current_page_json_keys = list(current_page_json.keys()) - if 'width' in current_page_json_keys: - current_page_json_keys.remove('width') - if 'height' in current_page_json_keys: - current_page_json_keys.remove('height') - if 'html' in current_page_json_keys: - current_page_json_keys.remove('html') - # this should give us a list of keys (strings) that we need to call the individual items with: - for key in current_page_json_keys: - temp_key = str(key) - # we have to access individual items by current_page_json["0"] instead of current_page_json[0] - if isinstance(current_page_json[temp_key], dict) and current_page_json[temp_key] != '': - item_copy: dict = current_page_json[temp_key] - wp_json_item = { - "id": item_copy.get("material_review_url"), - "item": item_copy - } - review_url = item_copy.get("material_review_url") - yield scrapy.Request(url=review_url, callback=self.get_metadata_from_review_url, - cb_kwargs=wp_json_item) + # logging.debug("REACHED PARSE_PAGE") + current_page_json: dict = response.json() + + for item in current_page_json: + item_copy: dict = item + wp_json_item = { + "id": item_copy.get("material_review_url"), + "item": item_copy + } + review_url = item_copy.get("material_review_url") + yield scrapy.Request(url=review_url, + callback=self.get_metadata_from_review_url, + cb_kwargs=wp_json_item) def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs): """ @@ -285,49 +265,18 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) # logging.debug("DEBUG inside get_metadata_from_review_url: response type = ", type(response), # "url =", response.url) - ld_json_string = response.xpath('/html/head/script[@type="application/ld+json"]/text()').get().strip() - ld_json_string = html.unescape(ld_json_string) - - ld_json = json.loads(ld_json_string) - - hash_temp: Optional[str] = None - language_temp: Optional[str] = None - pub_date: Optional[str] = None - organization_id: Optional[str] = None - organization_name: Optional[str] = None - date_modified: Optional[str] = None - # this is a workaround to make sure that we actually grab the following data, - # no matter where they are positioned in the list: - # - dateModified - # - inLanguage - # - datePublished - # - organization_name and url - # e.g.: since there seems to be fluctuation how many elements the "@graph"-Array holds, we can't be sure - # which position "dateModified" actually has: - # sometimes it's ld_json.get("@graph")[2], sometimes on [3] etc., therefore we must check all of them - ld_graph_items = ld_json.get("@graph") - for item in ld_graph_items: - if item.get("dateModified") is not None: - date_modified = item.get("dateModified") # this can be used instead of 'date' in lastModified - hash_temp = item.get("dateModified") + self.version - if item.get("@type") == "WebSite": - language_temp = item.get("inLanguage") - if item.get("@type") == "WebPage": - pub_date = item.get("datePublished") - if item.get("@type") == "Organization": - organization_id = item.get("@id") - organization_name = item.get("name") - base = BaseItemLoader() base.add_value("sourceId", response.url) + date_modified: str = response.xpath('//meta[@property="og:article:modified_time"]/@content').get() + hash_temp = date_modified + self.version base.add_value("hash", hash_temp) # base.add_value("response", super().mapResponse(response).load_item()) - base.add_value("type", Constants.TYPE_MATERIAL) # TODO: is this correct? 
use mapping for edu-context? + base.add_value("type", Constants.TYPE_MATERIAL) base.add_value("thumbnail", wp_json_item.get("material_screenshot")) # base.add_value("lastModified", wp_json_item.get("date")) # is "date" from wp_json for lastModified correct? - base.add_value("lastModified", date_modified) # or is this one better (grabbed from from material_review_url)? + base.add_value("lastModified", date_modified) # or is this one better (grabbed from material_review_url)? lom = LomBaseItemloader() general = LomGeneralItemloader(response=response) @@ -340,10 +289,7 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) raw_description = w3lib.html.strip_html5_whitespace(raw_description) clean_description = w3lib.html.replace_escape_chars(raw_description) general.add_value("description", clean_description) - general.add_value("identifier", wp_json_item.get("id")) - if language_temp is not None: - general.add_value("language", language_temp) kw_temp = list() for item in wp_json_item.get("material_schlagworte"): @@ -352,23 +298,17 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) lom.add_value("general", general.load_item()) technical = LomTechnicalItemLoader() - technical.add_value("format", "text/html") technical.add_value("location", wp_json_item.get("material_review_url")) lom.add_value("technical", technical.load_item()) lifecycle = LomLifecycleItemloader() - if organization_name is not None: - lifecycle.add_value("organization", organization_name) - if organization_id is not None: - lifecycle.add_value("url", organization_id) - if pub_date is not None: - lifecycle.add_value("date", pub_date) - + date_published = response.xpath('//meta[@property="og:article:published_time"]/@content').get() + if date_published is not None: + lifecycle.add_value("date", date_published) lom.add_value("lifecycle", lifecycle.load_item()) educational = LomEducationalItemLoader() - if wp_json_item.get("material_altersstufe") is not None: # age range is returned as a list of --Strings, possible return values are: # e.g. "01-05", "05-10", "10-13", "13-15", "15-19" and "18-99" @@ -386,7 +326,6 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) age_range_item_loader.add_value("fromRange", min(age_range)) age_range_item_loader.add_value("toRange", max(age_range)) educational.add_value("typicalAgeRange", age_range_item_loader.load_item()) - lom.add_value("educational", educational.load_item()) base.add_value("lom", lom.load_item()) @@ -441,7 +380,7 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) cc_by_nc_nd = license_regex_nc_reuse.search(license_description) cc_by_nc_sa = license_regex_nc_reuse_and_change.search(license_description) - # if the RegEx search finds something, it returns a match-object. otherwise by default it returns None + # if the RegEx search finds something, it returns a match-object. 
otherwise, by default it returns None if cc_by_nc_nd is not None: lic.add_value("url", Constants.LICENSE_CC_BY_NC_ND_40) if cc_by_nc_sa is not None: @@ -469,7 +408,12 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) if item.get("name") is not None: if item.get("name").strip() != "": authors.append(item.get("name")) - lic.add_value("author", authors) + if len(authors) == 0: + author_from_header: str = response.xpath('//meta[@name="author"]/@content').get() + if author_from_header is not None: + authors.append(author_from_header) + if len(authors) > 0: + lic.add_value("author", authors) base.add_value("valuespaces", vs.load_item()) From 8cc97e9762640fc190d3579d7e77ef63f8522341 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 11 Apr 2022 00:55:17 +0200 Subject: [PATCH 052/590] umwelt_im_unterricht_spider.py v0.0.3 (squashed) - fix: RuntimeError due to iterating over a currently used set() -- instead: using a copy of that set - add: mapping for "Grundschule" -- since apparently the valuespaces pipeline doesn't automatically map altLabels? (only seems to map prefLabels) - change: parsing local date (de_DE, "08.11.2021") to ISO format (since it's used in the hash) - add: fallback license for items that have missing license information - add: new_lrt values -- change: new_lrt uses uuid -- disabled the previous learningResourceType --- .../spiders/umwelt_im_unterricht_spider.py | 65 ++++++++++++++----- 1 file changed, 48 insertions(+), 17 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index 4dc5f0f8..d6faed5f 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -1,3 +1,4 @@ +import datetime import re import scrapy @@ -32,12 +33,16 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images", # Typ: Bilderserie ] - version = "0.0.2" # last update: 2021-10-08 + version = "0.0.3" # last update: 2022-04-12 topic_urls = set() # urls that need to be parsed will be added here topic_urls_parsed = set() # this set is used for 'checking off' already parsed (individual) topic urls overview_urls_already_parsed = set() # this set is used for 'checking off' already parsed overview_pages EDUCATIONAL_CONTEXT_MAPPING: dict = { + 'Grundschule': "Primarstufe", + # ToDo: find out why the pipeline doesn't map altLabels by itself + # while "Grundschule" is the altLabel for "Primarstufe" in our educationalContext Vocab, + # the valuespaces converter / pipeline only seems to map to 'prefLabel' entries? 'Sekundarstufe': ['Sekundarstufe I', 'Sekundarstufe II'] } DISCIPLINE_MAPPING: dict = { @@ -95,7 +100,7 @@ def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http. 
self.overview_urls_already_parsed.update(overview_urls_parsed) # checking off the (10) URLs that we yielded parsed_urls: set = set() # temporary set used for checking off already visited topics - for url in self.topic_urls: + for url in self.topic_urls.copy(): if url not in self.topic_urls_parsed: # making sure that we don't accidentally crawl individual pages more than once yield scrapy.Request(url=url, callback=self.parse) @@ -116,6 +121,10 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value('sourceId', response.url) date_raw: str = response.xpath('//div[@class="b-cpsuiu-show-info"]/span/text()').get() date_cleaned_up: str = w3lib.html.strip_html5_whitespace(date_raw) + if date_cleaned_up is not None: + # converting the german date format "DD.MM.YYYY" to YYYY-MM-DD + date_iso = datetime.datetime.strptime(date_cleaned_up, "%d.%m.%Y") + date_cleaned_up = date_iso.isoformat() hash_temp = str(date_cleaned_up + self.version) base.add_value('hash', hash_temp) base.add_value('lastModified', date_cleaned_up) @@ -193,21 +202,36 @@ def parse(self, response: scrapy.http.Response, **kwargs): vs = ValuespaceItemLoader() + vs.add_value('new_lrt', "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9") + # 'Webseite und Portal (stabil)' # depending on the website-category, we need to set a specific learningResourceType - # because the value 'website' for all crawled items would not be helpful enough + # because just the value 'website' for all crawled items would not be helpful enough if "/wochenthemen/" in current_url or "/unterrichtsvorschlaege/" in current_url: - vs.add_value('learningResourceType', 'lesson plan') + # vs.add_value('learningResourceType', 'lesson plan') # ToDo + vs.add_value('new_lrt', '7381f17f-50a6-4ce1-b3a0-9d85a482eec0') # Unterrichtsplanung if "/hintergrund/" in current_url: - vs.add_value('learningResourceType', 'Text') + # vs.add_value('learningResourceType', 'Text') # ToDo + vs.add_value('new_lrt', ['b98c0c8c-5696-4537-82fa-dded7236081e', '7381f17f-50a6-4ce1-b3a0-9d85a482eec0']) + # "Artikel und Einzelpublikation" , "Unterrichtsplanung" if "/medien/dateien/" in current_url: - # topics categorized as "Arbeitsmaterial" offer customizable worksheets to teachers - vs.add_value('learningResourceType', 'worksheet') + # topics categorized as "Arbeitsmaterial" offer customizable worksheets to teachers, most of the time + # consisting of both an "Unterrichtsvorschlag" and a worksheet + # vs.add_value('learningResourceType', 'worksheet') # ToDo + vs.add_value('new_lrt', ['36e68792-6159-481d-a97b-2c00901f4f78', '7381f17f-50a6-4ce1-b3a0-9d85a482eec0']) + # "Arbeitsblatt", "Unterrichtsplanung" if "/medien/videos/" in current_url: - vs.add_value('learningResourceType', 'video') + # each video is served together with one or several "Unterrichtsvorschlag"-documents + # vs.add_value('learningResourceType', 'video') # ToDo + vs.add_value('new_lrt', ['7a6e9608-2554-4981-95dc-47ab9ba924de', '7381f17f-50a6-4ce1-b3a0-9d85a482eec0']) + # "Video (Material)" ,"Unterrichtsplanung" if "/medien/bilder/" in current_url: - # topics categorized as "Bilderserie" hold several images in a gallery (with individual licenses) - vs.add_value('learningResourceType', 'image') - + # topics categorized as "Bilderserie" hold several images in a gallery (with individual licenses), + # they also come with one or several "Unterrichtsvorschlag"-documents that are linked to further below + # vs.add_value('learningResourceType', 'image') # ToDo + vs.add_value('new_lrt', ["a6d1ac52-c557-4151-bc6f-0d99b0b96fb9", 
"7381f17f-50a6-4ce1-b3a0-9d85a482eec0"]) + # "Bild (Material)" , "Unterrichtsplanung" + # ToDo: once new_lrt goes live: + # - remove the old learningResourceType with the next crawler update vs.add_value('price', 'no') vs.add_value('containsAdvertisement', 'no') vs.add_value('conditionsOfAccess', 'no login') @@ -237,17 +261,18 @@ def parse(self, response: scrapy.http.Response, **kwargs): educational_context_raw = response.xpath('//div[@class="b-cpsuiu-show-targets"]/ul/li/a/text()').getall() if len(educational_context_raw) >= 1: # the educationalContext-mapping is only done when there's at least one educational_context found - educational_context = list() + educational_context = set() for educational_context_value in educational_context_raw: # self.debug_educational_context_values.add(educational_context_value) if educational_context_value in self.EDUCATIONAL_CONTEXT_MAPPING.keys(): educational_context_value = self.EDUCATIONAL_CONTEXT_MAPPING.get(educational_context_value) if type(educational_context_value) is list: - educational_context.extend(educational_context_value) - else: - educational_context.append(educational_context_value) + for educational_context_list_item in educational_context_value: + educational_context.add(educational_context_list_item) + if type(educational_context_value) is str: + educational_context.add(educational_context_value) if len(educational_context) >= 1: - vs.add_value('educationalContext', educational_context) + vs.add_value('educationalContext', list(educational_context)) base.add_value('valuespaces', vs.load_item()) @@ -257,7 +282,13 @@ def parse(self, response: scrapy.http.Response, **kwargs): if license_url.startswith("http://"): # the license-mapper expects urls that are in https:// format, but UIU uses http:// links to CC-licenses license_url = license_url.replace("http://", "https://") - lic.add_value('url', license_url) + lic.replace_value('url', license_url) + else: + lic.add_value('url', Constants.LICENSE_COPYRIGHT_LAW) + # ToDo: change the fallback-license if necessary + # since there are a lot of articles with missing license-information (especially "Thema der Woche", + # "Bilderserien" and other mixed forms of articles), we're setting the default license to "copyright" until we + # get a response/confirmation from Umwelt-im-Unterricht in regards to what the default should be license_description_raw: str = response.xpath('//div[@class="cc-licence-info"]').get() if license_description_raw is not None: From 342e1dd768968dd561e30ed4f0aa89dd1f557463 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Apr 2022 16:24:38 +0200 Subject: [PATCH 053/590] youtube_spider v0.2.1 - youtube.csv (data export from 2022-04-05) - add: fskRating - fix: replace deprecated logging.warn() with logging.warning() - add: references to the YouTube Data API v3 docs where they are used (to make maintainability easier in the future) --- converter/spiders/youtube_spider.py | 46 ++++++++++++++++++++--------- csv/youtube.csv | 6 ++-- 2 files changed, 36 insertions(+), 16 deletions(-) mode change 100644 => 100755 csv/youtube.csv diff --git a/converter/spiders/youtube_spider.py b/converter/spiders/youtube_spider.py index 01586cc1..60ecdc32 100644 --- a/converter/spiders/youtube_spider.py +++ b/converter/spiders/youtube_spider.py @@ -37,7 +37,7 @@ class YoutubeSpider(Spider): name = "youtube_spider" friendlyName = "Youtube" url = "https://www.youtube.com/" - version = "0.2.0" + version = "0.2.1" # last update: 2022-04-07 @staticmethod def 
get_video_url(item: dict) -> str: @@ -101,11 +101,12 @@ def request_row(self, row: dict) -> Request: def request_channel(self, channel_id: str, meta: dict) -> Request: part = ["snippet", "contentDetails", "statistics"] + # see: https://developers.google.com/youtube/v3/docs/channels request_url = ( - "https://www.googleapis.com/youtube/v3/channels" - + "?part={}&id={}&key={}".format( - "%2C".join(part), channel_id, env.get("YOUTUBE_API_KEY", False) - ) + "https://www.googleapis.com/youtube/v3/channels" + + "?part={}&id={}&key={}".format( + "%2C".join(part), channel_id, env.get("YOUTUBE_API_KEY", False) + ) ) return Request(url=request_url, meta=meta, callback=self.parse_channel) @@ -120,11 +121,12 @@ def parse_channel(self, response: Response) -> Request: def request_playlist(self, playlist_id: str, meta: dict) -> Request: part = ["snippet"] + # see: https://developers.google.com/youtube/v3/docs/playlists request_url = ( - "https://www.googleapis.com/youtube/v3/playlists" - + "?part={}&id={}&key={}".format( - "%2C".join(part), playlist_id, env.get("YOUTUBE_API_KEY"), - ) + "https://www.googleapis.com/youtube/v3/playlists" + + "?part={}&id={}&key={}".format( + "%2C".join(part), playlist_id, env.get("YOUTUBE_API_KEY"), + ) ) return Request(request_url, meta=meta, callback=self.parse_playlist) @@ -137,6 +139,7 @@ def parse_playlist(self, response: Response): def request_playlist_items(self, playlist_id: str, meta: dict) -> Request: part = ["snippet"] + # see: https://developers.google.com/youtube/v3/docs/playlistItems request_url = ( "https://www.googleapis.com/youtube/v3/playlistItems" + "?part={}&playlistId={}&key={}".format( @@ -160,6 +163,7 @@ def parse_playlist_items(self, response: Response): def request_videos(self, ids: List[str], meta: dict): part = ["snippet", "status", "contentDetails"] + # see: https://developers.google.com/youtube/v3/docs/videos request_url = ( "https://www.googleapis.com/youtube/v3/videos" + "?part={}&id={}&key={}".format( @@ -182,7 +186,7 @@ def parse_custom_url(self, response: Response) -> Request: channel_id = match.group(1) return self.request_channel(channel_id, meta=response.meta) else: - logging.warn("Could not extract channel id for {}".format(response.url)) + logging.warning("Could not extract channel id for {}".format(response.url)) class YoutubeLomLoader(LomBase): @@ -190,10 +194,10 @@ class YoutubeLomLoader(LomBase): # - `row`: The row of the CSV file containing the channel or playlist to be scraped with some # additional information regarding all found videos. # - `item`: Information about the video, obtained from the Youtube API. - # - `channel`: Information about the Youtube channel, obtained from the Youtuber API. Only + # - `channel`: Information about the YouTube channel, obtained from the YouTube API. Only # populated if an entire channel was given in the CSV row. - # - `playlist`: Information about the Youtube playlist, obtained from the Youtuber API. These - # information are more relevant than the channel information when a specific playlist was + # - `playlist`: Information about the YouTube playlist, obtained from the YouTube API. This + # information is more relevant than the channel information when a specific playlist was # given in the CSV row. However, when an entire channel was requested, we fall back to the # `uploads` playlist, which provides only a generated title. 
@@ -354,5 +358,19 @@ def getValuespaces(self, response: Response) -> items.ValuespaceItemLoader: valuespaces.add_value( "educationalContext", self.parse_csv_field(row[CSVBase.COLUMN_EDUCATIONAL_CONTEXT]), ) + if "fskRating" in response.meta["item"]["contentDetails"]: + # the majority of videos doesn't have a fskRating, but if they do, we try to map the YT values to our vocab: + fsk_rating_yt: str = response.meta["item"]["contentDetails"]["fskRating"] + # see: https://developers.google.com/youtube/v3/docs/videos#contentDetails.contentRating.fskRating + # YouTube's "fskRating"-property allows a value ("fskUnrated") which isn't in SkoHub-Vocab (yet) + if fsk_rating_yt == "fsk0": + valuespaces.add_value("fskRating", "0") + if fsk_rating_yt == "fsk6": + valuespaces.add_value("fskRating", "6") + if fsk_rating_yt == "fsk12": + valuespaces.add_value("fskRating", "12") + if fsk_rating_yt == "fsk16": + valuespaces.add_value("fskRating", "16") + if fsk_rating_yt == "fsk18": + valuespaces.add_value("fskRating", "18") return valuespaces - diff --git a/csv/youtube.csv b/csv/youtube.csv old mode 100644 new mode 100755 index c6607463..5cd28b13 --- a/csv/youtube.csv +++ b/csv/youtube.csv @@ -61,6 +61,7 @@ https://www.youtube.com/channel/UCaMpov1PPVXGcKYgwHjXB3g/featured,The Virtual Li https://www.youtube.com/channel/UComfd9z6KFVP3nggiME6-7w/featured,German as a Foreign Language,video,28002,Sekundarstufe 2; Erwachsenenbildung,learner; teacher,16,99,de; ar,, https://www.youtube.com/channel/UC7mZyCH5ppdYdJrHuxjJtkw/featured,Educational Robotics,video,"320, 04005",,teacher; learner,16,99,de; en,, ,,,,,,,,,, +,,,,,,,,,, Playlists,,,,,,,,,, https://www.youtube.com/playlist?list=PLC9D2mzTyJeXYa6E1y_d0fc_7-V7BJnSq,DigiFernunterricht,video,720,,teacher,18,99,de,, https://www.youtube.com/playlist?list=PLFhPjADeGDodbVSSL8LE00SNjQIPiyamr,Webinare Deutsches Lehrkräfteforum,video,720,,teacher,18,99,de,, @@ -186,7 +187,7 @@ https://www.youtube.com/playlist?list=PL25spyP49IXbxMAjaFKgB4LLvYB52ELP1,Hui,vid https://www.youtube.com/playlist?list=PL25spyP49IXap7Z7Wxlln1TTXMraWcfI2,Hui,video,460,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, https://www.youtube.com/playlist?list=PL25spyP49IXYzHTLn8L-2u43uX2H2ZzQv,Hui,video,460,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, https://www.youtube.com/playlist?list=PL25spyP49IXaL7wsV-W4hvrYymByjVkL_,Hui,video,380,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher", , ,de,, -,,,,,"learner, teacher",,,de,, +,,,,,,,,,, ,,,,, , , ,,, ,,,,, , , ,,, Kanäle,,,,,,,,,, @@ -204,4 +205,5 @@ https://www.youtube.com/channel/UCwRH985XgMYXQ6NxXDo8npw,Dinge erklärt - Kurzge https://www.youtube.com/channel/UCKjJ1nCoMFTHzQlUtHHBBsw,Akademie für Lerncoaching,video,720,Primarstufe; Sekundarstufe1; Sekundarstufe 2,teacher,,,de,, https://www.youtube.com/channel/UCFSS2FtaFNKMei4jGQOVL3w,Chemie und Bio in der Schule,video,100; 080,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, https://www.youtube.com/channel/UCk0aUAhu9RxfOX1iMXAJ-2g,Chemistry Kicksass,video,100,Sekundarstufe 1; Sekundarstufe 2,learner,,,de,, -https://www.youtube.com/channel/UCWNvo3l-K-X6CPSBcP9NCNg,Chemie - simpleclub,video,100,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,,,de,, \ No newline at end of file +https://www.youtube.com/channel/UCWNvo3l-K-X6CPSBcP9NCNg,Chemie - simpleclub,video,100,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,,,de,, +https://www.youtube.com/channel/UC1a400owZ_Qa-3Ood22cMKg,Ecole Science,video,460,Sekundarstufe 1; Sekundarstufe 2,teacher; learner,10,99,de,, \ No 
newline at end of file From 4ad57723aa75caf1d6822289b16670dacd4ddbf2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sat, 12 Mar 2022 00:21:09 +0100 Subject: [PATCH 054/590] serlo_spider v0.2 (complete overhaul) - WIP! - since the old serlo API isn't available anymore, this crawler uses the new serlo GraphQL API -- for redundancy uses the GraphQL API and the JSON_LD on each page - crawls 8450 Items (2022-03-14) --- converter/spiders/serlo_spider.py | 449 +++++++++++++++++++++--------- 1 file changed, 321 insertions(+), 128 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 10f85421..2064b778 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -1,136 +1,329 @@ -from converter.items import * -from .base_classes import LomBase, JSONBase import json import logging -import html -import re -from converter.constants import Constants + +import requests import scrapy +from scrapy.spiders import CrawlSpider + +from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader +from converter.spiders.base_classes import LomBase -# Spider to fetch API from Serlo -class SerloSpider(scrapy.Spider, LomBase, JSONBase): + +class SerloSpider(CrawlSpider, LomBase): name = "serlo_spider" - friendlyName = "Serlo" - url = "https://de.serlo.org" - version = "0.1.0" + friendlyName = "serlo_spider" + # start_urls = ["https://de.serlo.org"] + API_URL = "https://api.serlo.org/graphql" + # for the API description, please check: https://lenabi.serlo.org/metadata-api + version = "0.2" # last update: 2022-03-14 - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) + graphql_items = list() + # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: + # https://www.dublincore.org/specifications/lrmi/concept_schemes/#educational-audience-role + # https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl + EDU_AUDIENCE_ROLE_MAPPING = { + "administrator": ["manager", "counsellor"], + # A trainer or educator with administrative authority and responsibility. + "general public": "other", + # The public at large. + "mentor": "author", + # Someone who advises, trains, supports, and/or guides. + "peer tutor": ["learner", "other"], + # The peer learner serving as tutor of another learner. + "professional": "other", + # Someone already practicing a profession; an industry partner, or professional development trainer. 
+ "student": "learner", + # "parent": "parent", # no mapping needed + # "teacher": "teacher" # no mapping needed + } - def start_requests(self): - url = self.url + "/entity/api/json/export/article" - # current dummy fallback since the Serlo API is basically down - url = "http://localhost/sources/serlo.json" - yield scrapy.Request(url=url, callback=self.parseList) - - # some fields are having xml entities (for whatever reason), we will unescape them here - def get(self, *params, response): - data = JSONBase.get(self, *params, json=response.meta["json"]) - try: - return HTMLParser().unescape(data) - except: - try: - result = [] - for p in data: - result.append(HTMLParser().unescape(p)) - return result - except: - return data - - def parseList(self, response): - data = json.loads(response.body) - for j in data: - responseCopy = response.replace(url=self.url + j["link"] + "?contentOnly") - responseCopy.meta["json"] = j - if self.hasChanged(responseCopy): - yield scrapy.Request( - url=responseCopy.url, - callback=self.parse, - meta={"json": j, "url": responseCopy.url}, - ) - - def getId(self, response=None): - return self.get("guid", response=response) - - def getHash(self, response=None): - return self.version + self.get("lastModified.date", response=response) - - def parse(self, response): - if not self.get("description", response=response): - logging.info("skipping empty entry in serlo") - return None - return LomBase.parse(self, response) - - def mapResponse(self, response): - r = LomBase.mapResponse(self, response) - text = r.load_item()["text"].split( - "Dieses Werk steht unter der freien Lizenz CC BY-SA 4.0 Information" - )[0] - r.replace_value("text", text) - return r - - def getBase(self, response): - base = LomBase.getBase(self, response) - base.add_value("lastModified", self.get("lastModified.date", response=response)) - base.add_value( - "ranking", - 0.9 - + ( - float(self.get("revisionsCount", response=response)) / 2 - + float(self.get("authorsCount", response=response)) - ) - / 50, + def __init__(self, *a, **kw): + super().__init__(*a, **kw) + self.graphql_items = self.fetch_all_graphql_pages() + # logging.debug(f"Gathered {len(self.graphql_items)} items from the GraphQL API") + + def fetch_all_graphql_pages(self): + all_entities = list() + pagination_string: str = "" + has_next_page = True + while has_next_page is True: + current_page = self.query_graphql_page(pagination_string=pagination_string)["data"]["metadata"]["entities"] + all_entities += current_page["nodes"] + has_next_page = current_page["pageInfo"]["hasNextPage"] + if has_next_page: + pagination_string = current_page["pageInfo"]["endCursor"] + else: + break + return all_entities + + def query_graphql_page(self, amount_of_nodes: int = 500, pagination_string: str = None) -> dict: + amount_of_nodes = amount_of_nodes + # specifies the amount of nodes that shall be requested (per page) from the GraphQL API + # (default: 100 // max: 500) + pagination_string = pagination_string + graphql_metadata_query_body = { + "query": f""" + query {{ + metadata {{ + entities(first: {amount_of_nodes}, after: "{pagination_string}"){{ + nodes + pageInfo {{ + hasNextPage + endCursor + }} + }} + }} + }} + """ + } + request = requests.post( + url=self.API_URL, + headers={ + "Content-Type": "application/json" + }, + json=graphql_metadata_query_body ) - return base - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response=response) - text = self.get("categories", response=response)[0].split("/")[0] - # manual 
mapping to Mathematik - if text == "Mathe": - text = "Mathematik" - valuespaces.add_value("discipline", text) - # for entry in ProcessValuespacePipeline.valuespaces['discipline']: - # if len(list(filter(lambda x:x['@value'].casefold() == text.casefold(), entry['label']))): - # valuespaces.add_value('discipline',entry['id']) - - primarySchool = re.compile("Klasse\s[1-4]$", re.IGNORECASE) - if len( - list(filter(lambda x: primarySchool.match(x), self.getKeywords(response))) - ): - valuespaces.add_value("educationalContext", "Grundschule") - sek1 = re.compile("Klasse\s([5-9]|10)$", re.IGNORECASE) - if len(list(filter(lambda x: sek1.match(x), self.getKeywords(response)))): - valuespaces.add_value("educationalContext", "Sekundarstufe 1") - sek2 = re.compile("Klasse\s1[1-2]", re.IGNORECASE) - if len(list(filter(lambda x: sek2.match(x), self.getKeywords(response)))): - valuespaces.add_value("educationalContext", "Sekundarstufe 2") - return valuespaces - - def getKeywords(self, response): - try: - keywords = list(self.get("keywords", response=response).values()) - except: - keywords = self.get("keywords", response=response) - for c in self.get("categories", response=response): - keywords += c.split("/") - return set(keywords) - - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response=response) - general.add_value("title", self.get("title", response=response)) - general.add_value("keyword", self.getKeywords(response)) - general.add_value("description", self.get("description", response=response)) - return general - - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.add_value("location", response.url) - technical.add_value("format", "text/html") - technical.add_value("size", len(response.body)) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value("url", Constants.LICENSE_CC_BY_SA_40) - return license + return request.json() + + def start_requests(self): + for graphql_item in self.graphql_items: + # logging.debug(f"{graphql_item}") + item_url = graphql_item["id"] + yield scrapy.Request(url=item_url, + callback=self.parse, + cb_kwargs={ + "graphql_item": graphql_item + } + ) + + def getId(self, response=None) -> str: + # we set this value in the parse()-method as 'sourceId' in the BaseItemLoader + pass + + def getHash(self, response=None) -> str: + # we set this value in the parse()-method as 'hash' in the BaseItemLoader + pass + + def parse(self, response, **kwargs): + graphql_json: dict = kwargs.get("graphql_item") + # logging.debug(f"GraphQL Item: {graphql_json}") + + json_ld = response.xpath('//*[@type="application/ld+json"]/text()').get() + json_ld = json.loads(json_ld) + + base = BaseItemLoader() + # # ALL possible keys for the different Item and ItemLoader-classes can be found inside converter/items.py + # # TODO: fill "base"-keys with values for + # # - thumbnail recommended + + # The actual URL of a learning material is dynamic and can change at any given time + # (e.g. 
when the title gets changed by a serlo editor), therefore we use the "id"-field + # or the identifier number as a stable ID + # base.add_value('sourceId', graphql_json["id"]) # e.g.: "id": "https://serlo.org/2097" + base.add_value('sourceId', graphql_json["identifier"]["value"]) # e.g.: "value": "2097" + hash_temp: str = graphql_json["dateModified"] + self.version + base.add_value('hash', hash_temp) + base.add_value('lastModified', graphql_json["dateModified"]) + type_list: list = graphql_json["type"] + base.add_value('type', type_list) + # thumbnail_url: str = "This string should hold the thumbnail URL" + # base.add_value('thumbnail', thumbnail_url) + if "publisher" in json_ld: + base.add_value('publisher', json_ld["publisher"]) + + lom = LomBaseItemloader() + + general = LomGeneralItemloader() + # # TODO: fill LOM "general"-keys with values for + # # - keyword required + # # - coverage optional + # # - structure optional + # # - aggregationLevel optional + general.add_value('identifier', graphql_json["id"]) + title_1st_try: str = graphql_json["headline"] + # not all materials carry a title in the GraphQL API, therefore we're trying to grab a valid title from + # different sources (GraphQL > json_ld > header) + if title_1st_try is not None: + general.add_value('title', title_1st_try) + elif title_1st_try is None: + title_2nd_try = json_ld["name"] + if title_2nd_try is not None: + general.add_value('title', title_2nd_try) + if title_1st_try is None and title_2nd_try is None: + title_from_header = response.xpath('//meta[@property="og:title"]/@content').get() + if title_from_header is not None: + general.add_value('title', title_from_header) + # not all graphql entries have a description either, therefore we try to grab that from different sources + # (GraphQL > json_ld > header > first paragraph (from the DOM itself)) + if "description" in graphql_json: + description_1st_try: str = graphql_json["description"] + if description_1st_try is not None and len(description_1st_try) != 0: + general.add_value('description', description_1st_try) + elif "description" in json_ld: + # some json_ld containers don't have a description + description_2nd_try: str = json_ld["description"] + if description_2nd_try is not None and len(description_2nd_try) != 0: + general.add_value('description', description_2nd_try) + # elif len(description_1st_try) == 0 and len(description_2nd_try) == 0: + else: + description_from_header: str = response.xpath('//meta[@name="description"]/@content').get() + if description_from_header is not None and len(description_from_header) != 0: + general.add_value('description', description_from_header) + else: + description_from_first_paragraph = response.xpath('//p[@class="serlo-p"]/text()').get() + if len(description_from_first_paragraph) != 0: + general.add_value('description', description_from_first_paragraph) + in_language: list = graphql_json["inLanguage"] + general.add_value('language', in_language) + # ToDo: keywords would be extremely useful, but aren't supplied by neither the API / JSON_LD nor the header + # # once we've added all available values to the necessary keys in our LomGeneralItemLoader, + # # we call the load_item()-method to return a (now filled) LomGeneralItem to the LomBaseItemLoader + lom.add_value('general', general.load_item()) + + technical = LomTechnicalItemLoader() + # # TODO: fill "technical"-keys with values for + # # - size optional + # # - requirement optional + # # - installationRemarks optional + # # - otherPlatformRequirements optional + # # - 
duration optional (only applies to audiovisual content like videos/podcasts) + technical.add_value('format', 'text/html') # e.g. if the learning object is a web-page + technical.add_value('location', graphql_json["id"]) # we could also use response.url here + + lom.add_value('technical', technical.load_item()) + + lifecycle = LomLifecycleItemloader() + # # TODO: fill "lifecycle"-keys with values for + # # - role recommended + # # - firstName recommended + # # - lastName recommended + # # - uuid optional + if "publisher" in json_ld: + lifecycle.add_value('organization', "Serlo Education e. V.") + lifecycle.add_value('role', 'publisher') # supported roles: "author" / "editor" / "publisher" + # for available roles mapping, please take a look at converter/es_connector.py + lifecycle.add_value('url', json_ld["publisher"]) + lifecycle.add_value('email', "de@serlo.org") + for language_item in in_language: + if language_item == "en": + lifecycle.replace_value('email', "en@serlo.org") + lifecycle.add_value('date', graphql_json["dateCreated"]) + lom.add_value('lifecycle', lifecycle.load_item()) + + educational = LomEducationalItemLoader() + # # TODO: fill "educational"-keys with values for + # # - description recommended (= "Comments on how this learning object is to be used") + # # - interactivityType optional + # # - interactivityLevel optional + # # - semanticDensity optional + # # - typicalAgeRange optional + # # - difficulty optional + # # - typicalLearningTime optional + educational.add_value('language', in_language) + + lom.add_value('educational', educational.load_item()) + + # classification = LomClassificationItemLoader() + # # TODO: fill "classification"-keys with values for + # # - cost optional + # # - purpose optional + # # - taxonPath optional + # # - description optional + # # - keyword optional + # lom.add_value('classification', classification.load_item()) + + # # once you've filled "general", "technical", "lifecycle" and "educational" with values, + # # the LomBaseItem is loaded into the "base"-BaseItemLoader + base.add_value('lom', lom.load_item()) + + vs = ValuespaceItemLoader() + # # for possible values, either consult https://vocabs.openeduhub.de + # # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs + # # TODO: fill "valuespaces"-keys with values for + # # - conditionsOfAccess recommended + # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/conditionsOfAccess.ttl) + # # - educationalContext optional + # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/educationalContext.ttl) + # # - toolCategory optional + # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/toolCategory.ttl) + # # - accessibilitySummary optional + # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/accessibilitySummary.ttl) + # # - dataProtectionConformity optional + # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/dataProtectionConformity.ttl) + # # - fskRating optional + # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/fskRating.ttl) + + if "audience" in json_ld: + # mapping educationalAudienceRole to IntendedEndUserRole here + intended_end_user_roles = list() + for audience_item in json_ld["audience"]: + edu_audience_role = audience_item["prefLabel"]["en"] + if edu_audience_role == "professional": + vs.add_value('educationalContext', ["Further Education", "vocational education"]) + if edu_audience_role in self.EDU_AUDIENCE_ROLE_MAPPING.keys(): + 
edu_audience_role = self.EDU_AUDIENCE_ROLE_MAPPING.get(edu_audience_role) + intended_end_user_roles.append(edu_audience_role) + vs.add_value('intendedEndUserRole', intended_end_user_roles) + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) + + if "about" in json_ld and len(json_ld["about"]) != 0: + # not every json_ld-container has an "about"-key, e.g.: https://de.serlo.org/5343/5343 + # we need to make sure that we only try to access "about" if it's actually available + # making sure that we only try to look for a discipline if the "about"-list actually has list items + disciplines = list() + for list_item in json_ld["about"]: + if "de" in list_item["prefLabel"]: + discipline_de: str = list_item["prefLabel"]["de"] + disciplines.append(discipline_de) + elif "en" in list_item["prefLabel"]: + discipline_en: str = list_item["prefLabel"]["en"] + disciplines.append(discipline_en) + if len(disciplines) > 0: + vs.add_value('discipline', disciplines) + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl) + # if the json_ld doesn't hold a discipline value for us, we'll try to grab the discipline from the url path + else: + if "/mathe/" in response.url: + vs.add_value('discipline', "Mathematik") + if "/biologie/" in response.url: + vs.add_value('discipline', "Biologie") + if "/chemie/" in response.url: + vs.add_value('discipline', "Chemie") + if "/nachhaltigkeit/" in response.url: + vs.add_value('discipline', "Nachhaltigkeit") + if "/informatik/" in response.url: + vs.add_value('discipline', "Informatik") + vs.add_value('containsAdvertisement', 'No') + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/containsAdvertisement.ttl) + # serlo doesn't want to distract learners with ads, therefore we can set it by default to 'no' + if graphql_json["isAccessibleForFree"] is True: + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/price.ttl) + vs.add_value('price', 'no') + elif graphql_json["isAccessibleForFree"] is False: + # only set the price to "kostenpflichtig" if it's explicitly stated, otherwise we'll leave it empty + vs.add_value('price', 'yes') + if graphql_json["learningResourceType"] is not None: + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) + vs.add_value('learningResourceType', graphql_json["learningResourceType"]) + vs.add_value('sourceContentType', "Lernportal") + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/sourceContentType.ttl) + + base.add_value('valuespaces', vs.load_item()) + + lic = LicenseItemLoader() + # # TODO: fill "license"-keys with values for + # # - author recommended + # # - expirationDate optional (for content that expires, e.g. 
ÖR-Mediatheken) + license_url = graphql_json["license"]["id"] + if license_url is not None: + lic.add_value('url', license_url) + base.add_value('license', lic.load_item()) + + permissions = super().getPermissions(response) + base.add_value('permissions', permissions.load_item()) + + response_loader = super().mapResponse(response) + base.add_value('response', response_loader.load_item()) + + yield base.load_item() From 5b0ed97f2763ecbb8057dbec53cc5432d1693f21 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Apr 2022 13:34:50 +0200 Subject: [PATCH 055/590] JSON output shows more fields by default - enabled "hash", "lastModified", "valuespaces" and "type" in the JSONStorePipeline --- converter/pipelines.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index e5b6da26..fb5f1261 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -439,14 +439,15 @@ def open_spider(self, spider): file, fields_to_export=[ "sourceId", + "hash", + "lastModified", + "type", "lom", - # "valuespaces", + "valuespaces", "license", - # "type", # "origin", # "fulltext", # "ranking", - # "lastModified", # "thumbnail", ], encoding='utf-8', From ec4d758e8e35186a2288a15921f71cbb03e8ef8a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Apr 2022 20:39:22 +0200 Subject: [PATCH 056/590] niedersachsen_abi_spider.py v0.0.3 (squashed): - add: new_lrt -- change: new_lrt uses uuid - fix: DirectoryScanner overlooked "Lehrer"-subfolders -- using glob in recursive mode should be more efficient than the previous method - fix: keyword-mapping for "Lehrer"-files -- extends the learning object title by "Erwartungshorizont / Bewertungsbogen" if a .pdf for teachers is detected - fix: keyword-mapper to detect "A1", "A2" etc. as assignment parts - minor code cleanup --- converter/spiders/niedersachsen_abi_spider.py | 30 ++++++++++++------- .../lower_saxony_abi/directory_routine.py | 15 +++++----- .../lower_saxony_abi/keyword_mapper.py | 15 +++++++--- 3 files changed, 39 insertions(+), 21 deletions(-) diff --git a/converter/spiders/niedersachsen_abi_spider.py b/converter/spiders/niedersachsen_abi_spider.py index de895dd4..79459910 100644 --- a/converter/spiders/niedersachsen_abi_spider.py +++ b/converter/spiders/niedersachsen_abi_spider.py @@ -19,7 +19,7 @@ class NiedersachsenAbiSpider(scrapy.Spider, LomBase): allowed_domains = ['za-aufgaben.nibis.de'] start_urls = ['https://za-aufgaben.nibis.de'] - version = "0.0.2" + version = "0.0.3" # last update: 2022-04-12 # Default values for the 2 expected parameters. Parameter "filename" is always required, "skip_unzip" is optional. 
filename = None skip_unzip = False @@ -31,9 +31,6 @@ class NiedersachsenAbiSpider(scrapy.Spider, LomBase): # -a skip_unzip="yes" # Make sure that there is a corresponding .zip file inside the /zip_download/-folder in the project root - # def start_requests(self): - # yield self.parse(None) - def __init__(self, **kwargs): super().__init__(**kwargs) # logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s') @@ -70,13 +67,13 @@ def __init__(self, **kwargs): logging.debug(un_zipper.zip_files_already_extracted) # always scan the /zip_extract/-directory for pdfs and try to extract metadata - print( + logging.debug( f"Analyzing file paths for '.pdf'-files inside " f"{directory_paths.path_storage.path_to_extraction_directory}") pdfs_in_directory: dict = \ DirectoryScanner.scan_directory_for_pdfs(directory_paths.path_storage.path_to_extraction_directory) # logging.debug(pp.pformat(pdfs_in_directory)) - print(f"Total .pdf items in the above mentioned directory: {len(pdfs_in_directory.keys())}") + logging.debug(f"Total .pdf items in the above mentioned directory: {len(pdfs_in_directory.keys())}") if len(pdfs_in_directory.keys()) == 0: raise Exception(f"No .pdf files found inside {directory_paths.path_storage.path_to_extraction_directory}. " f"Please make sure that you've run the crawler with '-a filename=' " @@ -93,8 +90,6 @@ def getHash(self, response=None) -> str: pass def parse(self, response, **kwargs): - # print(f"filename = {self.filename}") - # print(f"skip_unzip = {self.skip_unzip}") logging.debug(f"The .pdf (general) dictionary has {len(self.pdf_dictionary_general.keys())} files") logging.debug(f"The dictionary for additional .pdf files has " f"{len(self.pdf_dictionary_additional.keys())} entries") @@ -102,7 +97,6 @@ def parse(self, response, **kwargs): # first we're scraping all the .pdf files that follow the more general RegEx syntax for pdf_item in self.pdf_dictionary_general: current_dict: dict = self.pdf_dictionary_general.get(pdf_item) - # pprint.pprint(current_dict) base = BaseItemLoader() base.add_value('sourceId', pdf_item) hash_temp = str(f"{datetime.now().isoformat()}{self.version}") @@ -134,10 +128,19 @@ def parse(self, response, **kwargs): base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() + # all files are considered "Abituraufgaben" + vs.add_value('new_lrt', "9cf3c183-f37c-4b6b-8beb-65f530595dff") + # "Klausur, Klassenarbeit und Test" if current_dict.get('discipline') is not None: vs.add_value('discipline', current_dict.get('discipline')) if current_dict.get('intendedEndUserRole') is not None: vs.add_value('intendedEndUserRole', current_dict.get('intendedEndUserRole')) + if current_dict.get("intendedEndUserRole") == "teacher": + # filenames that are ending with "L" or "Lehrer" are always + # "Erwartungshorizont/Bewertungsmuster"-type of pdfs, therefore we can derive the new_lrt from + # the filename + vs.add_value('new_lrt', "7c236821-bfae-4eeb-bc79-590bf8ea1d96") + # "Lösungs(beispiel) und Erwartungshorizont" base.add_value('valuespaces', vs.load_item()) lic = LicenseItemLoader() @@ -154,7 +157,6 @@ def parse(self, response, **kwargs): # Making sure that we also grab the additional .pdf files that don't follow the general filename syntax for pdf_item in self.pdf_dictionary_additional: current_dict: dict = self.pdf_dictionary_additional.get(pdf_item) - # pprint.pprint(current_dict) base = BaseItemLoader() base.add_value('sourceId', pdf_item) hash_temp = str(f"{datetime.now().isoformat()}{self.version}") @@ -187,6 +189,14 @@ 
def parse(self, response, **kwargs): vs = ValuespaceItemLoader() if current_dict.get('discipline') is not None: vs.add_value('discipline', current_dict.get('discipline')) + # all files are considered "Abituraufgaben": + vs.add_value('new_lrt', "9cf3c183-f37c-4b6b-8beb-65f530595dff") + # "Klausur, Klassenarbeit und Test" + if current_dict.get("intendedEndUserRole") == "teacher": + # filenames that are ending with "L" or "Lehrer" are always "Erwartungshorizont/Bewertungsmuster"-type + # of pdfs, therefore we can derive the new_lrt from the filename + vs.add_value('new_lrt', "7c236821-bfae-4eeb-bc79-590bf8ea1d96") + # "Lösungs(beispiel) und Erwartungshorizont" base.add_value('valuespaces', vs.load_item()) lic = LicenseItemLoader() diff --git a/converter/spiders/scripts/lower_saxony_abi/directory_routine.py b/converter/spiders/scripts/lower_saxony_abi/directory_routine.py index cd869528..e169edc0 100644 --- a/converter/spiders/scripts/lower_saxony_abi/directory_routine.py +++ b/converter/spiders/scripts/lower_saxony_abi/directory_routine.py @@ -1,5 +1,7 @@ +import glob import logging import os +import pathlib import pprint import zipfile from dataclasses import dataclass @@ -235,12 +237,11 @@ def scan_directory_for_pdfs(target_directory): dict() = { filename : directory } """ directory_to_scan = target_directory - pdf_list = set() pdf_dictionary_temp = dict() - for folder_name, sub_folders, filenames in os.walk(directory_to_scan): - for _ in sub_folders: - for filename in filenames: - if filename.endswith('.pdf') and filename not in pdf_list: - pdf_list.add(filename) - pdf_dictionary_temp.update({filename: folder_name}) + pdf_iterator = glob.iglob(f"{directory_to_scan}/**/*.pdf", recursive=True) + for pdf_item in pdf_iterator: + pdf_pure_path = pathlib.PurePath(pdf_item) + pdf_name = pdf_pure_path.name + pdf_directory = str(pdf_pure_path.parent) + pdf_dictionary_temp.update({pdf_name: pdf_directory}) return pdf_dictionary_temp diff --git a/converter/spiders/scripts/lower_saxony_abi/keyword_mapper.py b/converter/spiders/scripts/lower_saxony_abi/keyword_mapper.py index 1ca04752..48c00c79 100644 --- a/converter/spiders/scripts/lower_saxony_abi/keyword_mapper.py +++ b/converter/spiders/scripts/lower_saxony_abi/keyword_mapper.py @@ -56,6 +56,8 @@ class LoSaxKeywordMapper: 'GA': 'Kurs auf grundlegendem Anforderungsniveau (gA)', 'HV': 'Hörverständnis', 'ME': 'Material', # for students or teachers + 'L': 'Erwartungshorizont / Bewertungsbogen (Lehrer)', + 'Lehrer': 'Erwartungshorizont / Bewertungsbogen (Lehrer)', 'mitExp': 'mit Experimentieren', 'ohneExp': 'ohne Experimentieren', 'mitExpElektrik': 'mit Experimentieren - Elektrik', @@ -120,9 +122,9 @@ def extract_pdf_metadata(self, pdf_dictionary): # Allgemein / LinAlg / analytische Geometrie / Stochastik r'(?PAnlagen|AnlagenTSP|TS|TS\d{4})?' # TSP bzw. TS = Thematische Schwerpunkte / Themenschwerpunkte - r'(?PAufg\d)?' - r'(?PLehrer)?' - r'(.pdf)') + r'(?PAufg\d|A\d)?' + r'(?PLehrer|L)?' 
+ r'(\.pdf)') if regex_general.search(pdf_item) is not None: regex_result_dict = regex_general.search(pdf_item).groupdict() @@ -153,8 +155,13 @@ def extract_pdf_metadata(self, pdf_dictionary): for potential_keyword in only_valid_values: if potential_keyword in self.keyword_mapping: potential_keyword = self.keyword_mapping.get(potential_keyword) - if potential_keyword.startswith('Aufg'): + assignment_regex = re.compile(r"Aufg\d") + if assignment_regex.search(potential_keyword) is not None: potential_keyword = potential_keyword.replace('Aufg', 'Aufgabe ') + assignment_regex = re.compile(r"A\d") + if assignment_regex.search(potential_keyword) is not None: + # matches "A1", "A2" etc. to find "Aufgabe"-acronyms + potential_keyword = potential_keyword.replace('A', 'Aufgabe ') keywords_cleaned_and_mapped.append(potential_keyword) logging.debug(self.pp.pformat(keywords_cleaned_and_mapped)) From f0c4c715a7515af9f8d51eb1a6e9319599349296 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 13 Apr 2022 12:05:03 +0200 Subject: [PATCH 057/590] add: "new_lrt" to mediawiki_base (squashed) - adds "Wiki (dynamisch)" as a static value to all objects crawled from a Mediawiki -- change: new_lrt uses uuid - remove: new_lrt implementation within the zum_klexikon_spider - optimize: imports --- .../spiders/base_classes/mediawiki_base.py | 6 ++---- converter/spiders/zum_klexikon.py | 19 +++---------------- 2 files changed, 5 insertions(+), 20 deletions(-) diff --git a/converter/spiders/base_classes/mediawiki_base.py b/converter/spiders/base_classes/mediawiki_base.py index 264275ec..ea0d9baf 100644 --- a/converter/spiders/base_classes/mediawiki_base.py +++ b/converter/spiders/base_classes/mediawiki_base.py @@ -9,7 +9,6 @@ import requests import scrapy -from converter.constants import Constants from converter.items import BaseItemLoader, LomGeneralItemloader, LomTechnicalItemLoader, LicenseItemLoader from converter.spiders.base_classes.meta_base import SpiderBase from .lom_base import LomBase @@ -155,7 +154,7 @@ def start_requests(self): yield self.query_for_pages() - def query_for_pages(self, continue_token: dict[str,str] = None): + def query_for_pages(self, continue_token: dict[str, str] = None): params = self._query_params if continue_token is None: continue_token = {} @@ -251,6 +250,5 @@ def getValuespaces(self, response): loader.add_value("discipline", categories) loader.add_value("educationalContext", categories) loader.add_value("intendedEndUserRole", categories) + loader.add_value("new_lrt", "6b9748e4-fb3b-4082-ae08-c7a11c717256") # "Wiki (dynamisch)" return loader - - diff --git a/converter/spiders/zum_klexikon.py b/converter/spiders/zum_klexikon.py index bb215787..6b0fc30c 100644 --- a/converter/spiders/zum_klexikon.py +++ b/converter/spiders/zum_klexikon.py @@ -5,7 +5,7 @@ import w3lib.html from scrapy import Selector -from converter.items import LomTechnicalItem, LicenseItem, LomGeneralItemloader, ValuespaceItemLoader +from converter.items import LomTechnicalItem, LicenseItem, LomGeneralItemloader, ValuespaceItemLoader, ValuespaceItem from .base_classes.mediawiki_base import MediaWikiBase, jmes_pageids, jmes_title, jmes_links, jmes_continue from ..constants import Constants @@ -90,23 +90,10 @@ def getLOMGeneral(self, response=None) -> LomGeneralItemloader: general.add_value('description', first_paragraph) return general - def getValuespaces(self, response) -> ValuespaceItemLoader: + def valuespace_item(self, response) -> ValuespaceItem: """ Scrapy Contracts: @url 
https://klexikon.zum.de/api.php?format=json&action=parse&pageid=10031&prop=text|langlinks|categories|links|templates|images|externallinks|sections|revid|displaytitle|iwlinks|properties """ response.meta['item'] = json.loads(response.body) - vs = ValuespaceItemLoader() - data = response.meta['item'] - # this jmespath expression doesn't distinguish between values for - # 'discipline', 'educationalContext' or 'intendedEndUserRole' - # it tries to fit the values into each metadata-field - # and the non-fitting values get dropped by the valuespaces pipeline - jmes_categories = jmespath.compile('parse.categories[]."*"') - categories = jmes_categories.search(data) # ['Ethik', 'Sekundarstufe_1'] - if categories is not None: - vs.add_value("discipline", categories) - vs.add_value("educationalContext", categories) - vs.add_value("intendedEndUserRole", categories) - vs.add_value("new_lrt", "Wiki (dynamisch)") - return vs + return self.getValuespaces(response).load_item() From ac48a60f65d62a58786e2ef34a7b8ab5e45ed6d0 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 19 Apr 2022 14:27:23 +0200 Subject: [PATCH 058/590] fix:map new_lrt to edu-sharing --- converter/es_connector.py | 1 + 1 file changed, 1 insertion(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index c4a1f530..7e278bd1 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -346,6 +346,7 @@ def transformItem(self, uuid, spider, item): "intendedEndUserRole": "ccm:educationalintendedenduserrole", "educationalContext": "ccm:educationalcontext", "learningResourceType": "ccm:educationallearningresourcetype", + "new_lrt": "ccm:oeh_lrt", "sourceContentType": "ccm:sourceContentType", "toolCategory": "ccm:toolCategory", "conditionsOfAccess": "ccm:conditionsOfAccess", From 9e3d42bd993e9224bb238867ed8bd80e1b193163 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 21 Apr 2022 17:14:35 +0200 Subject: [PATCH 059/590] fix:support trees for vocabs, add new_lrt as a vocab --- valuespace_converter/app/valuespaces.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/valuespace_converter/app/valuespaces.py b/valuespace_converter/app/valuespaces.py index 359dd25f..b92e1e4c 100644 --- a/valuespace_converter/app/valuespaces.py +++ b/valuespace_converter/app/valuespaces.py @@ -5,7 +5,7 @@ class Valuespaces: idsVocabs = ['intendedEndUserRole', 'discipline', 'educationalContext', 'learningResourceType', - 'sourceContentType', 'toolCategory', 'conditionsOfAccess', 'oer'] + 'sourceContentType', 'toolCategory', 'conditionsOfAccess', 'oer', 'new_lrt'] idsW3ID = ['containsAdvertisement', 'price', 'accessibilitySummary', 'dataProtectionConformity', 'fskRating'] data = {} def __init__(self): @@ -17,10 +17,17 @@ def __init__(self): for url in urls: #try: r = requests.get(url['url']) - self.data[url['key']] = r.json()['hasTopConcept'] + self.data[url['key']] = self.flatten(r.json()['hasTopConcept']) #except: # self.valuespaces[v] = {} + def flatten(self, tree: []): + result = tree + for leaf in tree: + if 'narrower' in leaf: + result.extend(self.flatten(leaf['narrower'])) + return result + @staticmethod def findKey(valuespaceId: string, id: string, valuespace = None): if not valuespace: From d0c8cbc02a4d64a71401fce2144f6b6a15851da3 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 21 Apr 2022 18:18:16 +0200 Subject: [PATCH 060/590] fix: disable learningResourceType since new_lrt is already set --- converter/spiders/grundschulkoenig_spider.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py index ed49e1b0..2802eea1 100644 --- a/converter/spiders/grundschulkoenig_spider.py +++ b/converter/spiders/grundschulkoenig_spider.py @@ -162,7 +162,7 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry vs.add_value('discipline', 'Allgemein') vs.add_value('educationalContext', 'Primarstufe') vs.add_value('sourceContentType', "Unterrichtsmaterial- und Aufgaben-Sammlung") - vs.add_value('learningResourceType', 'other_asset_type') + # vs.add_value('learningResourceType', 'other_asset_type') # ToDo: new_lrt if "/vorschule/" in response.url: vs.add_value('educationalContext', "Elementarbereich") From f397f8cafa51a3d97ae18194c988f05cd18ab8f7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sun, 24 Apr 2022 19:27:47 +0200 Subject: [PATCH 061/590] change: don't use memucho usernames for the author field --- converter/spiders/memucho_spider.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/converter/spiders/memucho_spider.py b/converter/spiders/memucho_spider.py index bc040de3..47026634 100644 --- a/converter/spiders/memucho_spider.py +++ b/converter/spiders/memucho_spider.py @@ -132,11 +132,11 @@ def getLOMTechnical(self, response): def getLicense(self, response): license_loader = LomBase.getLicense(self, response) license_loader.add_value("url", Constants.LICENSE_CC_BY_40) - author = response.meta["item"].get("Author") + # author = response.meta["item"].get("Author") # the author information comes straight from the memucho API and consists of memucho usernames # ToDo: confirm if memucho usernames are desired values for author information - if author is not None: - license_loader.add_value("author", author) + # if author is not None: + # license_loader.add_value("author", author) return license_loader def getValuespaces(self, response): From 2dfc476da31ce1135e2a3db3b7c53854f6d0cf7d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 11 Apr 2022 17:39:22 +0200 Subject: [PATCH 062/590] requirements.txt update for scrapy v2.6.1 - Scrapy v2.6.1 officially supports Python 3.10 - a lot of libraries and frameworks received significant (security) updates since the last time we updated the dependencies --- requirements.txt | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/requirements.txt b/requirements.txt index bb114a1c..7947e3f9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,26 +1,26 @@ -wheel +wheel==0.37.1 image -dateparser==1.0.0 -isodate==0.6.0 -pyppeteer==0.2.6 +dateparser==1.1.1 +isodate==0.6.1 +pyppeteer==1.0.2 html2text~=2020.1.16 -scrapy-splash -python-dateutil~=2.8.1 -python-dotenv==0.13.0 -Scrapy==2.4.1 -requests==2.23.0 +scrapy-splash==0.8.0 +python-dateutil==2.8.2 +python-dotenv==0.20.0 +Scrapy==2.6.1 +requests==2.27.1 vobject==0.9.6.1 xmltodict~=0.12.0 overrides==3.1.0 -jmespath~=0.10.0 -flake8==3.9.1 -pytest==6.2.3 -extruct~=0.12.0 +jmespath==1.0.0 +flake8==4.0.1 +pytest==7.1.1 +extruct~=0.13.0 lxml~=4.6.3 w3lib~=1.22.0 itemloaders~=1.0.4 -Pillow~=8.2.0 -itemadapter~=0.2.0 -six~=1.15.0 -certifi~=2020.12.5 -urllib3~=1.25.11 \ No newline at end of file +Pillow==9.1.0 +itemadapter==0.5.0 +six==1.16.0 +certifi==2021.10.8 +urllib3~=1.26.09 \ No newline at end of file From 221069774055f23bbe12df0a1d233a70bb117e40 Mon Sep 17 00:00:00 2001 From: criamos 
<981166+Criamos@users.noreply.github.com> Date: Thu, 21 Apr 2022 11:11:27 +0200 Subject: [PATCH 063/590] zum_dwu_spider v0.0.2 - fix: skip title.strip() if the crawled page doesn't have a valid title --- converter/spiders/zum_dwu_spider.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/converter/spiders/zum_dwu_spider.py b/converter/spiders/zum_dwu_spider.py index f2e89561..502713d7 100644 --- a/converter/spiders/zum_dwu_spider.py +++ b/converter/spiders/zum_dwu_spider.py @@ -19,7 +19,7 @@ class ZumDwuSpider(CrawlSpider, LomBase): "http://www.zum.de/dwu/umamtg.htm", # Mathematik-Teilgebiete "http://www.zum.de/dwu/umaptg.htm" # Physik-Teilgebiete ] - version = "0.0.1" + version = "0.0.2" # last update: 2022-04-21 parsed_urls = set() # holds the already parsed urls to minimize the amount of duplicate requests debug_xls_set = set() # The author used a HTML suite for building the .htm documents (Hot Potatoes by Half-Baked Software) @@ -124,7 +124,8 @@ def parse(self, response: scrapy.http.Response, **kwargs): # therefore we need to grab the title from a better suited element. # This also means that the "description" is most probably wrong and needs a replacement as well: title = response.xpath('//td[@class="tt1math"]/text()').get() - title = title.strip() + if title is not None: + title = title.strip() # desc_list = response.xpath('/html/body/table[2]/tr/td/table/tr[1]/td[1]/text()').getall() desc_list = response.xpath('//td[@class="t1fbs"]/text()').getall() if desc_list is not None and len(desc_list) == 0: From 58d71a3f8e366b18c7daa068031e98703f2f149a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 25 Apr 2022 00:16:50 +0200 Subject: [PATCH 064/590] add: playwright integration into web_tools - uses the same docker container as the previous pyppeteer-implementation - currently uses PLAYWRIGHT_WS_ENDPOINT variable in the .env file -- make sure to update your .env file accordingly! 
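
For spider code the new engine is selected the same way as the pyppeteer one; a rough usage
sketch (the URL is only a placeholder here, and the browserless/chrome container has to be
reachable under the PLAYWRIGHT_WS_ENDPOINT configured above):

    from converter.web_tools import WebEngine, WebTools

    # fetch the JavaScript-rendered DOM of a page through the headless-Chrome container
    page_data = WebTools.getUrlData("https://kmap.eu/", engine=WebEngine.Playwright)
    rendered_html = page_data["html"]  # full DOM after the "networkidle" event
    plain_text = page_data["text"]     # html2text conversion of the rendered DOM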
--- converter/.env.example | 4 +++- converter/web_tools.py | 26 +++++++++++++++++++++++++- requirements.txt | 3 ++- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/converter/.env.example b/converter/.env.example index ebd91381..6d62e1a2 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -17,7 +17,9 @@ SPLASH_URL = "http://localhost:8050" # PYPPETEER Integration settings, as needed for the local container (as used in kmap_spider.py) # for more information, see: https://github.com/pyppeteer/pyppeteer -PYPPETEER_WS_ENDPOINT = "ws://localhost:3000" +PYPPETEER_WS_ENDPOINT="ws://localhost:3000" +# Playwright Integration, as needed for the local container (https://hub.docker.com/r/browserless/chrome#playwright) +PLAYWRIGHT_WS_ENDPOINT="ws://localhost:3000" # Edu-Sharing instance that the crawlers should upload to EDU_SHARING_BASE_URL = "http://localhost:8080/edu-sharing/" diff --git a/converter/web_tools.py b/converter/web_tools.py index 95b6a98f..f1c65a52 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -5,6 +5,7 @@ import html2text import pyppeteer import requests +from playwright.async_api import async_playwright from scrapy.utils.project import get_project_settings from converter import env @@ -13,8 +14,10 @@ class WebEngine(Enum): # Splash (default engine) Splash = 'splash', - # Pyppeteer is controlling a headless chrome + # Pyppeteer is controlling a headless Chrome browser Pyppeteer = 'pyppeteer' + # Playwright is controlling a headless Chrome browser + Playwright = 'playwright' class WebTools: @@ -24,6 +27,8 @@ def getUrlData(url: str, engine=WebEngine.Splash): return WebTools.__getUrlDataSplash(url) elif engine == WebEngine.Pyppeteer: return WebTools.__getUrlDataPyppeteer(url) + elif engine == WebEngine.Playwright: + return WebTools.__getUrlDataPlaywright(url) raise Exception("Invalid engine") @@ -33,6 +38,11 @@ def __getUrlDataPyppeteer(url: str): html = asyncio.run(WebTools.fetchDataPyppeteer(url)) return {"html": html, "text": WebTools.html2Text(html), "cookies": None, "har": None} + @staticmethod + def __getUrlDataPlaywright(url: str): + html = asyncio.run(WebTools.fetchDataPlaywright(url)) + return {"html": html, "text": WebTools.html2Text(html), "cookies": None, "har": None} + @staticmethod def __getUrlDataSplash(url: str): settings = get_project_settings() @@ -76,6 +86,20 @@ async def fetchDataPyppeteer(url: str): # await page.close() return content + @staticmethod + async def fetchDataPlaywright(url: str): + # relevant docs for this implementation: https://hub.docker.com/r/browserless/chrome#playwright and + # https://playwright.dev/python/docs/api/class-browsertype#browser-type-connect-over-cdp + async with async_playwright() as p: + browser = await p.chromium.connect_over_cdp(endpoint_url=env.get("PLAYWRIGHT_WS_ENDPOINT")) + page = await browser.new_page() + await page.goto(url, wait_until="networkidle", timeout=90000) + # waits for page to fully load (= no network traffic for 500ms), + # maximum timeout: 90s + content = await page.content() + # await page.close() + return content + @staticmethod def html2Text(html: str): h = html2text.HTML2Text() diff --git a/requirements.txt b/requirements.txt index 7947e3f9..9ebc65b0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,4 +23,5 @@ Pillow==9.1.0 itemadapter==0.5.0 six==1.16.0 certifi==2021.10.8 -urllib3~=1.26.09 \ No newline at end of file +urllib3~=1.26.09 +playwright==1.21.0 \ No newline at end of file From cc6c79f5e647b4728c8eabad4ececdfef158caf8 Mon Sep 17 00:00:00 
2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 25 Apr 2022 01:21:26 +0200 Subject: [PATCH 065/590] materialnetzwerk_spider v0.0.6 (squashed) - change: using playwright instead of pyppeteer for more consistent crawl results -- now properly gathers all 83 item bundles instead of only ~40% due to how the source handles lazy loads - add: new_lrt - add: competency description, educationalLevel - rework: metadata fields from the bundle_overview were gathered too inconsistently -- using better XPath selectors now, optional metadata fields are gathered more reliably - optimize imports, add: custom settings --- converter/spiders/materialnetzwerk_spider.py | 123 ++++++++++++------- 1 file changed, 81 insertions(+), 42 deletions(-) diff --git a/converter/spiders/materialnetzwerk_spider.py b/converter/spiders/materialnetzwerk_spider.py index be1f0ff5..049c2fe1 100644 --- a/converter/spiders/materialnetzwerk_spider.py +++ b/converter/spiders/materialnetzwerk_spider.py @@ -1,5 +1,4 @@ import json -import logging from typing import Optional import scrapy.http @@ -7,16 +6,16 @@ from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, \ + LomClassificationItemLoader from converter.spiders.base_classes import LomBase -from converter.valuespace_helper import ValuespaceHelper from converter.web_tools import WebTools, WebEngine class MaterialNetzwerkSpider(CrawlSpider, LomBase): name = "materialnetzwerk_spider" friendlyName = "Materialnetzwerk.org" - version = "0.0.6" # last update: 2022-04-14 + version = "0.0.6" # last update: 2022-04-24 start_urls = [ # 'https://editor.mnweg.org/?p=1&materialType=bundle', # this doesn't list any materials since they're loaded dynamically @@ -28,7 +27,15 @@ class MaterialNetzwerkSpider(CrawlSpider, LomBase): # inward-facing ] custom_settings = { - 'ROBOTSTXT_OBEY': False + 'CONCURRENT_REQUESTS': 32, + 'CONCURRENT_REQUESTS_PER_DOMAIN': 12, + # 'AUTOTHROTTLE_ENABLED': True, + # 'AUTOTHROTTLE_DEBUG': True, + # 'AUTOTHROTTLE_START_DELAY': 0.25, + # 'AUTOTHROTTLE_MAX_DELAY': 5, + # 'AUTOTHROTTLE_TARGET_CONCURRENCY': 2, + 'RETRY_TIMES_3': 3, + 'RETRY_PRIORITY_ADJUST': 1, } discipline_mapping = { 'AES': "Ernährung und Hauswirtschaft", # Ernährung und Hauswirtschaft @@ -74,10 +81,6 @@ def parse_start_url(self, response: scrapy.http.Response, **kwargs): bundle_urls.append(current_url) yield scrapy.Request(url=current_url, callback=self.parse_bundle_overview) - # for debugging only, to check if the urls are valid (and which urls were gathered) - # bundle_urls.sort() - # print(bundle_urls) - def parse_bundle_overview(self, response: scrapy.http.Response): """ @@ -89,32 +92,50 @@ def parse_bundle_overview(self, response: scrapy.http.Response): :return: yields a scrapy.Request for the first worksheet """ + bundle_dict = dict() + bundle_dict["bundle_url"] = response.url # render the web page to execute js and copy to the response - body = WebTools.getUrlData(response.url, WebEngine.Pyppeteer) + body = WebTools.getUrlData(response.url, WebEngine.Playwright) response = response.replace(body=body['html']) # a typical bundle_overview looks like this: https://editor.mnweg.org/mnw/sammlung/das-menschliche-skelett-m-78 # there's minimal metadata to be 
found, but we can grab the descriptions of each worksheet and use the # accumulated strings as our description for the bundle page - bundle_title = response.xpath('//*/div[@class="l-container content"]/h2/text()').get() - bundle_description = response.xpath('/html/head/meta[@property="description"]/@content').get() + bundle_title = response.xpath('//*[@class="l-container content"]/header/h2/text()').get() + if bundle_title is None: + # if we can't get the (clean) title, we need to grab the title from the header and clean it up manually + bundle_title: str = response.xpath('//head/meta[@property="og:title"]/@content').get() + if bundle_title.endswith(" — mnweg.org"): + bundle_title = bundle_title.replace("— mnweg.org", "").strip() + bundle_dict["bundle_title"] = bundle_title + bundle_dict["bundle_description"] = response.xpath('//head/meta[@property="description"]/@content').get() # div class tutoryMark holds the same content as the description in the header # bundle_tutory_mark = response.xpath('//div[@class="tutoryMark"]/text()').getall() - meta_values_fach = response.xpath('//dl[@class="metaValues"]/dt[1]/text()').get() - bundle_discipline = str() - education_level = list() - if meta_values_fach == "Fach": - meta_values_fach_value = response.xpath('//dl[@class="metaValues"]/dd[1]/text()').get() + # there are some basic metadata values in a "metaValues" container, keys differ from topic to topic + # keys could be: "Fach", "Kompetenzbereich", "Phase" or "Niveaustufe" + mv_keys = response.xpath('//dl[@class="metaValues"]/dt/text()').getall() + mv_values = response.xpath('//dl[@class="metaValues"]/dd/text()').getall() + meta_values_dict = dict(zip(mv_keys, mv_values)) + + if "Fach" in meta_values_dict: + meta_values_fach_value = meta_values_dict.get("Fach") if meta_values_fach_value is not None: # self.debug_disciplines.add(meta_values_fach_value) - bundle_discipline = meta_values_fach_value + bundle_dict["bundle_discipline"] = meta_values_fach_value # "phase" is their term for "Klassenstufe" - meta_values_phase = response.xpath('//dl[@class="metaValues"]/dt[2]/text()').get() - if meta_values_phase == "Phase": - meta_values_phase_value = response.xpath('//dl[@class="metaValues"]/dd[2]/text()').get() + if "Phase" in meta_values_dict: + meta_values_phase_value = meta_values_dict.get("Phase") edu_level_temp = meta_values_phase_value.replace(" ", "") # stripping empty spaces between the comma - education_level = edu_level_temp.split(',') # these values will be used for educationLevel + educational_level = edu_level_temp.split(',') # these values will be used for educationLevel + bundle_dict["bundle_educational_level"] = educational_level + if "Kompetenzbereich" in meta_values_dict: + meta_values_competency_value = meta_values_dict.get("Kompetenzbereich") + bundle_dict["bundle_competency"] = meta_values_competency_value + if "Niveaustufe" in meta_values_dict: + meta_values_niveau = meta_values_dict.get("Niveaustufe") + bundle_dict["bundle_niveau"] = meta_values_niveau + # materialnetzwerk lists 3 "Niveaustufen": M, R, E # meta_values_niveaustufe = response.xpath('//dl[@class="metaValues"]/dt[3]/text()').get() # if meta_values_niveaustufe == "Niveaustufe": @@ -135,6 +156,7 @@ def parse_bundle_overview(self, response: scrapy.http.Response): # worksheet_url = worksheet.xpath('@href').get() # print(worksheet_descriptions) worksheet_description_string: str = ''.join(worksheet_descriptions) + bundle_dict["worksheet_description_summary"] = worksheet_description_string # debug output to check if there 
are new disciplines that still need to be mapped: # debug_disciplines_sorted = list(self.debug_disciplines) @@ -143,39 +165,41 @@ def parse_bundle_overview(self, response: scrapy.http.Response): # There are two "application/ld+json"-scripts on the website -> XPath: /html/body/script[1] # one is of @type Organization, the other of @type LocalBusiness # ld_json_string = response.xpath('/html/body/script[@type="application/ld+json"]/text()').get().strip() - ld_json_organization = dict() - ld_json_local_business = dict() + for ld_json_block in response.xpath('/html/body/script[@type="application/ld+json"]/text()'): ld_json_string = ld_json_block.get().strip() ld_json_temp = json.loads(ld_json_string) if ld_json_temp.get("@type") == "Organization": ld_json_organization = json.loads(ld_json_string) + bundle_dict["bundle_ld_json_organization"] = ld_json_organization elif ld_json_temp.get("@type") == "LocalBusiness": ld_json_local_business = json.loads(ld_json_string) + bundle_dict["bundle_ld_json_local_business"] = ld_json_local_business # the publication date is only available on the individual worksheet page, but it seems like the individual # pages of a bundle are all carrying the same date, therefore it should be enough to only parse the first # worksheet (and reduce load on the website) first_worksheet_url = response.xpath('//a[@class="worksheet"]/@href').get() first_worksheet_thumbnail = response.xpath('/html/body/main/div/ul/a[1]/div[1]/img/@data-src').get() + bundle_dict["bundle_thumbnail"] = first_worksheet_thumbnail # there isn't a lot of metadata available on the bundle overview page, but we still need to carry it over to # the parse method since that's where the BaseItemLoader is built - bundle_dict = { - 'bundle_title': bundle_title, - 'bundle_description': bundle_description, - 'bundle_url': response.url, - 'worksheet_description_summary': worksheet_description_string, - 'bundle_discipline': bundle_discipline, - 'bundle_education_level': education_level, - 'bundle_ld_json_organization': ld_json_organization, - 'bundle_ld_json_local_business': ld_json_local_business, - 'bundle_thumbnail': first_worksheet_thumbnail - } + + # bundle_dict contains the following keys: + # - bundle_description + # - bundle_competency (optional) + # - bundle_discipline (optional) + # - bundle_educational_level (optional) + # - bundle_ld_json_organization + # - bundle_ld_json_local_business + # - bundle_niveau (optional, currently unmapped in edu-sharing) + # - bundle_thumbnail + # - bundle_title + # - bundle_url + # - worksheet_description_summary + if first_worksheet_url is not None: - logging.debug(first_worksheet_url) yield scrapy.Request(url=first_worksheet_url, callback=self.parse, cb_kwargs=bundle_dict) - # print(debug_disciplines_sorted) - pass def parse(self, response: scrapy.http.Response, **kwargs): """ @@ -244,11 +268,20 @@ def parse(self, response: scrapy.http.Response, **kwargs): lifecycle.add_value('date', date_published) lom.add_value('lifecycle', lifecycle.load_item()) + classification = LomClassificationItemLoader() + competency_description = kwargs.get("bundle_competency") + if competency_description is not None: + classification.add_value('description', competency_description) + lom.add_value('classification', classification.load_item()) + educational = LomEducationalItemLoader() - # TODO: educationalLevel is currently unsupported in the items.py backend? 
educational_level = kwargs.get('bundle_educational_level') - if educational_level is not None: - educational.add_value('educationalLevel', educational_level) + + # TODO: educationalLevel is currently unsupported in the items.py backend? (there exists a vocab for it, though: + # https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/educationalLevel.ttl + # if educational_level is not None: + # educational.add_value('educationalLevel', educational_level) + lom.add_value('educational', educational.load_item()) base.add_value('lom', lom.load_item()) @@ -272,7 +305,13 @@ def parse(self, response: scrapy.http.Response, **kwargs): vs.add_value('price', 'no') # we can map "Phase" to our educationalContext with the following ValuespaceHelper method: if educational_level is not None: - vs.add_value("educationalContext", ValuespaceHelper.educationalContextByGrade(educational_level)) + for educational_level_item in educational_level: + if int(educational_level_item) <= 4: + vs.add_value("educationalContext", "grundschule") + if 4 < int(educational_level_item) <= 10: + vs.add_value("educationalContext", "sekundarstufe_1") + if 10 < int(educational_level_item) <= 13: + vs.add_value("educationalContext", "sekundarstufe_2") lic = LicenseItemLoader() # everything is CC-BY-SA 3.0 according to the FAQs: https://mnweg.org/faqs From 65cdc93412cbb9edd70e77a917452149512fbffc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 25 Apr 2022 10:55:49 +0200 Subject: [PATCH 066/590] kmap_spider 0.0.6 (squashed) - change: use playwright instead of pyppeteer -- items_scraped_count should be 532 after a successful crawl - optimize imports --- converter/spiders/kmap_spider.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/converter/spiders/kmap_spider.py b/converter/spiders/kmap_spider.py index 5f27a1ca..58c41629 100644 --- a/converter/spiders/kmap_spider.py +++ b/converter/spiders/kmap_spider.py @@ -7,7 +7,7 @@ from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader from converter.spiders.base_classes import LomBase from converter.util.sitemap import from_xml_response from converter.web_tools import WebEngine, WebTools @@ -16,7 +16,7 @@ class KMapSpider(CrawlSpider, LomBase): name = "kmap_spider" friendlyName = "KMap.eu" - version = "0.0.5" # last update: 2021-10-04 + version = "0.0.6" # last update: 2022-04-25 sitemap_urls = [ "https://kmap.eu/server/sitemap/Mathematik", "https://kmap.eu/server/sitemap/Physik" @@ -58,8 +58,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: @returns item 1 """ last_modified = kwargs.get("lastModified") - url_data_splash_dict = WebTools.getUrlData(response.url, engine=WebEngine.Pyppeteer) - splash_html_string = url_data_splash_dict.get('html') + url_data_web_tools_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright) + splash_html_string = url_data_web_tools_dict.get('html') json_ld_string: str = Selector(text=splash_html_string).xpath('//*[@id="ld"]/text()').get() json_ld: dict = json.loads(json_ld_string) # TODO: skip item method - (skips item if it's an empty knowledge map) @@ -126,5 +126,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> 
BaseItemLoader:
         base.add_value("permissions", permissions.load_item())

         base.add_value('response', super().mapResponse(response).load_item())
-
+        # KMap doesn't deliver its fulltext to either Splash or Playwright; the fulltext object will show up as
+        # 'text': 'JavaScript wird benötigt!\n\n',
+        # in the final "scrapy.Item". As long as KMap doesn't change the way it's delivering its JavaScript content,
+        # our crawler won't be able to work around this limitation.
         return base.load_item()

From 5da546eabaaf38e21d731fa24e929ab41936de46 Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Tue, 26 Apr 2022 11:45:43 +0200
Subject: [PATCH 067/590] optimize imports

---
 converter/spiders/serlo_spider.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py
index 2064b778..744e9a8c 100644
--- a/converter/spiders/serlo_spider.py
+++ b/converter/spiders/serlo_spider.py
@@ -1,5 +1,4 @@
 import json
-import logging

 import requests
 import scrapy
@@ -107,7 +106,6 @@ def getHash(self, response=None) -> str:

     def parse(self, response, **kwargs):
         graphql_json: dict = kwargs.get("graphql_item")
-        # logging.debug(f"GraphQL Item: {graphql_json}")

         json_ld = response.xpath('//*[@type="application/ld+json"]/text()').get()
         json_ld = json.loads(json_ld)

From bdbad9d020df2f9aaf312b290f3f6e611ce90500 Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Tue, 26 Apr 2022 16:12:20 +0200
Subject: [PATCH 068/590] workaround: don't try to scrape .pdf or .docx files
 with Splash

- Splash can't handle .pdf or .docx files, therefore trying to get "html"-,
  "text"-, "cookies"- or the "har"-field would result in KeyError: 'har'
  during a Splash-enabled crawl
-- these errors would otherwise occur in biologie_lernprogramme_spider and
   chemie_lernprogramme_spider.py as soon as you enable Splash

---
 converter/web_tools.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/converter/web_tools.py b/converter/web_tools.py
index f1c65a52..2e7a0c16 100644
--- a/converter/web_tools.py
+++ b/converter/web_tools.py
@@ -47,7 +47,10 @@ def __getUrlDataSplash(url: str):
     def __getUrlDataSplash(url: str):
         settings = get_project_settings()
         # html = None
-        if settings.get("SPLASH_URL"):
+        if settings.get("SPLASH_URL") and not url.endswith((".pdf", ".docx")):
+            # Splash can't handle some binary direct-links (Splash will throw "LUA Error 400: Bad Request" as a result)
+            # ToDo: which additional filetypes need to be added to the exclusion list?
+            # ToDo: find general solution for extracting metadata from .pdf-files?
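+            # (note: the exclusion above is a plain suffix check on the URL string, so binary downloads that are
+            # served without a file extension, or with query parameters appended, would still be sent to Splash)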
result = requests.post( settings.get("SPLASH_URL") + "/render.json", json={ From 7afd4046a16db370989ed5d1eb5060790b5d4042 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 26 Apr 2022 16:15:16 +0200 Subject: [PATCH 069/590] add: custom_settings --- converter/spiders/chemie_lernprogramme_spider.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/chemie_lernprogramme_spider.py b/converter/spiders/chemie_lernprogramme_spider.py index d5338233..12e39118 100644 --- a/converter/spiders/chemie_lernprogramme_spider.py +++ b/converter/spiders/chemie_lernprogramme_spider.py @@ -11,7 +11,10 @@ class ChemieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): name = "chemie_lernprogramme_spider" friendlyName = "Chemie-Lernprogramme" url = "https://chemie-lernprogramme.de/" - version = "0.1.1" # last update: 2022-02-22 + version = "0.1.1" # last update: 2022-04-26 + custom_settings = { + "ROBOTSTXT_OBEY": False + } static_values = { "author": { From a08c191574c4ca55471a9bb283a35eba6b06f855 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 26 Apr 2022 18:31:52 +0200 Subject: [PATCH 070/590] umwelt_im_unterricht_spider v0.0.4 - change: fallback-license from copyright to CC-BY-SA 4.0 -- Umwelt-im-Unterricht confirmed via E-Mail that the texts are considered CC-BY-SA 4.0 by default (if not stated otherwise on the individual page) -- individual pictures within a "Bilderserie" might still carry their own license, but this information is clearly visible to the website visitors --- converter/spiders/umwelt_im_unterricht_spider.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index d6faed5f..eaef7be3 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -33,7 +33,7 @@ class UmweltImUnterrichtSpider(CrawlSpider, LomBase): "https://www.umwelt-im-unterricht.de/suche/?tx_solr%5Bfilter%5D%5B0%5D=type%3Amaterials_images", # Typ: Bilderserie ] - version = "0.0.3" # last update: 2022-04-12 + version = "0.0.4" # last update: 2022-04-26 topic_urls = set() # urls that need to be parsed will be added here topic_urls_parsed = set() # this set is used for 'checking off' already parsed (individual) topic urls overview_urls_already_parsed = set() # this set is used for 'checking off' already parsed overview_pages @@ -155,7 +155,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): lifecycle = LomLifecycleItemloader() lifecycle.add_value('role', 'publisher') lifecycle.add_value('date', date_cleaned_up) - lifecycle.add_value('url', "https://www.umwelt-im-unterricht.de/impressum/") + lifecycle.add_value('url', "https://www.umwelt-im-unterricht.de/") lifecycle.add_value('organization', 'Bundesministerium für Umwelt, Naturschutz und nukleare Sicherheit (BMU)') lom.add_value('lifecycle', lifecycle.load_item()) @@ -284,11 +284,13 @@ def parse(self, response: scrapy.http.Response, **kwargs): license_url = license_url.replace("http://", "https://") lic.replace_value('url', license_url) else: - lic.add_value('url', Constants.LICENSE_COPYRIGHT_LAW) - # ToDo: change the fallback-license if necessary - # since there are a lot of articles with missing license-information (especially "Thema der Woche", - # "Bilderserien" and other mixed forms of articles), we're setting the default license to "copyright" 
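
Condensed, the new fallback amounts to the following sketch (variable names as in the diff below;
Constants.LICENSE_CC_BY_SA_40 is assumed to resolve to the canonical
https://creativecommons.org/licenses/by-sa/4.0/ URL):

    if license_url is not None:
        # an explicit licence link was found in the article's "cc-licence-info" box
        lic.replace_value('url', license_url.replace("http://", "https://"))
    else:
        # confirmed default for texts authored by Umwelt-im-Unterricht itself
        lic.add_value('url', Constants.LICENSE_CC_BY_SA_40)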
until we - # get a response/confirmation from Umwelt-im-Unterricht in regards to what the default should be + lic.add_value('url', Constants.LICENSE_CC_BY_SA_40) + # since there are a lot of articles with missing license-information (especially "Thema der Woche", + # "Bilderserien" and other mixed forms of articles), we're setting the default license to CC-BY-SA 4.0 + # EMail-Confirmation from Umwelt-im-Unterricht (2022-04-26): + # this license is covering the texts that were produced by UIU! (teasers, intro-texts, summaries) + # individual pictures from "Bilderserie"-type of topics still carry their own respective licenses (which we + # currently don't crawl individually) license_description_raw: str = response.xpath('//div[@class="cc-licence-info"]').get() if license_description_raw is not None: From f1e8a81722d762070120ac0590f776186fbab1e3 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 6 May 2022 18:18:32 +0200 Subject: [PATCH 071/590] grundschulkoenig_spider v0.0.4 - add: custom_settings -- skip robots.txt, which ends in a misleading 404-error-forward anyway, which causes Scrapy to throw exceptions -- enable Autothrottle to make sure we're not hammering their servers accidentally and cause timeouts - add: fallback-method for gathering missing "description" (the webmaster forgot to add the meta-field on some pages) - optimize: "skip_url"-check and its exclusion list, doing less unnecessary for-loop iterations - fix: crawler now inherits from CrawlSpider instead of scrapy.Spider (this should get rid of deprecation warnings) - optimize imports --- converter/spiders/grundschulkoenig_spider.py | 63 ++++++++++++-------- 1 file changed, 39 insertions(+), 24 deletions(-) diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py index 2802eea1..341209d0 100644 --- a/converter/spiders/grundschulkoenig_spider.py +++ b/converter/spiders/grundschulkoenig_spider.py @@ -2,51 +2,60 @@ import scrapy from scrapy import Request +from scrapy.spiders import CrawlSpider from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, PermissionItemLoader, ResponseItemLoader, \ - LomLifecycleItemloader, LomClassificationItemLoader + LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, LomLifecycleItemloader, \ + LomClassificationItemLoader from converter.spiders.base_classes import LomBase from converter.util.sitemap import from_xml_response, SitemapEntry -class GrundSchulKoenigSpider(scrapy.Spider, LomBase): +class GrundSchulKoenigSpider(CrawlSpider, LomBase): """ scrapes the Grundschulkönig website. 
""" start_urls = ['https://www.grundschulkoenig.de/sitemap.xml?sitemap=pages&cHash=b8e1a6633393d69093d0ebe93a3d2616'] name = 'grundschulkoenig_spider' - version = "0.0.3" # last update: 2022-04-14 - excluded_url_paths = ["/blog/", - "/rechtliches/", + version = "0.0.4" # last update: 2022-05-06 + custom_settings = { + "ROBOTSTXT_OBEY": False, + # while there is no robots.txt, there is a 404-forward-page that gets misinterpreted by Scrapy + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True + } + + excluded_url_paths = ["/404-page-not-found/", + "/blog/", "/footer-bottom/", + "/rechtliches/", "/suche/", - "/404-page-not-found/"] + ] excluded_overview_pages = [ + "https://www.grundschulkoenig.de/", "https://www.grundschulkoenig.de/deutsch/", + "https://www.grundschulkoenig.de/deutsch/deutsch-als-fremdsprache/", "https://www.grundschulkoenig.de/englisch/", + "https://www.grundschulkoenig.de/globale-elemente/", "https://www.grundschulkoenig.de/hsu-sachkunde/", - "https://www.grundschulkoenig.de/mehr/jahreskreis/" + "https://www.grundschulkoenig.de/landing/", + "https://www.grundschulkoenig.de/links/", + "https://www.grundschulkoenig.de/mehr/", + "https://www.grundschulkoenig.de/mehr/jahreskreis/", "https://www.grundschulkoenig.de/mathe/", "https://www.grundschulkoenig.de/musikkunst/kunst/", "https://www.grundschulkoenig.de/musikkunst/musik/", "https://www.grundschulkoenig.de/religion/", "https://www.grundschulkoenig.de/weitere-faecher/", "https://www.grundschulkoenig.de/vorschule/", - "https://www.grundschulkoenig.de/", - "https://www.grundschulkoenig.de/links/", "https://www.grundschulkoenig.de/suchergebnisse/", - "https://www.grundschulkoenig.de/landing/", - "https://www.grundschulkoenig.de/globale-elemente/", - "" ] def start_requests(self): for url in self.start_urls: yield Request(url=url, callback=self.parse_sitemap) - pass def getHash(self, response=None) -> str: pass @@ -79,11 +88,15 @@ def parse_sitemap(self, response: scrapy.http.XmlResponse): full_url_regex = re.compile(full_url) if full_url_regex.fullmatch(item.loc) is not None: skip_url = True - for url_pattern in self.excluded_url_paths: - current_page_regex = re.compile(url_pattern) - if current_page_regex.search(item.loc) is not None: - skip_url = True - if self.hasChanged(response) and skip_url is False: + break + if skip_url is False: + # in case the URL is already marked as to-be-skipped, we can skip this additional check + for url_pattern in self.excluded_url_paths: + current_page_regex = re.compile(url_pattern) + if current_page_regex.search(item.loc) is not None: + skip_url = True + break + if skip_url is False: yield response.follow(item.loc, callback=self.parse, cb_kwargs={'sitemap_entry': item}) def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None): @@ -91,7 +104,7 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry # content = response.xpath('//div[@class="page__content"]') # Worksheets are grouped, sometimes several worksheet-containers per page exist # worksheet_containers = response.xpath('//div[@class="module-worksheet"]') - # the worksheet_containers hold the links to invididual worksheet .pdf files + # the worksheet_containers hold the links to individual worksheet .pdf files base = BaseItemLoader(response=response) base.add_value("sourceId", response.url) @@ -106,6 +119,9 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry general = LomGeneralItemloader(response=response) general.add_value('title', title) 
description: str = response.xpath('//meta[@name="description"]/@content').get() + if description is None: + # this is a workaround for (currently: 4) sub-pages that have no description in the header meta-fields + description = response.xpath('//div[@class="content-item module-headline-paragraph"]/p/text()').get() general.add_value('description', description) # ToDo: check if "keywords" are available at the source when the next crawler update becomes necessary lom.add_value("general", general.load_item()) @@ -175,11 +191,10 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry lic.add_value('url', Constants.LICENSE_COPYRIGHT_LAW) base.add_value("license", lic.load_item()) - permissions = PermissionItemLoader(response=response) + permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) - response_loader = ResponseItemLoader() - response_loader.add_value('url', response.url) - base.add_value("response", response_loader.load_item()) + response_loader = super().mapResponse(response) + base.add_value('response', response_loader.load_item()) yield base.load_item() From 45b8c1add15ff2968dd5b7ebd78f7fc62796f8a7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 9 May 2022 11:26:10 +0200 Subject: [PATCH 072/590] add custom_settings --- converter/spiders/biologie_lernprogramme_spider.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/converter/spiders/biologie_lernprogramme_spider.py b/converter/spiders/biologie_lernprogramme_spider.py index 975784a9..6e94e773 100644 --- a/converter/spiders/biologie_lernprogramme_spider.py +++ b/converter/spiders/biologie_lernprogramme_spider.py @@ -12,6 +12,9 @@ class BiologieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): friendlyName = "Biologie-Lernprogramme" url = "https://biologie-lernprogramme.de/" version = "0.1.1" # last update: 2022-02-22 + custom_settings = { + "ROBOTSTXT_OBEY": False + } static_values = { "author": { From 35624ecd985758f233f41bd2631ab9deeeba4b26 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 9 May 2022 18:09:02 +0200 Subject: [PATCH 073/590] add custom_settings --- converter/spiders/mediothek_pixiothek_spider.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index e7ddda53..310403f3 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -17,10 +17,13 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): name = "mediothek_pixiothek_spider" url = "https://www.schulportal-thueringen.de/" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "MediothekPixiothek" # name as shown in the search ui - version = "0.1.1" # last update: 2022-02-21 + version = "0.1.1" # last update: 2022-05-09 start_urls = [ "https://www.schulportal-thueringen.de/tip-ms/api/public_mediothek_metadatenexport/publicMediendatei" ] + custom_settings = { + "ROBOTSTXT_OBEY": False, + } def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) From 3bfd6b73a2457f6cef3b2dd11b59c867d90a3415 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 12 May 2022 02:55:53 +0200 Subject: [PATCH 074/590] dilertube_spider v0.0.1 - crawls 1346 videos as of 2022-05-16 -- some videos on DiLerTube have missing 
descriptions/keywords, therefore get dropped (currently: 48) - thumbnail-direct-links on DiLerTube are locked behind an "Error 423"-response, therefore temporarily disabled the thumbnail gathering until we can figure out a workaround - add custom_settings - code cleanup & documentation --- converter/spiders/dilertube_spider.py | 329 ++++++++++++++++++++++++++ 1 file changed, 329 insertions(+) create mode 100644 converter/spiders/dilertube_spider.py diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py new file mode 100644 index 00000000..87555c2e --- /dev/null +++ b/converter/spiders/dilertube_spider.py @@ -0,0 +1,329 @@ +import re +from datetime import datetime + +import dateparser +import scrapy +import w3lib.html +from scrapy.spiders import CrawlSpider + +from converter.constants import Constants +from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ + LomLifecycleItemloader, LomEducationalItemLoader, LomClassificationItemLoader, ValuespaceItemLoader, \ + LicenseItemLoader +from converter.spiders.base_classes import LomBase +from converter.util.sitemap import from_xml_response + + +class DiLerTubeSpider(CrawlSpider, LomBase): + name = "dilertube_spider" + friendlyName = "DiLerTube" + start_urls = ["https://www.dilertube.de/sitemap.xml"] + version = "0.0.1" # last update: 2022-05-16 + custom_settings = { + "ROBOTSTXT_OBEY": False + } + + # debug_video_url_set = set() + + LICENSE_MAPPING = { + "CC BY 4.0": "https://creativecommons.org/licenses/by/4.0", + "CC BY-SA 4.0": "https://creativecommons.org/licenses/by-sa/4.0", + "CC BY-ND 4.0": "https://creativecommons.org/licenses/by-nd/4.0", + "CC BY-NC 4.0": "https://creativecommons.org/licenses/by-nc/4.0", + "CC BY-NC-SA 4.0": "https://creativecommons.org/licenses/by-nc-sa/4.0", + "CC BY-NC-ND 4.0": "https://creativecommons.org/licenses/by-nc-nd/4.0" + } + # ToDo: we're missing several licenses in converter.Constants (either keep using this mapping or update Constants) + DISCIPLINE_MAPPING = { + "Alltagskultur, Ernährung, Soziales (AES)": "Ernährung und Hauswirtschaft", + # ToDo: AES discipline exists since 2016 in BaWü, probably needs its own entry in the "disciplines.ttl"-Vocab + "Berufsorientierung": "Berufliche Bildung", + "Bildende Kunst": "Kunst", + # "Biologie": "Biologie", + # "Chemie": "Chemie", + # "Deutsch": "Deutsch", + # "Englisch": "Englisch", + "Ethik": "Ethik", + # "Französisch": "Französisch", + "Gemeinschaftskunde": "", + "Geographie": "Geografie", + # "Geschichte": "Geschichte", + "Gesundheit und Soziales": "", + "Informatik und Medienbildung": "", + "Lateinisch": "Latein", + "Materie Natur Technik (MNT)": "", + # "Mathematik": "Mathematik", + # "Musik": "Musik", + # "Pädagogik": "Pädagogik", + # "Philosophie": "Philosophie", + # "Religion": "Religion", + # "Sachunterricht": "Sachunterricht", + # "Spanisch": "Spanisch", + # "Sport": "Sport", + "Technik": "Arbeitslehre", + # "Wirtschaftskunde": "Wirtschaftskunde", + } + CATEGORY_IS_ACTUALLY_A_KEYWORD = [ + "DiLer Tutorials", "Führerscheine", "Imagefilme von Schulen", "Kanäle", "Methoden", "Naturphänomene", + "Sonstige", "Schülerprojekte", "Technik" + ] + + def start_requests(self) -> scrapy.Request: + """ + + :return: scrapy.Request + + Scrapy Contracts: + @url https://www.dilertube.de/sitemap.xml + @returns item 1 + """ + for start_url in self.start_urls: + yield scrapy.Request(url=start_url, callback=self.parse_sitemap) + + def parse_sitemap(self, response) -> scrapy.Request: + 
""" + Iterates through the sitemap and yields a scrapy.Request for every url found inside a -element. + :param response: + :return: scrapy.Request + + Scrapy Contracts: + @url https://www.dilertube.de/sitemap.xml + @returns requests 30 + """ + sitemap_items = from_xml_response(response) + # the sitemap contains the urls to all video categories (currently: 37); a single sitemap_item looks like this: + # + # https://www.dilertube.de/bildende-kunst.html + # weekly + # 0.5 + # + for sitemap_item in sitemap_items: + yield scrapy.Request(url=sitemap_item.loc, callback=self.parse_video_overview) + + def parse_video_overview(self, response) -> scrapy.Request: + """ + Gathers individual video urls from a category overview (e.g. "Englisch") and yields individual video_urls to the + parse()-method. + :param response: + :return: a scrapy.Request for every available video_url + + Scrapy Contracts: + @url https://wwww.dilertube.de/bildende-kunst.html + @returns requests 13 + """ + url_list = response.xpath('//*[@class="card-title m-0 mb-2"]/a/@href').getall() + # the individual links from the video-overview look like this: + # '/bildende-kunst/oer-video/kudivi-geschichte-der-kunstpaedagogik-03-bauhaus.html' + + # logging.debug(f"Video-links from {response.url}: \n {url_list}") + for url in url_list: + video_url: str = str("https://www.dilertube.de" + url) + # self.debug_video_url_set.add(video_url) + yield scrapy.Request(url=video_url, callback=self.parse) + + def getId(self, response=None) -> str: + pass + + def getHash(self, response=None) -> str: + pass + + def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + """ + Gathers metadata from a video-url, nests the metadata within a BaseItemLoader and yields a complete BaseItem by + calling the .load_item()-method. + :param response: scrapy.http.Response + :param kwargs: + :return: yields a converter.items.BaseItem by calling the ".load_item()"-method on its scrapy.ItemLoader + + Scrapy Contracts: + @url https://www.dilertube.de/ethik/oer-video/solidaritaet.html + @returns item 1 + """ + date_string_raw, date_string = str(), str() + date_regex = re.compile(r'(?P\d{2}).(?P\d{2}).(?P\d{4})') + channel_info_box: list = response.xpath('//div[@class="jv-channel"]/small/text()').getall() + for channel_info_item in channel_info_box: + if "Veröffentlicht am" in channel_info_item: + date_string_raw: str = channel_info_item + if date_string_raw is not None: + date_string_raw = w3lib.html.strip_html5_whitespace(date_string_raw) + if date_regex.search(date_string_raw): + date_string = date_regex.search(date_string_raw).group() + if date_string is not None: + date_parsed: datetime = dateparser.parse(date_string) + if date_parsed is not None: + date_string = date_parsed.isoformat() + else: + # fallback value: current time (in case we can't gather the published_date ("Veröffentlicht am: ...") + # from the DOM) + date_string = datetime.now().isoformat() + published_date = date_string + + # Below a video, these possible metadata fields might be available in the video-information-box: + # "Lizenz" always? (freeform text, set by the video-uploader) + # "Autor" optional (freeform text) + # "Quelle" optional (= the original source of the video, freeform text) + # "Produktionsjahr des Videos (ca.)" optional (year, e.g. "2020") + # "Produktionsdatum" optional (date, e.g. 
"09.03.2021") + license_description_raw = response.xpath('//div[@class="customFieldValue license"]/text()').get() + video_info_dict = dict() + if license_description_raw is not None: + license_description = w3lib.html.strip_html5_whitespace(license_description_raw) + video_info_dict.update({'license_description': license_description}) + if license_description is not None: + cc_pattern = re.compile(r'\((?PC{2})\)\s' + r'(?P\D{2}(-\D{2})*)' + r'.*' + r'(?<=\s)(?P\d\.\d)?(?=\s)' + ) + if cc_pattern.search(license_description) is not None: + cc_pattern_result_dict = cc_pattern.search(license_description).groupdict() + # cc_string_ready_for_mapping = str(cc_pattern_result_dict.get("CC") + "_" + # + cc_pattern_result_dict.get("CC_TYPE").replace("-", "_") + "_" + # + cc_pattern_result_dict.get("CC_VERSION").replace(".", "")) + # ToDo map license url with converter.Constants instead? (some licenses are missing there) + cc_string = str(cc_pattern_result_dict.get("CC") + " " + cc_pattern_result_dict.get("CC_TYPE") + + " " + cc_pattern_result_dict.get("CC_VERSION")) + if cc_string in self.LICENSE_MAPPING.keys(): + cc_url = self.LICENSE_MAPPING.get(cc_string) + video_info_dict.update({'cc_url': cc_url}) + + video_info_box = \ + response.xpath('//ul[@class="list-group mx-0 my-0"]//div[@class="card-body"]/div[@class="mb-2"]').getall() + for video_info_field in video_info_box: + selector_item = scrapy.Selector(text=video_info_field) + video_info_field_description = selector_item.xpath('//h4[@class="customFieldLabel "]/text()').get() + # the class-name "customFieldLabel " needs to come with that trailing whitespace! this is NOT A TYPO! + if video_info_field_description is not None: + if "Autor" in video_info_field_description: + author_string = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() + if author_string is not None: + author_string = w3lib.html.strip_html5_whitespace(author_string) + video_info_dict.update({'author': author_string}) + if "Quelle" in video_info_field_description: + source_string = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() + if source_string is not None: + source_string = w3lib.html.strip_html5_whitespace(source_string) + video_info_dict.update({'source': source_string}) + if "Produktionsjahr" in video_info_field_description: + production_year: str = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() + if production_year is not None: + production_year = w3lib.html.strip_html5_whitespace(production_year) + video_info_dict.update({'production_year': production_year}) + if "Produktionsdatum" in video_info_field_description: + production_date: str = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() + if production_date is not None: + production_date = w3lib.html.strip_html5_whitespace(production_date) + video_info_dict.update({'production_date': production_date}) + + base = BaseItemLoader() + + base.add_value('sourceId', response.url) + hash_temp: str = published_date + self.version + base.add_value('hash', hash_temp) + last_modified = published_date + # while this is not strictly the last_modified date, it is the only date we can gather from the OOM + base.add_value('lastModified', last_modified) + base.add_value('type', Constants.TYPE_MATERIAL) + # thumbnail_url: str = response.xpath('//meta[@property="og:image"]/@content').get() + # ToDo: DiLerTube provides thumbnails, but they are locked behind an error 423 when directly accessing the link + # if thumbnail_url is not None: + # 
base.add_value('thumbnail', thumbnail_url) + + if "source" in video_info_dict.keys(): + base.add_value('publisher', video_info_dict.get("source")) + + categories = list() + keywords = list() + categories_and_keywords_list: list = response.xpath('//ul[@class="list-group mx-0 my-0"]/li[' + '@class="list-group-item"]').getall() + # categories and keywords both use the same generic class names for its elements, therefore we try to identify + # the description-text and use its -siblings to extract the text-values: + for category_or_keyword_item in categories_and_keywords_list: + selector_item = scrapy.Selector(text=category_or_keyword_item) + category_or_keyword_description = selector_item.xpath('//span[@class="title"]/text()').get() + if "Kategorie" in category_or_keyword_description: + categories_temp = selector_item.xpath('//a[@class="badge-primary badge-pill"]/text()').getall() + if len(categories_temp) >= 1: + for category_potential_candidate in categories_temp: + if category_potential_candidate.startswith("||| "): + # there are some categories which are not school-disciplines but rather keywords + # e.g. "||| Methoden": https://www.dilertube.de/sonstige/oer-videos/methoden.html + category_potential_candidate: str = category_potential_candidate.replace("||| ", "") + if category_potential_candidate in self.CATEGORY_IS_ACTUALLY_A_KEYWORD: + keywords.append(category_potential_candidate) + else: + categories.append(category_potential_candidate) + if "Schlagwörter" in category_or_keyword_description: + keywords_temp = selector_item.xpath('//a[@class="badge-primary badge-pill"]/text()').getall() + if len(keywords_temp) >= 1: + keywords.extend(keywords_temp) + + lom = LomBaseItemloader() + + general = LomGeneralItemloader() + general.add_value('identifier', response.url) + general.add_value('title', response.xpath('//meta[@property="og:title"]/@content').get()) + general.add_value('description', response.xpath('//meta[@property="og:description"]/@content').get()) + general.add_value('language', response.xpath('/html/@lang').get()) + # grabs the language from the html language; there seem to be additional translations of DiLerTube in the works: + # the german URLs use 'de-DE' by default, + # while the english translations use 'en-GB', so this looks like a suitable indicator + general.add_value('keyword', keywords) + lom.add_value('general', general.load_item()) + + technical = LomTechnicalItemLoader() + technical.add_value('format', 'text/html') + technical.add_value('location', response.url) + lom.add_value('technical', technical.load_item()) + + lifecycle = LomLifecycleItemloader() + if "production_year" in video_info_dict.keys(): + lifecycle.add_value('date', video_info_dict.get("production_year")) + if "production_date" in video_info_dict.keys(): + lifecycle.add_value('date', video_info_dict.get("production_date")) + lom.add_value('lifecycle', lifecycle.load_item()) + + educational = LomEducationalItemLoader() + lom.add_value('educational', educational.load_item()) + + classification = LomClassificationItemLoader() + lom.add_value('classification', classification.load_item()) + + # once you've filled "general", "technical", "lifecycle" and "educational" with values, + # the LomBaseItem is loaded into the "base"-BaseItemLoader + base.add_value('lom', lom.load_item()) + + vs = ValuespaceItemLoader() + for category_item in categories: + if category_item in self.DISCIPLINE_MAPPING.keys(): + discipline = self.DISCIPLINE_MAPPING.get(category_item) + vs.add_value('discipline', discipline) + else: + 
vs.add_value('discipline', category_item) + vs.add_value('new_lrt', "7a6e9608-2554-4981-95dc-47ab9ba924de") # Video (Material) + vs.add_value('intendedEndUserRole', ["learner", "teacher"]) + vs.add_value('conditionsOfAccess', "no login") + vs.add_value('containsAdvertisement', "no") + vs.add_value('dataProtectionConformity', "Datensparsam") + # see https://www.dilertube.de/datenschutz.html + vs.add_value('price', "no") + base.add_value('valuespaces', vs.load_item()) + + lic = LicenseItemLoader() + if "license_description" in video_info_dict.keys(): + # DiLerTube allows the uploaders to enter freeform text into the license field + lic.add_value('description', video_info_dict.get("license_description")) + if "cc_url" in video_info_dict.keys(): + lic.add_value('url', video_info_dict.get("cc_url")) + if "author" in video_info_dict.keys(): + lic.add_value('author', video_info_dict.get("author")) + base.add_value('license', lic.load_item()) + + permissions = super().getPermissions(response) + base.add_value('permissions', permissions.load_item()) + + response_loader = super().mapResponse(response) + base.add_value('response', response_loader.load_item()) + + yield base.load_item() From ac9bf85f38fc7ab5b9e54178a07cfe42a30c434d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 16 May 2022 15:00:31 +0200 Subject: [PATCH 075/590] fix: date, production_year and production_date detection - replace: dateparser's (currently bugged in v1.1.1) date-detection with a RegEx expression before handing the date over to the pipeline --- converter/spiders/dilertube_spider.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py index 87555c2e..8015578e 100644 --- a/converter/spiders/dilertube_spider.py +++ b/converter/spiders/dilertube_spider.py @@ -1,7 +1,6 @@ import re from datetime import datetime -import dateparser import scrapy import w3lib.html from scrapy.spiders import CrawlSpider @@ -140,7 +139,9 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: @returns item 1 """ date_string_raw, date_string = str(), str() - date_regex = re.compile(r'(?P\d{2}).(?P\d{2}).(?P\d{4})') + date_regex = re.compile(r'((?P\d{2})\.)?' + r'((?<=\.)(?P\d{2})\.)?' 
+ r'(?P\d{4})') channel_info_box: list = response.xpath('//div[@class="jv-channel"]/small/text()').getall() for channel_info_item in channel_info_box: if "Veröffentlicht am" in channel_info_item: @@ -150,7 +151,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: if date_regex.search(date_string_raw): date_string = date_regex.search(date_string_raw).group() if date_string is not None: - date_parsed: datetime = dateparser.parse(date_string) + # ToDo RegEx discerning between Year-only and proper dates + date_parsed: datetime = datetime.strptime(date_string, "%d.%m.%Y") if date_parsed is not None: date_string = date_parsed.isoformat() else: @@ -279,9 +281,16 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: lifecycle = LomLifecycleItemloader() if "production_year" in video_info_dict.keys(): - lifecycle.add_value('date', video_info_dict.get("production_year")) + # this is a necessary workaround because dateparser.parse() would mis-calculate year-only representations of + # the date + datetime_production_year: datetime = datetime.strptime(video_info_dict.get("production_year"), "%Y") + datetime_production_year: str = datetime_production_year.isoformat() + lifecycle.add_value('date', datetime_production_year) if "production_date" in video_info_dict.keys(): - lifecycle.add_value('date', video_info_dict.get("production_date")) + # this is a necessary workaround because dateparser.parse() would confuse de-DE time-formats as en-US + datetime_production_date: datetime = datetime.strptime(video_info_dict.get("production_date"), "%d.%m.%Y") + datetime_production_date: str = datetime_production_date.isoformat() + lifecycle.add_value('date', datetime_production_date) lom.add_value('lifecycle', lifecycle.load_item()) educational = LomEducationalItemLoader() From e165fc80ca4303584b9b921906b1b50de546939f Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 20 May 2022 14:07:24 +0200 Subject: [PATCH 076/590] add: custom_settings (Autothrottle enabled) - without AUTOTHROTTLE_ENABLED the crawler would receive timeouts and miss items while crawling - a successful crawl currently (2022-05-20) yields 534 items --- converter/spiders/kmap_spider.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/converter/spiders/kmap_spider.py b/converter/spiders/kmap_spider.py index 58c41629..2aaca434 100644 --- a/converter/spiders/kmap_spider.py +++ b/converter/spiders/kmap_spider.py @@ -16,11 +16,16 @@ class KMapSpider(CrawlSpider, LomBase): name = "kmap_spider" friendlyName = "KMap.eu" - version = "0.0.6" # last update: 2022-04-25 + version = "0.0.6" # last update: 2022-05-20 sitemap_urls = [ "https://kmap.eu/server/sitemap/Mathematik", "https://kmap.eu/server/sitemap/Physik" ] + custom_settings = { + "ROBOTSTXT_OBEY": False, + "AUTOTHROTTLE_ENABLED": True, + # "AUTOTHROTTLE_DEBUG": True + } allowed_domains = ['kmap.eu'] # keep the console clean from spammy DEBUG-level logging messages, adjust as needed: logging.getLogger('websockets.server').setLevel(logging.ERROR) From 6dacacafbad0e4955a48e4dd60425dc2ea4b53d5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 20 May 2022 16:13:44 +0200 Subject: [PATCH 077/590] digitallearninglab_spider v0.1.2 - add: custom_settings -- Autothrottle enabled and CONCURRENCY set to 1 (otherwise the crawler gets recognized and blocked by the website) - fix: the previous crawler-version used a (now deprecated) .body_as_unicode()-method, 
which resulted in Exceptions -- now uses scrapy.http.Textresponse.json(), since this built-in function is available as of Scrapy 2.2 - fix: several (weak) warnings -- ToDo: pyCharm shows 15 weak warnings due to shadowed variable names and too broad 'except'-clauses (fix these the next time when the API changes or the crawler stops working) - optimize imports --- .../spiders/digitallearninglab_spider.py | 78 +++++++++++-------- 1 file changed, 46 insertions(+), 32 deletions(-) diff --git a/converter/spiders/digitallearninglab_spider.py b/converter/spiders/digitallearninglab_spider.py index 473a1eda..470b73d3 100644 --- a/converter/spiders/digitallearninglab_spider.py +++ b/converter/spiders/digitallearninglab_spider.py @@ -1,24 +1,37 @@ -from converter.items import * -import time -from .base_classes import LrmiBase -import json import html -from converter.valuespace_helper import ValuespaceHelper -from converter.constants import Constants +import time + import scrapy +from scrapy.spiders import CrawlSpider + +from converter.constants import Constants +from converter.valuespace_helper import ValuespaceHelper +from .base_classes import LrmiBase + -# Spider to fetch RSS from planet schule -class DigitallearninglabSpider(scrapy.Spider, LrmiBase): +class DigitallearninglabSpider(CrawlSpider, LrmiBase): name = "digitallearninglab_spider" friendlyName = "digital.learning.lab" url = "https://digitallearninglab.de" - version = "0.1.1" + version = "0.1.2" # last update: 2022-05-20 + custom_settings = { + "ROBOTSTXT_OBEY": False, + "AUTOTHROTTLE_ENABLED": True, + # Digital Learning Lab recognizes and blocks crawlers that are too fast: + # without the Autothrottle we'll be seeing HTTP Errors 503 (and therefore missing out on lots of items) + # "AUTOTHROTTLE_DEBUG": True, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 1, + "AUTOTHROTTLE_START_DELAY": 0.25 + } apiUrl = "https://digitallearninglab.de/api/%type?q=&sorting=latest&page=%page" + # Unterrichtsbausteine (API "count" value): 228 + # tools: 182 + # therefore we expect 410 items after a successful crawl def __init__(self, **kwargs): LrmiBase.__init__(self, **kwargs) - def mapResponse(self, response): + def mapResponse(self, response, **kwargs): return LrmiBase.mapResponse(self, response) def getId(self, response): @@ -28,42 +41,43 @@ def getHash(self, response): modified = self.getLRMI("dateModified", response=response) if modified: return modified + self.version - # fallback if lrmi was unparsable + # fallback if LRMI was not parsable return time.time() - def startRequest(self, type, page): + def start_request(self, type, page): return scrapy.Request( url=self.apiUrl.replace("%page", str(page)).replace("%type", type), - callback=self.parseRequest, + callback=self.parse_request, headers={"Accept": "application/json", "Content-Type": "application/json"}, meta={"page": page, "type": type}, ) def start_requests(self): - yield self.startRequest("unterrichtsbausteine", 1) - yield self.startRequest("tools", 1) + yield self.start_request("unterrichtsbausteine", 1) + yield self.start_request("tools", 1) - def parseRequest(self, response): - data = json.loads(response.body_as_unicode()) + def parse_request(self, response: scrapy.http.TextResponse): + data = response.json() results = data.get("results") if results: for item in results: - copyResponse = response.replace(url=self.url + item.get("url")) - copyResponse.meta["item"] = item - if self.hasChanged(copyResponse): + copy_response = response.replace(url=self.url + item.get("url")) + copy_response.meta["item"] = 
item + if self.hasChanged(copy_response): yield scrapy.Request( - url=copyResponse.url, - callback=self.handleEntry, + url=copy_response.url, + callback=self.handle_entry, meta={"item": item, "type": response.meta["type"]}, ) - yield self.startRequest( + yield self.start_request( response.meta["type"], response.meta["page"] + 1 ) - def handleEntry(self, response): + def handle_entry(self, response): return LrmiBase.parse(self, response) - def getType(self, response): + @staticmethod + def get_type(response): if response.meta["type"] == "tools": return Constants.TYPE_TOOL else: @@ -77,7 +91,7 @@ def getBase(self, response): "thumbnail", response.xpath('//img[@class="content-info__image"]/@src').get(), ) - base.replace_value("type", self.getType(response)) + base.replace_value("type", self.get_type(response)) return base def getLOMGeneral(self, response): @@ -108,10 +122,10 @@ def getValuespaces(self, response): response.xpath( '//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-level")]/parent::*//text()' ) - .get() - .replace("Stufe", "") - .strip() - .split(" - ") + .get() + .replace("Stufe", "") + .strip() + .split(" - ") ) if len(range): valuespaces.add_value( @@ -130,7 +144,7 @@ def getValuespaces(self, response): lrt = response.meta["item"].get("type") valuespaces.add_value("learningResourceType", lrt) try: - toolType = list( + tool_type = list( map( lambda x: x.strip(), response.xpath( @@ -139,7 +153,7 @@ def getValuespaces(self, response): ) ) # @TODO: proper mapping, maybe specialised tool field? - valuespaces.add_value("learningResourceType", toolType) + valuespaces.add_value("learningResourceType", tool_type) except: pass return valuespaces From bb6c4598142dbeaea9bdb1b388ed2a80e5b1f9b4 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 20 May 2022 17:31:01 +0200 Subject: [PATCH 078/590] segu_spider v0.1.1 - fix: the previous crawler-version used a (now deprecated) .body_as_unicode()-method, which resulted in Exceptions -- now uses scrapy.http.Textresponse.json() - fix: several (weak) warnings by matching method signatures to the base-classes - optimize imports --- converter/spiders/segu_spider.py | 64 ++++++++++++++++++-------------- 1 file changed, 36 insertions(+), 28 deletions(-) diff --git a/converter/spiders/segu_spider.py b/converter/spiders/segu_spider.py index eb72920e..1395e8a8 100644 --- a/converter/spiders/segu_spider.py +++ b/converter/spiders/segu_spider.py @@ -1,31 +1,35 @@ -from converter.items import * -from .base_classes import LomBase, JSONBase import json + import requests -from converter.constants import * import scrapy -# Spider to fetch RSS from planet schule -class SeguSpider(scrapy.Spider, LomBase, JSONBase): +from scrapy.spiders import CrawlSpider + +from converter.constants import * +from .base_classes import LomBase, JSONBase + + +class SeguSpider(CrawlSpider, LomBase, JSONBase): name = "segu_spider" friendlyName = "segu" url = "https://segu-geschichte.de/" - version = "0.1.0" + version = "0.1.1" # last update: 2022-05-20 apiUrl = "https://segu-geschichte.de/wp-json/wp/v2/pages?page=%page" categories = {} + def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) - def mapResponse(self, response): + def mapResponse(self, response, **kwargs): r = LomBase.mapResponse(self, response, fetchData=False) r.replace_value("text", "") r.replace_value("html", "") r.replace_value("url", response.meta["item"].get("link")) return r - def getId(self, response): + def 
getId(self, response=None) -> str: return response.meta["item"].get("id") - def getHash(self, response): + def getHash(self, response=None): return response.meta["item"].get("modified") + self.version def start_requests(self): @@ -37,31 +41,32 @@ def start_requests(self): for cat in categories: self.categories[cat["id"]] = cat["name"] - yield self.fetchPage() + yield self.fetch_page() - def fetchPage(self, page = 1): + def fetch_page(self, page=1): return scrapy.Request( url=self.apiUrl.replace("%page", str(page)), - callback=self.parseRequest, + callback=self.parse_request, headers={"Accept": "application/json", "Content-Type": "application/json"}, meta={"page": page, "type": type}, ) - def parseRequest(self, response): - results = json.loads(response.body_as_unicode()) + + def parse_request(self, response: scrapy.http.TextResponse): + results = response.json() if results: for item in results: - copyResponse = response.copy() - copyResponse.meta["item"] = item - if self.hasChanged(copyResponse): - yield self.handleEntry(copyResponse) - yield self.fetchPage(response.meta["page"] + 1) + response_copy = response.copy() + response_copy.meta["item"] = item + if self.hasChanged(response_copy): + yield self.handle_entry(response_copy) + yield self.fetch_page(response.meta["page"] + 1) - def handleEntry(self, response): + def handle_entry(self, response): return LomBase.parse(self, response) - # thumbnail is always the same, do not use the one from rss - def getBase(self, response): + def getBase(self, response=None): base = LomBase.getBase(self, response) + # thumbnail is always the same, do not use the one from rss base.replace_value( "thumbnail", self.get("acf.thumbnail.url", json=response.meta["item"]) ) @@ -73,7 +78,7 @@ def getBase(self, response): ) return base - def getLOMGeneral(self, response): + def getLOMGeneral(self, response=None): general = LomBase.getLOMGeneral(self, response) general.replace_value( "title", @@ -93,7 +98,7 @@ def getLOMGeneral(self, response): general.add_value("keyword", list(map(lambda x: self.categories[x], cat))) return general - def getLOMTechnical(self, response): + def getLOMTechnical(self, response=None): technical = LomBase.getLOMTechnical(self, response) technical.replace_value("format", "text/html") technical.replace_value( @@ -101,12 +106,15 @@ def getLOMTechnical(self, response): ) return technical - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value("url", Constants.LICENSE_CC_BY_SA_40) - return license + def getLicense(self, response=None): + license_loader = LomBase.getLicense(self, response) + license_loader.add_value("url", Constants.LICENSE_CC_BY_SA_40) + return license_loader def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) valuespaces.add_value("discipline", "Geschichte") return valuespaces + + def parse(self, response, **kwargs): + super().parse(response) From 0e6372509d47f5a024d98c69796733ee5b67bbb5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 23 May 2022 12:26:44 +0200 Subject: [PATCH 079/590] add custom_settings (Autothrottle) - the Autothrottle should guarantee that even when the ZUM servers are taking long to respond, we won't lose (m)any items -- a successful crawl should yield 680 Items (as of 2022-05-23) --- converter/spiders/zum_dwu_spider.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/converter/spiders/zum_dwu_spider.py b/converter/spiders/zum_dwu_spider.py index 502713d7..bef89a4f 100644 
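The throttling change described above follows the same pattern as the surrounding patches (digitallearninglab, zum_dwu, zum_mathe_apps, zum_physik_apps, tutory): each spider opts in through a per-spider custom_settings override instead of a project-wide settings change. A minimal, self-contained sketch of that pattern; the spider name and start URL are placeholders and not taken from the repository:

import scrapy


class ThrottledSpider(scrapy.Spider):
    # placeholder identifiers; only the custom_settings block mirrors the pattern used in these patches
    name = "throttled_spider"
    start_urls = ["https://example.org/"]
    custom_settings = {
        "AUTOTHROTTLE_ENABLED": True,          # adapt the download delay to observed response times
        "AUTOTHROTTLE_TARGET_CONCURRENCY": 1,  # aim for roughly one request in flight per remote host
        "AUTOTHROTTLE_START_DELAY": 0.25,      # initial delay before the throttle has gathered data
        # "AUTOTHROTTLE_DEBUG": True,          # uncomment to log every throttling decision
    }

    def parse(self, response, **kwargs):
        self.logger.info("crawled %s", response.url)
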
--- a/converter/spiders/zum_dwu_spider.py +++ b/converter/spiders/zum_dwu_spider.py @@ -20,6 +20,11 @@ class ZumDwuSpider(CrawlSpider, LomBase): "http://www.zum.de/dwu/umaptg.htm" # Physik-Teilgebiete ] version = "0.0.2" # last update: 2022-04-21 + custom_settings = { + "AUTOTHROTTLE_ENABLED": True, + # "AUTOTHROTTLE_DEBUG": True + } + parsed_urls = set() # holds the already parsed urls to minimize the amount of duplicate requests debug_xls_set = set() # The author used a HTML suite for building the .htm documents (Hot Potatoes by Half-Baked Software) From 11527038f1e01536597ae1d0e6749b87221dcfc7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 23 May 2022 14:09:33 +0200 Subject: [PATCH 080/590] zum_mathe_apps / zum_physik_apps v0.0.6 - add: Autothrottle - replace Pyppeteer with Playwright -- Pyppeteer was dropping connections, therefore losing items -- a successful crawl yields 95 items for zum_mathe_apps_spider and 55 items for zum_physik_apps_spider (2022-05-23) --- converter/spiders/zum_mathe_apps_spider.py | 8 ++++++-- converter/spiders/zum_physik_apps_spider.py | 11 +++++++---- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/converter/spiders/zum_mathe_apps_spider.py b/converter/spiders/zum_mathe_apps_spider.py index d4fbb90d..92d1f44b 100644 --- a/converter/spiders/zum_mathe_apps_spider.py +++ b/converter/spiders/zum_mathe_apps_spider.py @@ -21,7 +21,11 @@ class ZumMatheAppsSpider(scrapy.Spider, LomBase): "https://www.walter-fendt.de/html5/mde/", # "http://www.zum.de/ma/fendt/mde/" ] - version = "0.0.5" # reflects the structure of ZUM Mathe Apps on 2021-09-30 + version = "0.0.6" # last update: 2022-05-23 - items expected after a successful crawl: 95 + custom_settings = { + "AUTOTHROTTLE_ENABLED": True, + # "AUTOTHROTTLE_DEBUG": True + } # keep the console clean from spammy DEBUG-level logging messages, adjust as needed: logging.getLogger('websockets.server').setLevel(logging.ERROR) logging.getLogger('websockets.protocol').setLevel(logging.ERROR) @@ -80,7 +84,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): @returns items 1 """ # fetching publication date and lastModified from dynamically loaded

<p class="Ende">-element:
-        url_data_splash_dict = WebTools.getUrlData(response.url, engine=WebEngine.Pyppeteer)
+        url_data_splash_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright)
         splash_html_string = url_data_splash_dict.get('html')
         page_end_element = Selector(text=splash_html_string).xpath('//p[@class="Ende"]').get()
         line_regex = re.compile(r'<br>')
diff --git a/converter/spiders/zum_physik_apps_spider.py b/converter/spiders/zum_physik_apps_spider.py
index 0256a1a5..e6d10fef 100644
--- a/converter/spiders/zum_physik_apps_spider.py
+++ b/converter/spiders/zum_physik_apps_spider.py
@@ -20,9 +20,12 @@ class ZumPhysikAppsSpider(scrapy.Spider, LomBase):
         "https://www.walter-fendt.de/html5/phde/",
         # "https://www.zum.de/ma/fendt/phde/"
     ]
-    version = "0.0.5"  # reflects the structure of ZUM Physik Apps on 2021-09-30 (there should be 55 scraped items -
-    # when the crawling process is done)
+    version = "0.0.6"  # last update: 2022-05-23
+    # expected amount of items after a successful crawl: 55
+    custom_settings = {
+        "AUTOTHROTTLE_ENABLED": True,
+        # "AUTOTHROTTLE_DEBUG": True
+    }
 
     def getId(self, response=None) -> str:
         return response.url
@@ -61,7 +64,7 @@ def parse(self, response: scrapy.http.Response, **kwargs):
         @returns item 1
         """
         # fetching publication date and lastModified from dynamically loaded

<p class="Ende">-element:
-        url_data_splash_dict = WebTools.getUrlData(response.url, engine=WebEngine.Pyppeteer)
+        url_data_splash_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright)
         splash_html_string = url_data_splash_dict.get('html')
         page_end_element = Selector(text=splash_html_string).xpath('//p[@class="Ende"]').get()
         line_regex = re.compile(r'<br>
') From 31118c89a3b336d730ee3037d4aa6dee7bfa827e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 23 May 2022 16:47:26 +0200 Subject: [PATCH 081/590] tutory_spider v0.1.2 - add: Autothrottle - fix: warnings due to not using the same method signatures -- fix: weak warnings (PEP8 naming conventions for method names) - change: use CrawlSpider instead of scrapy.Spider (allows us to use custom_settings) - optimize imports --- converter/spiders/tutory_spider.py | 62 ++++++++++++++++-------------- 1 file changed, 33 insertions(+), 29 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 44778551..fa90b866 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -1,44 +1,49 @@ -from converter.items import * -from .base_classes import LomBase, JSONBase -import json -from converter.constants import Constants -from scrapy.selector import Selector import scrapy +from scrapy.selector import Selector +from scrapy.spiders import CrawlSpider + +from converter.constants import Constants +from .base_classes import LomBase, JSONBase + -# Spider to fetch API from Serlo -class TutorySpider(scrapy.Spider, LomBase, JSONBase): +class TutorySpider(CrawlSpider, LomBase, JSONBase): name = "tutory_spider" friendlyName = "tutory" url = "https://www.tutory.de/" objectUrl = "https://www.tutory.de/bereitstellung/dokument/" baseUrl = "https://www.tutory.de/api/v1/share/" - version = "0.1.1" + version = "0.1.2" # last update: 2022-05-23 + custom_settings = { + "AUTOTHROTTLE_ENABLED": True, + "ROBOTSTXT_OBEY": False, + "AUTOTHROTTLE_DEBUG": True + } def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) def start_requests(self): url = self.baseUrl + "worksheet?groupSlug=entdecken&pageSize=999999" - yield scrapy.Request(url=url, callback=self.parseList) + yield scrapy.Request(url=url, callback=self.parse_list) - def parseList(self, response): - data = json.loads(response.body) + def parse_list(self, response: scrapy.http.TextResponse): + data = response.json() for j in data["worksheets"]: - responseCopy = response.replace(url=self.objectUrl + j["id"]) - responseCopy.meta["item"] = j - if self.hasChanged(responseCopy): - yield self.parse(responseCopy) + response_copy = response.replace(url=self.objectUrl + j["id"]) + response_copy.meta["item"] = j + if self.hasChanged(response_copy): + yield self.parse(response_copy) - def getId(self, response): + def getId(self, response=None): return str(response.meta["item"]["id"]) - def getHash(self, response): + def getHash(self, response=None): return response.meta["item"]["updatedAt"] + self.version - def parse(self, response): + def parse(self, response, **kwargs): return LomBase.parse(self, response) - def getBase(self, response): + def getBase(self, response=None): base = LomBase.getBase(self, response) base.add_value("lastModified", response.meta["item"]["updatedAt"]) base.add_value( @@ -60,15 +65,16 @@ def getValuespaces(self, response): ) valuespaces.add_value("discipline", discipline) - valuespaces.add_value("learningResourceType", "worksheet") + # valuespaces.add_value("learningResourceType", "worksheet") # remove this value when reaching crawler v0.1.3 + valuespaces.add_value("new_lrt", "36e68792-6159-481d-a97b-2c00901f4f78") # Arbeitsblatt return valuespaces - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) - return license + def getLicense(self, 
response=None): + license_loader = LomBase.getLicense(self, response) + license_loader.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) + return license_loader - def getLOMGeneral(self, response): + def getLOMGeneral(self, response=None): general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"]["name"]) if 'description' in response.meta["item"]: @@ -77,9 +83,7 @@ def getLOMGeneral(self, response): html = self.getUrlData(response.url)["html"] if html: data = ( - Selector(text=html) - .xpath('//ul[contains(@class,"worksheet-pages")]//text()') - .getall() + Selector(text=html).xpath('//ul[contains(@class,"worksheet-pages")]//text()').getall() ) cutoff = 4 if len(data) > cutoff: @@ -91,7 +95,7 @@ def getLOMGeneral(self, response): general.add_value("description", text) return general - def getLOMTechnical(self, response): + def getLOMTechnical(self, response=None): technical = LomBase.getLOMTechnical(self, response) technical.add_value("location", response.url) technical.add_value("format", "text/html") From 94e038156a91b8b4ba40f6ac941f574a9484c826 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 23 May 2022 18:14:36 +0200 Subject: [PATCH 082/590] fobizz_spider v0.0.2 - fix: 31 Exceptions occured on overview-pages that hold no json_ld for us -- implemented a to-be-skipped-url-list that gets checked before handing over the potential item URL to the parse_site()-method - optimize imports - a succesful crawl should yield 1620 items (as of 2022-05-23) --- converter/spiders/fobizz_spider.py | 61 ++++++++++++++++++++++++------ 1 file changed, 50 insertions(+), 11 deletions(-) diff --git a/converter/spiders/fobizz_spider.py b/converter/spiders/fobizz_spider.py index a22e2c01..9377250e 100644 --- a/converter/spiders/fobizz_spider.py +++ b/converter/spiders/fobizz_spider.py @@ -1,13 +1,16 @@ from __future__ import annotations + +from urllib import parse + import scrapy +from extruct.jsonld import JsonLdExtractor + from converter.constants import Constants -from converter.items import BaseItemLoader, LomClassificationItemLoader, LomGeneralItemloader, LomBaseItemloader, LomLifecycleItemloader, LomTechnicalItemLoader, \ - LicenseItemLoader, PermissionItemLoader, ResponseItemLoader, LomEducationalItemLoader, ValuespaceItemLoader, \ - LomLifecycleItemloader, LomClassificationItemLoader -from converter.util.sitemap import SitemapEntry, from_xml_response -from urllib import parse +from converter.items import LomGeneralItemloader, LomBaseItemloader, LomTechnicalItemLoader, \ + LicenseItemLoader, ResponseItemLoader, LomEducationalItemLoader, ValuespaceItemLoader, \ + LomLifecycleItemloader from converter.spiders.base_classes import LomBase -from extruct.jsonld import JsonLdExtractor +from converter.util.sitemap import SitemapEntry, from_xml_response jslde = JsonLdExtractor() @@ -16,6 +19,7 @@ "Handlungsfeld Gesellschaft": "Gesellschaftskunde" } + class FobizzSpider(scrapy.Spider, LomBase): """ scrapes the fobizz website. 
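The fobizz crawler reads its metadata from the JSON-LD blocks embedded in each material page via extruct's JsonLdExtractor (instantiated as jslde above), which returns one dict per embedded script of type "application/ld+json". A minimal sketch of that lookup; the sample HTML and the chosen field values are illustrative assumptions, only the import and the license/creator access pattern mirror the spider code:

from extruct.jsonld import JsonLdExtractor

sample_html = """
<html><head>
<script type="application/ld+json">
{"name": "Beispielmaterial",
 "license": "https://creativecommons.org/licenses/by-sa/4.0/",
 "creator": [{"name": "Jane Doe"}]}
</script>
</head><body></body></html>
"""

jslde = JsonLdExtractor()
for data in jslde.extract(sample_html):  # one dict per embedded JSON-LD block
    print(data.get("name"), data.get("license"))
    for creator in data.get("creator", []):
        print(creator.get("name", ""))
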
@@ -24,7 +28,41 @@ class FobizzSpider(scrapy.Spider, LomBase): start_urls = ['https://plattform.fobizz.com/sitemap'] name = 'fobizz_spider' - version = '0.0.1' + version = '0.0.2' # last update: 2022-05-23 + + overview_pages_without_a_json_ld = [ + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Religion", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Economy", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Biology", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Ethics", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Philosophie", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Geographie", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/History", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Politics", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Other", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Media", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Computer%20Science", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Sport", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Art", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Natural%20Sciences", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Technology", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Personal%20Education", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Physics", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Chemistry", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Englisch", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Unspecified", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/German", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Foreign%20Languages", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Music", + "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Math", + "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Other", + "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Lower%20Grade", + "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Upper%20School", + "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Middle%20Level", + "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Elementary%20School", + "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Special%20School", + "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Vocational%20School" + ] def getId(self, response: scrapy.http.Response = None) -> str: return parse.urlparse(response.meta["sitemap_entry"].loc).path @@ -45,6 +83,9 @@ def parse(self, response: scrapy.http.XmlResponse, **kwargs): for item in items: if not item.loc.startswith("https://plattform.fobizz.com/unterrichtsmaterialien/"): continue + if item.loc in self.overview_pages_without_a_json_ld: + # there are 31 overview-pages that don't hold a json_ld, therefore can't be parsed + continue # there are some pages in the sitemap which direct to empty pages # they contain grade_type oder subject_type in their url elif "grade_type" in item.loc: @@ -107,9 +148,9 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE lic.add_value('url', data.get("license", None)) for creator in data.get("creator", []): lic.add_value("author", creator.get("name", "")) - + base.add_value("license", 
lic.load_item()) - + permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) @@ -117,5 +158,3 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE response_loader.add_value('url', response.url) base.add_value("response", response_loader.load_item()) yield base.load_item() - - From c28d467f1594bb75f1f3d02f85ccd2c3608f3035 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 Jun 2022 15:12:03 +0200 Subject: [PATCH 083/590] edulabs_spider v0.0.1 (squashed) - add: scrapy Contracts - code cleanup - expected items of a successful crawl: 51 --- converter/spiders/edulabs_spider.py | 267 ++++++++++++++++++++++++++++ 1 file changed, 267 insertions(+) create mode 100644 converter/spiders/edulabs_spider.py diff --git a/converter/spiders/edulabs_spider.py b/converter/spiders/edulabs_spider.py new file mode 100644 index 00000000..e9a55b94 --- /dev/null +++ b/converter/spiders/edulabs_spider.py @@ -0,0 +1,267 @@ +import datetime +import json + +import scrapy +import w3lib.html + +from converter.constants import Constants +from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ + LomLifecycleItemloader, LomEducationalItemLoader, LomClassificationItemLoader, ValuespaceItemLoader, \ + LicenseItemLoader +from converter.spiders.base_classes import LomBase + + +class EdulabsSpider(scrapy.Spider, LomBase): + name = "edulabs_spider" + start_urls = ["https://edulabs.de/oer/"] + friendlyName = "edulabs" + version = "0.0.1" + allowed_domains = ["edulabs.de"] + + MAPPING_DISCIPLINES = { + "Erdkunde/Geografie": "Erdkunde", + "Mathe": "Mathematik", + "Sachkunde": "Sachunterricht", + "SoWi": "Social education", + } + MAPPING_EDUCATIONAL_CONTEXT = { + "KITA/VORSCHULE": "Elementarbereich", + "GRUNDSTUFE (1-3)": "Primarstufe", + "SEKUNDARSTUFE 1": "Sekundarstufe 1", + "SEKUNDARSTUFE 2": "Sekundarstufe 2" + } + + def start_requests(self) -> scrapy.Request: + for start_url in self.start_urls: + yield scrapy.Request(url=start_url, callback=self.parse_overview) + + def parse_overview(self, response: scrapy.http.Response) -> scrapy.Request: + """ + + :param response: scrapy.http.Response + :return: scrapy.Request + + Scrapy Contracts: + @url https://edulabs.de/oer/ + @returns requests 48 + """ + url_list = response.xpath('//a[@class="teaser-inner"]/@href').getall() + for url in url_list: + # ToDo: "/blog/"-entries have a different structure than materials + yield response.follow(url=url, callback=self.parse) + + def getId(self, response=None) -> str: + pass + + def getHash(self, response=None) -> str: + pass + + def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + """ + + Scrapy Contracts: + @url https://edulabs.de/oer/30utp/ + @returns item 1 + """ + disciplines = list() + educational_context = list() + keywords = list() + typical_learning_time = list() + digital_competencies: list = response.xpath('//h4[@class="filter-items-headline"][contains(text(),"DIGITALE ' + 'KOMPETENZEN")]/following-sibling::div/div/text()').getall() + + target_age_group: list = response.xpath('//div[@class="col-xs-24 col-md-12 edusprint-zielgruppe ' + 'edusprint-grey filter-items"]/div/a/span/text()').getall() + if target_age_group: + # mapping "Zielgruppe" from edulabs to educational_context + for potential_educontext in target_age_group: + if potential_educontext.startswith("GRUNDSTUFE"): + keywords.append(potential_educontext) + if 
potential_educontext in self.MAPPING_EDUCATIONAL_CONTEXT: + educontext_mapped = self.MAPPING_EDUCATIONAL_CONTEXT.get(potential_educontext) + educational_context.append(educontext_mapped) + + school_subject_groups: list = response.xpath('//h4[@class="filter-items-headline"][contains(text(),' + '"FÄCHERGRUPPEN")]/following-sibling::div/div/text()').getall() + if school_subject_groups: + # school subject groups are different from the individual disciplines, but still useful information that we + # can use as additional keywords + keywords.extend(school_subject_groups) + + school_subjects: list = response.xpath('//h4[@class="filter-items-headline"][contains(text(),' + '"FACH")]/following-sibling::div/div/text()').getall() + if school_subjects: + for school_subject in school_subjects: + if school_subject in self.MAPPING_DISCIPLINES: + if "SoWi" in school_subject: + keywords.append(school_subject) + subject_temp = self.MAPPING_DISCIPLINES.get(school_subject) + disciplines.append(subject_temp) + else: + disciplines.append(school_subject) + if disciplines: + pass + + time_required: list = response.xpath('//h4[@class="filter-items-headline"][contains(text(),' + '"ZEITBEDARF")]/following-sibling::div/a/span/text()').getall() + if time_required: + # there can be up to three "time required"-values per crawled item + # our pipeline expects only one value (in seconds), though, which is why we need to prioritize which value + # should actually be saved as typical_learning_time: the longest or shortest duration? + for time_item in time_required: + if "DOPPELSTUNDE" in time_item: + time_string = str(datetime.timedelta(minutes=90)) + typical_learning_time.append(time_string) + if "45 MINUTEN" in time_item: + time_string = str(datetime.timedelta(minutes=45)) + typical_learning_time.append(time_string) + if "ÜBUNG" in time_item: + # exercises are typically < 20 minutes + time_string = str(datetime.timedelta(minutes=20)) + typical_learning_time.append(time_string) + if typical_learning_time: + typical_learning_time.sort() + # we're using the longest duration of all available learning times + typical_learning_time = typical_learning_time.pop() + + json_ld = str() + if "/mkifa/" in response.url or "/tnh8i/" in response.url or "/p78kq/" in response.url: + # there are exactly 3 materials that have malformed "json+ld"-containers which would cause errors + # we skip trying to parse these containers and use fallback metadata instead + pass + else: + json_ld: str = response.xpath('//script[@type="application/ld+json"]/text()').get() + json_ld: dict = json.loads(json_ld) + + type_str: str = response.xpath('//head/meta[@property="og:type"]/@content').get() + date_published: str = response.xpath('//head/meta[@property="article:published_time"]/@content').get() + + language: str = response.xpath('//head/meta[@property="og:locale"]/@content').get() + + # building our BaseItem by filling up the BaseItemLoader starts here: + base: BaseItemLoader = BaseItemLoader() + + base.add_value('sourceId', response.url) + hash_temp: str = f"{date_published}v{self.version}" + base.add_value('hash', hash_temp) + if "dateModified" in json_ld: + date_modified: str = json_ld.get("dateModified") + if date_modified: + base.add_value('lastModified', date_modified) + if type_str: + base.add_value('type', type_str) + else: + base.add_value('type', Constants.TYPE_MATERIAL) + lom: LomBaseItemloader = LomBaseItemloader() + + general = LomGeneralItemloader() + general.add_value('identifier', response.url) + title: str = 
response.xpath('//head/meta[@property="og:title"]/@content').get() + if title: + general.add_value('title', title) + if keywords: + general.add_value('keyword', keywords) + description: str = response.xpath('//head/meta[@property="og:description"]/@content').get() + if description: + general.add_value('description', description) + if language: + general.add_value('language', language) + lom.add_value('general', general.load_item()) + + technical: LomTechnicalItemLoader = LomTechnicalItemLoader() + technical.add_value('format', 'text/html') + technical.add_value('location', response.url) + media_required: list = response.xpath('//h4[@class="filter-items-headline"][contains(text(),' + '"MEDIENEINSATZ")]/following-sibling::div/a/span/text()').getall() + if media_required: + technical.add_value('requirement', str(media_required)) + # noinspection DuplicatedCode + lom.add_value('technical', technical.load_item()) + + lifecycle: LomLifecycleItemloader = LomLifecycleItemloader() + lifecycle.add_value('role', 'publisher') # supported roles: "author" / "editor" / "publisher" + author_edulabs: str = response.xpath('//head/meta[@name="author"]/@content').get() + if author_edulabs: + lifecycle.add_value('organization', author_edulabs) + if date_published: + lifecycle.add_value('date', date_published) + # noinspection DuplicatedCode + lom.add_value('lifecycle', lifecycle.load_item()) + + educational = LomEducationalItemLoader() + if language: + educational.add_value('language', language) + if typical_learning_time: + educational.add_value('typicalLearningTime', typical_learning_time) + lom.add_value('educational', educational.load_item()) + + classification: LomClassificationItemLoader = LomClassificationItemLoader() + if digital_competencies: + classification.add_value('description', str(digital_competencies)) + lom.add_value('classification', classification.load_item()) + + # once you've filled "general", "technical", "lifecycle" and "educational" with values, + # the LomBaseItem is loaded into the "base"-BaseItemLoader + base.add_value('lom', lom.load_item()) + + vs = ValuespaceItemLoader() + vs.add_value('conditionsOfAccess', 'no login') + vs.add_value('containsAdvertisement', 'No') + vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') + if disciplines: + vs.add_value('discipline', disciplines) + if educational_context: + vs.add_value('educationalContext', educational_context) + vs.add_value('intendedEndUserRole', 'teacher') + vs.add_value('new_lrt', 'd8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9') # Webseite und Portal (stabil) + # by default, every item is considered to be a web-page; some materials have additional identifiers that can be + # used for the 'new_lrt'-value + if "UNTERRICHTSREIHE" in time_required: + vs.add_value('new_lrt', '962560fe-d8d0-43e2-ad60-97f070b935c6') # Unterrichtsreihe + if "/blog/" in response.url: + vs.add_value('new_lrt', ['5204fc81-5dac-4cc4-a28b-aad5c241fa19', 'b98c0c8c-5696-4537-82fa-dded7236081e']) + # "Webblog (dynamisch)", "Artikel" + vs.add_value('price', 'no') + base.add_value('valuespaces', vs.load_item()) + + license_loader: LicenseItemLoader = LicenseItemLoader() + if json_ld: + if "author" in json_ld.keys(): + author_name = json_ld.get("author") + if author_name: + license_loader.add_value('author', author_name) + else: + author_name_fallback = response.xpath('//div[@class="edusprint-author-row"]' + '//*[@class="author-name"]/text()').getall() + license_loader.add_value('author', author_name_fallback) + license_default = 
Constants.LICENSE_CC_BY_SA_40 + license_url: str = response.xpath('//a[@rel="license"]/@href').get() + if license_url: + if license_url.startswith('http://'): + license_url = license_url.replace('http://', "https://") + license_loader.add_value('url', license_url) + if license_url.endswith("/cc0/"): + license_loader.add_value('internal', Constants.LICENSE_CC_ZERO_10) + license_loader.replace_value('url', Constants.LICENSE_CC_ZERO_10) + else: + # edulabs.de footer: "Inhalte dieser Webseite sind, sofern nicht anders angegeben, nach Creative Commons 4.0 + # Attribution lizenziert." - we're using this string as a license description fallback + license_loader.add_value('url', license_default) + license_description_clean: list = list() + license_description: list = response.xpath('//li[@class="cc-info"]//text()').getall() + if license_description: + for temp_str in license_description: + temp = w3lib.html.strip_html5_whitespace(temp_str) + license_description_clean.append(temp) + license_description_clean: str = str(license_description) + license_loader.add_value('description', license_description_clean) + # noinspection DuplicatedCode + base.add_value('license', license_loader.load_item()) + + permissions = super().getPermissions(response) + base.add_value('permissions', permissions.load_item()) + + response_loader = super().mapResponse(response) + base.add_value('response', response_loader.load_item()) + + yield base.load_item() From a69cd94680f862d8564c7aba4978ad95a5a6ff1f Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 21 Jun 2022 17:16:20 +0200 Subject: [PATCH 084/590] fix:cleanup/move the old "type" into the new_lrt structure --- converter/constants.py | 6 ++---- converter/es_connector.py | 1 - converter/items.py | 1 - .../base_classes/lernprogramme_spider_base.py | 2 +- converter/spiders/base_classes/lom_base.py | 9 +++++---- converter/spiders/biologie_lernprogramme_spider.py | 4 ++-- converter/spiders/chemie_lernprogramme_spider.py | 4 ++-- converter/spiders/digitallearninglab_spider.py | 12 ++++++------ converter/spiders/dilertube_spider.py | 2 -- converter/spiders/fobizz_spider.py | 4 ++-- converter/spiders/ginkgomaps_spider.py | 3 +-- converter/spiders/grundschulkoenig_spider.py | 1 - converter/spiders/kindoergarten_spider.py | 1 - converter/spiders/kmap_spider.py | 4 ++-- converter/spiders/materialnetzwerk_spider.py | 1 - converter/spiders/niedersachsen_abi_spider.py | 2 -- converter/spiders/quizdidaktik_spider.py | 4 ++-- converter/spiders/rpi_virtuell_spider.py | 3 ++- converter/spiders/sample_spider_alternative.py | 4 +--- converter/spiders/umwelt_im_unterricht_spider.py | 1 - converter/spiders/wirlernenonline_spider.py | 8 ++++---- converter/spiders/zum_dwu_spider.py | 3 +-- converter/spiders/zum_mathe_apps_spider.py | 3 +-- converter/spiders/zum_physik_apps_spider.py | 3 +-- 24 files changed, 35 insertions(+), 51 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index 626ee57f..1e9eea5e 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -40,10 +40,8 @@ class Constants: LICENSE_CUSTOM = "CUSTOM" # Custom License, use the license description field for arbitrary values LICENSE_NONPUBLIC = "NONPUBLIC" - TYPE_MATERIAL = "MATERIAL" - TYPE_TOOL = "TOOL" - TYPE_SOURCE = "SOURCE" - TYPE_LESSONPLANNING = "LESSONPLANNING" + NEW_LRT_MATERIAL = "http://w3id.org/openeduhub/vocabs/new_lrt/1846d876-d8fd-476a-b540-b8ffd713fedb" + NEW_LRT_TOOL = "http://w3id.org/openeduhub/vocabs/new_lrt/cefccf75-cba3-427d-9a0f-35b4fedcbba1" SOURCE_TYPE_SPIDER = 1 
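# Illustrative sketch, not part of constants.py: with the TYPE_* constants removed, a crawler that
# imports tool-like content is expected to override the material default that LomBase.getValuespaces()
# now sets (see the lom_base.py hunk below); the spider name here is a placeholder:
from converter.constants import Constants
from converter.spiders.base_classes import LomBase


class SomeToolSpider(LomBase):
    name = "some_tool_spider"  # placeholder, not an existing crawler

    def getValuespaces(self, response):
        valuespaces = LomBase.getValuespaces(self, response)  # adds NEW_LRT_MATERIAL by default
        valuespaces.replace_value("new_lrt", Constants.NEW_LRT_TOOL)
        return valuespaces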
SOURCE_TYPE_EDITORIAL = 2 diff --git a/converter/es_connector.py b/converter/es_connector.py index 7e278bd1..94b7dca8 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -261,7 +261,6 @@ def transformItem(self, uuid, spider, item): "ccm:replicationsource": spider.name, "ccm:replicationsourceid": item["sourceId"], "ccm:replicationsourcehash": item["hash"], - "ccm:objecttype": item["type"], "ccm:replicationsourceuuid": uuid, "cm:name": item["lom"]["general"]["title"], "ccm:wwwurl": item["lom"]["technical"]["location"] if "location" in item["lom"]["technical"] else None, diff --git a/converter/items.py b/converter/items.py index c0b306d7..60764e7a 100644 --- a/converter/items.py +++ b/converter/items.py @@ -175,7 +175,6 @@ class BaseItem(Item): hash = Field() collection = Field(output_processor=JoinMultivalues()) "id of collections this entry should be placed into" - type = Field() origin = Field() "in case it was fetched from a referatorium, the real origin name may be included here" response = Field(serializer=ResponseItem) diff --git a/converter/spiders/base_classes/lernprogramme_spider_base.py b/converter/spiders/base_classes/lernprogramme_spider_base.py index b58e28eb..514e672b 100644 --- a/converter/spiders/base_classes/lernprogramme_spider_base.py +++ b/converter/spiders/base_classes/lernprogramme_spider_base.py @@ -130,7 +130,6 @@ def getHash(self, response: Response) -> str: @overrides # LomBase def getBase(self, response: Response) -> items.BaseItemLoader: base = LomBase.getBase(self, response) - base.replace_value("type", self.static_values["type"]) if response.meta["row"]["thumbnail"] is not None: base.add_value("thumbnail", response.meta["row"]["thumbnail"]) return base @@ -172,6 +171,7 @@ def getLicense(self, response: Response) -> items.LicenseItemLoader: def getValuespaces(self, response: Response) -> items.ValuespaceItemLoader: valuespaces = LomBase.getValuespaces(self, response) skos = self.static_values["skos"] + valuespaces.replace_value("new_lrt", skos["new_lrt"]) valuespaces.add_value( "learningResourceType", skos["learningResourceType"], diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 6304f00d..c33444fa 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -124,7 +124,10 @@ def mapResponse(self, response, fetchData=True): return r def getValuespaces(self, response): - return ValuespaceItemLoader(response=response) + valuespaces = ValuespaceItemLoader(response=response) + # we assume that content is imported. 
Please use replace_value if you import something different + valuespaces.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) + return valuespaces def getLOM(self, response) -> LomBaseItemloader: lom = LomBaseItemloader(response=response) @@ -135,7 +138,7 @@ def getLOM(self, response) -> LomBaseItemloader: else: # support yield and generator for multiple values for contribute in lifecycle: - lom.add_value("lifecycle" ,contribute.load_item()) + lom.add_value("lifecycle", contribute.load_item()) lom.add_value("technical", self.getLOMTechnical(response).load_item()) lom.add_value("educational", self.getLOMEducational(response).load_item()) lom.add_value("classification", self.getLOMClassification(response).load_item()) @@ -145,8 +148,6 @@ def getBase(self, response=None) -> BaseItemLoader: base = BaseItemLoader() base.add_value("sourceId", self.getId(response)) base.add_value("hash", self.getHash(response)) - # we assume that content is imported. Please use replace_value if you import something different - base.add_value("type", Constants.TYPE_MATERIAL) return base def getLOMGeneral(self, response=None) -> LomGeneralItemloader: diff --git a/converter/spiders/biologie_lernprogramme_spider.py b/converter/spiders/biologie_lernprogramme_spider.py index 6e94e773..245f3cb6 100644 --- a/converter/spiders/biologie_lernprogramme_spider.py +++ b/converter/spiders/biologie_lernprogramme_spider.py @@ -21,11 +21,11 @@ class BiologieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): "first_name": "Joachim", "last_name": "Jakob", }, - "type": Constants.TYPE_TOOL, "format": "text/html", "language": "de", "licence_url": "https://creativecommons.org/licenses/by/4.0/legalcode", "skos": { + "new_lrt": Constants.NEW_LRT_MATERIAL, "learningResourceType": [ "http://w3id.org/openeduhub/vocabs/learningResourceType/application", "http://w3id.org/openeduhub/vocabs/learningResourceType/web_page", @@ -51,9 +51,9 @@ class BiologieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): exercises = { "static_value_overrides": { - "type": Constants.TYPE_MATERIAL, "format": "application/pdf", "skos": { + "new_lrt": Constants.NEW_LRT_MATERIAL, "learningResourceType": [ "http://w3id.org/openeduhub/vocabs/learningResourceType/drill_and_practice" ], diff --git a/converter/spiders/chemie_lernprogramme_spider.py b/converter/spiders/chemie_lernprogramme_spider.py index 12e39118..53ad6110 100644 --- a/converter/spiders/chemie_lernprogramme_spider.py +++ b/converter/spiders/chemie_lernprogramme_spider.py @@ -21,11 +21,11 @@ class ChemieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): "first_name": "Joachim", "last_name": "Jakob", }, - "type": Constants.TYPE_TOOL, "format": "text/html", "language": "de", "licence_url": "https://creativecommons.org/licenses/by/4.0/legalcode", "skos": { + "new_lrt": Constants.NEW_LRT_TOOL, "learningResourceType": [ "http://w3id.org/openeduhub/vocabs/learningResourceType/application", "http://w3id.org/openeduhub/vocabs/learningResourceType/web_page", @@ -51,9 +51,9 @@ class ChemieLernprogrammeSpider(LernprogrammeSpiderBase, CrawlSpider): exercises = { "static_value_overrides": { - "type": Constants.TYPE_MATERIAL, "format": "application/pdf", "skos": { + "new_lrt": Constants.NEW_LRT_MATERIAL, "learningResourceType": [ "http://w3id.org/openeduhub/vocabs/learningResourceType/drill_and_practice" ], diff --git a/converter/spiders/digitallearninglab_spider.py b/converter/spiders/digitallearninglab_spider.py index 470b73d3..9da66419 100644 --- a/converter/spiders/digitallearninglab_spider.py +++ 
b/converter/spiders/digitallearninglab_spider.py @@ -77,11 +77,11 @@ def handle_entry(self, response): return LrmiBase.parse(self, response) @staticmethod - def get_type(response): + def get_new_lrt(response): if response.meta["type"] == "tools": - return Constants.TYPE_TOOL + return Constants.NEW_LRT_TOOL else: - return Constants.TYPE_MATERIAL + return Constants.NEW_LRT_MATERIAL # thumbnail is always the same, do not use the one from rss def getBase(self, response): @@ -91,7 +91,6 @@ def getBase(self, response): "thumbnail", response.xpath('//img[@class="content-info__image"]/@src').get(), ) - base.replace_value("type", self.get_type(response)) return base def getLOMGeneral(self, response): @@ -117,6 +116,7 @@ def getLicense(self, response): def getValuespaces(self, response): valuespaces = LrmiBase.getValuespaces(self, response) + valuespaces.replace_value('new_lrt', self.get_new_lrt(response)) try: range = ( response.xpath( @@ -142,7 +142,7 @@ def getValuespaces(self, response): except: pass lrt = response.meta["item"].get("type") - valuespaces.add_value("learningResourceType", lrt) + valuespaces.add_value("new_lrt", lrt) try: tool_type = list( map( @@ -153,7 +153,7 @@ def getValuespaces(self, response): ) ) # @TODO: proper mapping, maybe specialised tool field? - valuespaces.add_value("learningResourceType", tool_type) + valuespaces.add_value("new_lrt", tool_type) except: pass return valuespaces diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py index 8015578e..3e426ff9 100644 --- a/converter/spiders/dilertube_spider.py +++ b/converter/spiders/dilertube_spider.py @@ -226,7 +226,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: last_modified = published_date # while this is not strictly the last_modified date, it is the only date we can gather from the OOM base.add_value('lastModified', last_modified) - base.add_value('type', Constants.TYPE_MATERIAL) # thumbnail_url: str = response.xpath('//meta[@property="og:image"]/@content').get() # ToDo: DiLerTube provides thumbnails, but they are locked behind an error 423 when directly accessing the link # if thumbnail_url is not None: @@ -318,7 +317,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # see https://www.dilertube.de/datenschutz.html vs.add_value('price', "no") base.add_value('valuespaces', vs.load_item()) - lic = LicenseItemLoader() if "license_description" in video_info_dict.keys(): # DiLerTube allows the uploaders to enter freeform text into the license field diff --git a/converter/spiders/fobizz_spider.py b/converter/spiders/fobizz_spider.py index 9377250e..14fde645 100644 --- a/converter/spiders/fobizz_spider.py +++ b/converter/spiders/fobizz_spider.py @@ -102,7 +102,6 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE base = super().getBase(response=response) base.add_value("response", super().mapResponse(response).load_item()) # we assume that content is imported. 
Please use replace_value if you import something different - base.add_value("type", Constants.TYPE_MATERIAL) base.add_value('thumbnail', data.get("thumbnailUrl", None)) base.add_value('lastModified', data.get("dateModified", None)) for publisher in data.get("publisher", []): @@ -132,6 +131,7 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() + vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) for audience in data.get("audience", []): vs.add_value("intendedEndUserRole", audience) @@ -141,7 +141,7 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE vs.add_value('discipline', discipline) for lrt in data.get("type", []): - vs.add_value('learningResourceType', lrt) + vs.add_value('new_lrt', lrt) base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() diff --git a/converter/spiders/ginkgomaps_spider.py b/converter/spiders/ginkgomaps_spider.py index e55e0fd8..e58e0a63 100644 --- a/converter/spiders/ginkgomaps_spider.py +++ b/converter/spiders/ginkgomaps_spider.py @@ -238,7 +238,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): last_modified = response.xpath('/html/head/meta[6]/@content').get() hash_temp = last_modified + self.version base.add_value('hash', hash_temp) - base.add_value('type', Constants.TYPE_MATERIAL) if first_thumbnail is not None: base.add_value('thumbnail', first_thumbnail) base.add_value('lastModified', last_modified) @@ -284,11 +283,11 @@ def parse(self, response: scrapy.http.Response, **kwargs): # "Sekundarstufe II", # "Berufliche Bildung", # "Erwachsenenbildung"]) + vs.add_value('new_lrt', [Constants.NEW_LRT_MATERIAL, 'b6ceade0-58d3-4179-af71-d53ebc6e49d4']) # karte vs.add_value('intendedEndUserRole', ["learner", "teacher", "parent"]) vs.add_value('discipline', 'Geografie') # Geografie - vs.add_value('learningResourceType', 'map') # Karte vs.add_value('conditionsOfAccess', 'no login') lic = LicenseItemLoader() diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py index 341209d0..be603ac1 100644 --- a/converter/spiders/grundschulkoenig_spider.py +++ b/converter/spiders/grundschulkoenig_spider.py @@ -110,7 +110,6 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry base.add_value("sourceId", response.url) hash_temp = str(sitemap_entry.lastmod + self.version) base.add_value("hash", hash_temp) - base.add_value("type", Constants.TYPE_MATERIAL) thumbnail_url = response.xpath('//meta[@property="og:image"]/@content').get() if thumbnail_url is not None: base.add_value('thumbnail', thumbnail_url) diff --git a/converter/spiders/kindoergarten_spider.py b/converter/spiders/kindoergarten_spider.py index a6e76535..bad34cc8 100644 --- a/converter/spiders/kindoergarten_spider.py +++ b/converter/spiders/kindoergarten_spider.py @@ -84,7 +84,6 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE base = super().getBase(response=response) base.add_value("response", super().mapResponse(response).load_item()) # we assume that content is imported. 
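The removal just below repeats the pattern applied across the spider hunks around this point: the old base-level "type" value is dropped and an equivalent entry is written to the "new_lrt" valuespace instead. A minimal sketch of that convention, using the loaders and constants that already appear in these diffs (the helper function itself is only illustrative and not part of the repository):

```python
from converter.constants import Constants
from converter.items import ValuespaceItemLoader


def fill_new_lrt(vs: ValuespaceItemLoader, is_tool: bool = False) -> ValuespaceItemLoader:
    # Illustrative helper, not repository code: the old base.add_value("type", ...)
    # call is dropped and the equivalent value goes into the "new_lrt" valuespace.
    vs.add_value("new_lrt", Constants.NEW_LRT_TOOL if is_tool else Constants.NEW_LRT_MATERIAL)
    return vs
```

Source-specific values (for example the resource types a crawler reads from its API response) are added with further add_value("new_lrt", ...) calls on the same loader, as the digitallearninglab and serlo hunks in this series show.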
Please use replace_value if you import something different - base.add_value("type", Constants.TYPE_MATERIAL) # thumbnail_href = response.css('.post-thumbnail img::attr(src)').get() base.add_value('thumbnail', response.css('.post-thumbnail img::attr(src)').get()) base.add_value('lastModified', sitemap_entry.lastmod) diff --git a/converter/spiders/kmap_spider.py b/converter/spiders/kmap_spider.py index 2aaca434..46cdcd5c 100644 --- a/converter/spiders/kmap_spider.py +++ b/converter/spiders/kmap_spider.py @@ -75,7 +75,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: hash_temp += self.version base.add_value('hash', hash_temp) base.add_value('lastModified', last_modified) - base.add_value('type', Constants.TYPE_MATERIAL) # Thumbnails have their own url path, which can be found in the json+ld: # "thumbnailUrl": "/snappy/Physik/Grundlagen/Potenzschreibweise" # e.g. for the item https://kmap.eu/app/browser/Physik/Grundlagen/Potenzschreibweise @@ -115,9 +114,10 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() + vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) vs.add_value('discipline', json_ld.get("mainEntity").get("about")) vs.add_value('intendedEndUserRole', json_ld.get("mainEntity").get("audience")) - vs.add_value('learningResourceType', json_ld.get("mainEntity").get("learningResourceType")) + vs.add_value('new_lrt', json_ld.get("mainEntity").get("learningResourceType")) vs.add_value('price', 'no') vs.add_value('conditionsOfAccess', 'login required for additional features') base.add_value('valuespaces', vs.load_item()) diff --git a/converter/spiders/materialnetzwerk_spider.py b/converter/spiders/materialnetzwerk_spider.py index 049c2fe1..a7ed1956 100644 --- a/converter/spiders/materialnetzwerk_spider.py +++ b/converter/spiders/materialnetzwerk_spider.py @@ -225,7 +225,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): bundle_thumbnail = kwargs.get('bundle_thumbnail') if bundle_thumbnail is not None: base.add_value('thumbnail', bundle_thumbnail) - base.add_value('type', Constants.TYPE_MATERIAL) base.add_value('lastModified', date_published) lom = LomBaseItemloader() diff --git a/converter/spiders/niedersachsen_abi_spider.py b/converter/spiders/niedersachsen_abi_spider.py index 79459910..22cd492c 100644 --- a/converter/spiders/niedersachsen_abi_spider.py +++ b/converter/spiders/niedersachsen_abi_spider.py @@ -101,7 +101,6 @@ def parse(self, response, **kwargs): base.add_value('sourceId', pdf_item) hash_temp = str(f"{datetime.now().isoformat()}{self.version}") base.add_value('hash', hash_temp) - base.add_value('type', Constants.TYPE_MATERIAL) base.add_value('binary', self.get_binary(current_dict, pdf_item)) lom = LomBaseItemloader() @@ -161,7 +160,6 @@ def parse(self, response, **kwargs): base.add_value('sourceId', pdf_item) hash_temp = str(f"{datetime.now().isoformat()}{self.version}") base.add_value('hash', hash_temp) - base.add_value('type', Constants.TYPE_MATERIAL) base.add_value('binary', self.get_binary(current_dict, pdf_item)) lom = LomBaseItemloader() diff --git a/converter/spiders/quizdidaktik_spider.py b/converter/spiders/quizdidaktik_spider.py index ebc9cc29..919f1233 100644 --- a/converter/spiders/quizdidaktik_spider.py +++ b/converter/spiders/quizdidaktik_spider.py @@ -2,7 +2,7 @@ from scrapy.spiders import CrawlSpider from converter.constants import Constants -from .base_classes import LernprogrammeSpiderBase +from 
converter.spiders.base_classes import LernprogrammeSpiderBase class QuizdidaktikSpider(LernprogrammeSpiderBase, CrawlSpider): @@ -16,11 +16,11 @@ class QuizdidaktikSpider(LernprogrammeSpiderBase, CrawlSpider): "first_name": "Joachim", "last_name": "Jakob", }, - "type": Constants.TYPE_TOOL, "format": "text/html", "language": "de", "licence_url": "https://creativecommons.org/licenses/by/4.0/legalcode", "skos": { + "new_lrt": Constants.NEW_LRT_TOOL, "learningResourceType": [ "http://w3id.org/openeduhub/vocabs/learningResourceType/application", "http://w3id.org/openeduhub/vocabs/learningResourceType/web_page", diff --git a/converter/spiders/rpi_virtuell_spider.py b/converter/spiders/rpi_virtuell_spider.py index 1bd9f442..ff4788a6 100644 --- a/converter/spiders/rpi_virtuell_spider.py +++ b/converter/spiders/rpi_virtuell_spider.py @@ -273,7 +273,7 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) # base.add_value("response", super().mapResponse(response).load_item()) - base.add_value("type", Constants.TYPE_MATERIAL) + # base.add_value("type", Constants.TYPE_MATERIAL) base.add_value("thumbnail", wp_json_item.get("material_screenshot")) # base.add_value("lastModified", wp_json_item.get("date")) # is "date" from wp_json for lastModified correct? base.add_value("lastModified", date_modified) # or is this one better (grabbed from material_review_url)? @@ -330,6 +330,7 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() + vs.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) vs.add_value("discipline", "http://w3id.org/openeduhub/vocabs/discipline/520") # Religion # mapping educationalContext educational_context = list() diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 58beca53..c03a7b3e 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -60,9 +60,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: base.add_value('hash', hash_temp) last_modified = None base.add_value('lastModified', last_modified) - # sometimes you might get a "type"-value from the JSON_LD. 
If it's not supplied by the website you're crawling, - # you might need to use a constant: - base.add_value('type', Constants.TYPE_MATERIAL) thumbnail_url: str = "This string should hold the thumbnail URL" base.add_value('thumbnail', thumbnail_url) @@ -180,6 +177,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/fskRating.ttl) # - oer optional # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/oer.ttl) + vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) base.add_value('valuespaces', vs.load_item()) lic = LicenseItemLoader() diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index eaef7be3..8c8768c7 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -128,7 +128,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): hash_temp = str(date_cleaned_up + self.version) base.add_value('hash', hash_temp) base.add_value('lastModified', date_cleaned_up) - base.add_value('type', Constants.TYPE_MATERIAL) # base.add_value('thumbnail', thumbnail_url) lom = LomBaseItemloader() diff --git a/converter/spiders/wirlernenonline_spider.py b/converter/spiders/wirlernenonline_spider.py index aef601bf..6931c33e 100644 --- a/converter/spiders/wirlernenonline_spider.py +++ b/converter/spiders/wirlernenonline_spider.py @@ -82,7 +82,7 @@ def start_requests(self): yield self.startRequest("edutool") def parseRequest(self, response): - results = json.loads(response.body_as_unicode()) + results = json.loads(response.body) if results: for item in results: copyResponse = response.copy() @@ -96,9 +96,9 @@ def handleEntry(self, response): def getType(self, response): if response.meta["type"] == "edusource": - return Constants.TYPE_SOURCE + return Constants.NEW_LRT_MATERIAL elif response.meta["type"] == "edutool": - return Constants.TYPE_TOOL + return Constants.NEW_LRT_TOOL return None # thumbnail is always the same, do not use the one from rss @@ -107,7 +107,6 @@ def getBase(self, response): base.replace_value( "thumbnail", self.get("acf.thumbnail.url", json=response.meta["item"]) ) - base.replace_value("type", self.getType(response)) fulltext = self.get("acf.long_text", json=response.meta["item"]) base.replace_value("fulltext", html.unescape(fulltext)) try: @@ -161,6 +160,7 @@ def getLicense(self, response): def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) + valuespaces.replace_value("new_lrt", self.getType(response)) discipline = list( map( lambda x: x["value"], diff --git a/converter/spiders/zum_dwu_spider.py b/converter/spiders/zum_dwu_spider.py index bef89a4f..0955253c 100644 --- a/converter/spiders/zum_dwu_spider.py +++ b/converter/spiders/zum_dwu_spider.py @@ -104,7 +104,6 @@ def parse_topic_overview(self, response: scrapy.http.Response): def parse(self, response: scrapy.http.Response, **kwargs): base = super().getBase(response=response) # there are no suitable images to serve as thumbnails, therefore SPLASH will have to do - base.add_value('type', Constants.TYPE_MATERIAL) lom = LomBaseItemloader() general = LomGeneralItemloader(response=response) @@ -208,6 +207,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() + vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) # since the website holds both mathematics- and physics-related materials, we 
need to take a look at the last # section of the url: .htm filenames that start with # m | hpm | tkm belong to the discipline mathematics @@ -219,7 +219,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): if url_last_part.startswith("p") or url_last_part.startswith("kwp") or url_last_part.startswith("hpp") \ or url_last_part.startswith("vcp"): vs.add_value('discipline', "Physics") - vs.add_value('learningResourceType', Constants.TYPE_MATERIAL) vs.add_value('intendedEndUserRole', ['learner', 'teacher', 'parent', diff --git a/converter/spiders/zum_mathe_apps_spider.py b/converter/spiders/zum_mathe_apps_spider.py index 92d1f44b..9d4657a2 100644 --- a/converter/spiders/zum_mathe_apps_spider.py +++ b/converter/spiders/zum_mathe_apps_spider.py @@ -109,7 +109,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): last_modified = dateparser.parse(item2) base = super().getBase(response=response) - base.add_value('type', Constants.TYPE_MATERIAL) if last_modified is not None: hash_temp = last_modified.isoformat() + self.version base.add_value('hash', hash_temp) @@ -149,10 +148,10 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() + vs.add_value('new_lrt', Constants.NEW_LRT_TOOL) vs.add_value('conditionsOfAccess', 'no login') vs.add_value('discipline', 'Mathematik') vs.add_value('intendedEndUserRole', ['learner', 'teacher', 'parent']) - vs.add_value('learningResourceType', ['application', 'web page']) vs.add_value('price', 'no') base.add_value('valuespaces', vs.load_item()) diff --git a/converter/spiders/zum_physik_apps_spider.py b/converter/spiders/zum_physik_apps_spider.py index e6d10fef..efa8dd72 100644 --- a/converter/spiders/zum_physik_apps_spider.py +++ b/converter/spiders/zum_physik_apps_spider.py @@ -89,7 +89,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): last_modified = dateparser.parse(item2) base = super().getBase(response=response) - base.add_value('type', Constants.TYPE_MATERIAL) if last_modified is not None: hash_temp = last_modified.isoformat() + self.version base.add_value('hash', hash_temp) @@ -129,10 +128,10 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() + vs.add_value('new_lrt', Constants.NEW_LRT_TOOL) vs.add_value('conditionsOfAccess', 'no login') vs.add_value('discipline', 'Physik') vs.add_value('intendedEndUserRole', ['learner', 'teacher', 'parent']) - vs.add_value('learningResourceType', ['application', 'web page']) vs.add_value('price', 'no') base.add_value('valuespaces', vs.load_item()) From 8f97b7731a80faee8e644a61411baaa3cae5a832 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 21 Jun 2022 17:23:28 +0200 Subject: [PATCH 085/590] fix:edulabs remove old type --- converter/spiders/edulabs_spider.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/converter/spiders/edulabs_spider.py b/converter/spiders/edulabs_spider.py index e9a55b94..5c1d336f 100644 --- a/converter/spiders/edulabs_spider.py +++ b/converter/spiders/edulabs_spider.py @@ -148,10 +148,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: date_modified: str = json_ld.get("dateModified") if date_modified: base.add_value('lastModified', date_modified) - if type_str: - base.add_value('type', type_str) - else: - base.add_value('type', Constants.TYPE_MATERIAL) lom: LomBaseItemloader = LomBaseItemloader() general = LomGeneralItemloader() From 5421c35397aa0a24bb0881e4e6b38c17d77239eb Mon 
Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 21 Jun 2022 17:23:43 +0200 Subject: [PATCH 086/590] fix:irights new_lrt type --- converter/spiders/irights_spider.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/converter/spiders/irights_spider.py b/converter/spiders/irights_spider.py index 9c8da06d..1aed6867 100644 --- a/converter/spiders/irights_spider.py +++ b/converter/spiders/irights_spider.py @@ -38,6 +38,4 @@ def getValuespaces(self, response): valuespaces.add_value("educationalContext", "erwachsenenbildung") valuespaces.add_value("discipline", "700") # Wirtschaftskunde valuespaces.add_value("discipline", "48005") # Gesellschaftskunde - # ToDo: confirm new_lrt values - valuespaces.add_value("new_lrt", "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9") # "Webseite und Portal (stabil)" return valuespaces From d15e9bd94805f5e00b4ae06633bcc4dfcd652ada Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 21 Jun 2022 17:35:41 +0200 Subject: [PATCH 087/590] fix:serlo new_lrt type --- converter/spiders/serlo_spider.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 744e9a8c..d80ad75f 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -4,6 +4,7 @@ import scrapy from scrapy.spiders import CrawlSpider +from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader from converter.spiders.base_classes import LomBase @@ -124,7 +125,6 @@ def parse(self, response, **kwargs): base.add_value('hash', hash_temp) base.add_value('lastModified', graphql_json["dateModified"]) type_list: list = graphql_json["type"] - base.add_value('type', type_list) # thumbnail_url: str = "This string should hold the thumbnail URL" # base.add_value('thumbnail', thumbnail_url) if "publisher" in json_ld: @@ -236,6 +236,8 @@ def parse(self, response, **kwargs): base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() + vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) + vs.add_value('new_lrt', type_list) # # for possible values, either consult https://vocabs.openeduhub.de # # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs # # TODO: fill "valuespaces"-keys with values for From e014638ff11104a100fb0e5ae12db6346960b0c4 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 21 Jun 2022 18:55:59 +0200 Subject: [PATCH 088/590] fix: mediawiki_base location.url - URLs to individual items were sometimes malformed due to string-concatenation without url-encoding --- converter/spiders/base_classes/mediawiki_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/mediawiki_base.py b/converter/spiders/base_classes/mediawiki_base.py index ea0d9baf..2866cbe0 100644 --- a/converter/spiders/base_classes/mediawiki_base.py +++ b/converter/spiders/base_classes/mediawiki_base.py @@ -2,6 +2,7 @@ import json import logging +import urllib.parse from pathlib import Path from urllib import parse @@ -239,7 +240,7 @@ def getLOMTechnical(self, response=None) -> LomTechnicalItemLoader: loader.replace_value('format', 'text/html') data = response.meta['item'] title = jmes_title.search(data) - loader.replace_value('location', f'{self.url}wiki/{title}') + loader.replace_value('location', 
f'{self.url}wiki/{urllib.parse.quote(title)}') return loader def getValuespaces(self, response): From 079a7b128c37a8070edbc4017393c10877acfbfc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 17 Jun 2022 16:29:36 +0200 Subject: [PATCH 089/590] add: pyCharm run configurations - add: /logs/-folder at project root (where all saved .json and .log files will be stored) --- .run/biologie_lernprogramme_spider.run.xml | 25 ++++++++++++++++++++++ .run/chemie_lernprogramme_spider.run.xml | 25 ++++++++++++++++++++++ .run/digitallearninglab_spider.run.xml | 25 ++++++++++++++++++++++ .run/dilertube_spider.run.xml | 25 ++++++++++++++++++++++ .run/edulabs_spider.run.xml | 25 ++++++++++++++++++++++ .run/fobizz_spider.run.xml | 25 ++++++++++++++++++++++ .run/ginkgomaps_spider.run.xml | 25 ++++++++++++++++++++++ .run/grundschulkoenig_spider.run.xml | 25 ++++++++++++++++++++++ .run/kmap_spider.run.xml | 25 ++++++++++++++++++++++ .run/learning_apps_spider.run.xml | 25 ++++++++++++++++++++++ .run/lehreronline_spider.run.xml | 25 ++++++++++++++++++++++ .run/materialnetzwerk_spider.run.xml | 25 ++++++++++++++++++++++ .run/mediothek_pixiothek_spider.run.xml | 25 ++++++++++++++++++++++ .run/memucho_spider.run.xml | 25 ++++++++++++++++++++++ .run/planet_schule_spider.run.xml | 25 ++++++++++++++++++++++ .run/rpi_virtuell_spider.run.xml | 25 ++++++++++++++++++++++ .run/science_in_school_spider.run.xml | 25 ++++++++++++++++++++++ .run/segu_spider.run.xml | 25 ++++++++++++++++++++++ .run/tutory_spider.run.xml | 25 ++++++++++++++++++++++ .run/youtube_spider.run.xml | 25 ++++++++++++++++++++++ .run/zum_deutschlernen_spider.run.xml | 25 ++++++++++++++++++++++ .run/zum_dwu_spider.run.xml | 25 ++++++++++++++++++++++ .run/zum_klexikon_spider.run.xml | 25 ++++++++++++++++++++++ .run/zum_mathe_apps_spider.run.xml | 25 ++++++++++++++++++++++ .run/zum_physik_apps_spider.run.xml | 25 ++++++++++++++++++++++ logs/.gitignore | 3 +++ 26 files changed, 628 insertions(+) create mode 100644 .run/biologie_lernprogramme_spider.run.xml create mode 100644 .run/chemie_lernprogramme_spider.run.xml create mode 100644 .run/digitallearninglab_spider.run.xml create mode 100644 .run/dilertube_spider.run.xml create mode 100644 .run/edulabs_spider.run.xml create mode 100644 .run/fobizz_spider.run.xml create mode 100644 .run/ginkgomaps_spider.run.xml create mode 100644 .run/grundschulkoenig_spider.run.xml create mode 100644 .run/kmap_spider.run.xml create mode 100644 .run/learning_apps_spider.run.xml create mode 100644 .run/lehreronline_spider.run.xml create mode 100644 .run/materialnetzwerk_spider.run.xml create mode 100644 .run/mediothek_pixiothek_spider.run.xml create mode 100644 .run/memucho_spider.run.xml create mode 100644 .run/planet_schule_spider.run.xml create mode 100644 .run/rpi_virtuell_spider.run.xml create mode 100644 .run/science_in_school_spider.run.xml create mode 100644 .run/segu_spider.run.xml create mode 100644 .run/tutory_spider.run.xml create mode 100644 .run/youtube_spider.run.xml create mode 100644 .run/zum_deutschlernen_spider.run.xml create mode 100644 .run/zum_dwu_spider.run.xml create mode 100644 .run/zum_klexikon_spider.run.xml create mode 100644 .run/zum_mathe_apps_spider.run.xml create mode 100644 .run/zum_physik_apps_spider.run.xml create mode 100644 logs/.gitignore diff --git a/.run/biologie_lernprogramme_spider.run.xml b/.run/biologie_lernprogramme_spider.run.xml new file mode 100644 index 00000000..278383ef --- /dev/null +++ b/.run/biologie_lernprogramme_spider.run.xml @@ 
-0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/chemie_lernprogramme_spider.run.xml b/.run/chemie_lernprogramme_spider.run.xml new file mode 100644 index 00000000..4169fb2d --- /dev/null +++ b/.run/chemie_lernprogramme_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/digitallearninglab_spider.run.xml b/.run/digitallearninglab_spider.run.xml new file mode 100644 index 00000000..dab6f54b --- /dev/null +++ b/.run/digitallearninglab_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/dilertube_spider.run.xml b/.run/dilertube_spider.run.xml new file mode 100644 index 00000000..63bb20dc --- /dev/null +++ b/.run/dilertube_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/edulabs_spider.run.xml b/.run/edulabs_spider.run.xml new file mode 100644 index 00000000..06ae8b69 --- /dev/null +++ b/.run/edulabs_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/fobizz_spider.run.xml b/.run/fobizz_spider.run.xml new file mode 100644 index 00000000..5bcce8f2 --- /dev/null +++ b/.run/fobizz_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/ginkgomaps_spider.run.xml b/.run/ginkgomaps_spider.run.xml new file mode 100644 index 00000000..af91c626 --- /dev/null +++ b/.run/ginkgomaps_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/grundschulkoenig_spider.run.xml b/.run/grundschulkoenig_spider.run.xml new file mode 100644 index 00000000..75137aa3 --- /dev/null +++ b/.run/grundschulkoenig_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/kmap_spider.run.xml b/.run/kmap_spider.run.xml new file mode 100644 index 00000000..7f3cef51 --- /dev/null +++ b/.run/kmap_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/learning_apps_spider.run.xml b/.run/learning_apps_spider.run.xml new file mode 100644 index 00000000..a862e178 --- /dev/null +++ b/.run/learning_apps_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/lehreronline_spider.run.xml b/.run/lehreronline_spider.run.xml new file mode 100644 index 00000000..6c2e242a --- /dev/null +++ b/.run/lehreronline_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/materialnetzwerk_spider.run.xml b/.run/materialnetzwerk_spider.run.xml new file mode 100644 index 00000000..0e7dbc87 --- /dev/null +++ b/.run/materialnetzwerk_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/mediothek_pixiothek_spider.run.xml b/.run/mediothek_pixiothek_spider.run.xml new file mode 100644 index 00000000..c68b3e6c --- /dev/null +++ b/.run/mediothek_pixiothek_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/memucho_spider.run.xml b/.run/memucho_spider.run.xml new file mode 100644 index 00000000..c294c8fb --- /dev/null +++ b/.run/memucho_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/planet_schule_spider.run.xml b/.run/planet_schule_spider.run.xml new file mode 100644 index 00000000..cefc610f --- /dev/null +++ b/.run/planet_schule_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/rpi_virtuell_spider.run.xml b/.run/rpi_virtuell_spider.run.xml new file mode 100644 index 00000000..6536cbe7 --- /dev/null +++ 
b/.run/rpi_virtuell_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/science_in_school_spider.run.xml b/.run/science_in_school_spider.run.xml new file mode 100644 index 00000000..b90b8283 --- /dev/null +++ b/.run/science_in_school_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/segu_spider.run.xml b/.run/segu_spider.run.xml new file mode 100644 index 00000000..8f289f07 --- /dev/null +++ b/.run/segu_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/tutory_spider.run.xml b/.run/tutory_spider.run.xml new file mode 100644 index 00000000..9f7d5552 --- /dev/null +++ b/.run/tutory_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/youtube_spider.run.xml b/.run/youtube_spider.run.xml new file mode 100644 index 00000000..1f3c484d --- /dev/null +++ b/.run/youtube_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/zum_deutschlernen_spider.run.xml b/.run/zum_deutschlernen_spider.run.xml new file mode 100644 index 00000000..56831d46 --- /dev/null +++ b/.run/zum_deutschlernen_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/zum_dwu_spider.run.xml b/.run/zum_dwu_spider.run.xml new file mode 100644 index 00000000..8918164a --- /dev/null +++ b/.run/zum_dwu_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/zum_klexikon_spider.run.xml b/.run/zum_klexikon_spider.run.xml new file mode 100644 index 00000000..675c7dd5 --- /dev/null +++ b/.run/zum_klexikon_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/zum_mathe_apps_spider.run.xml b/.run/zum_mathe_apps_spider.run.xml new file mode 100644 index 00000000..84077bb3 --- /dev/null +++ b/.run/zum_mathe_apps_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/zum_physik_apps_spider.run.xml b/.run/zum_physik_apps_spider.run.xml new file mode 100644 index 00000000..fa286b44 --- /dev/null +++ b/.run/zum_physik_apps_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/logs/.gitignore b/logs/.gitignore new file mode 100644 index 00000000..6eabd13d --- /dev/null +++ b/logs/.gitignore @@ -0,0 +1,3 @@ +*.log +*.json +*.jsonl From c9f1165ab2aa6e96cc2f21562a1bb9d3cf932eaf Mon Sep 17 00:00:00 2001 From: tsimon Date: Fri, 24 Jun 2022 13:29:38 +0200 Subject: [PATCH 090/590] build:docker compose env configs --- .env.example | 4 ++++ docker-compose.yml | 8 ++++---- 2 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 .env.example diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..e5a0dba9 --- /dev/null +++ b/.env.example @@ -0,0 +1,4 @@ +CRAWLER=wirlernenonline_spider +EDU_SHARING_BASE_URL=http://host.docker.internal/edu-sharing/ +EDU_SHARING_USERNAME=admin +EDU_SHARING_PASSWORD=Joint#17# \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 04e9510e..e253a78f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -26,10 +26,10 @@ services: image: openeduhub/oeh-search-etl:develop environment: - "SPLASH_URL=http://splash:8050" - - "CRAWLER=wirlernenonline_spider" + - "CRAWLER=${CRAWLER}" - "DRY_RUN=False" - - "EDU_SHARING_BASE_URL=http://host.docker.internal/edu-sharing/" - - "EDU_SHARING_USERNAME=admin" - - "EDU_SHARING_PASSWORD=admin" + - "EDU_SHARING_BASE_URL=${EDU_SHARING_BASE_URL}" + - 
"EDU_SHARING_USERNAME=${EDU_SHARING_USERNAME}" + - "EDU_SHARING_PASSWORD=${EDU_SHARING_PASSWORD}" From b1160260ab9202c0886015171e4aa83c9acb6775 Mon Sep 17 00:00:00 2001 From: tsimon Date: Fri, 24 Jun 2022 13:33:08 +0200 Subject: [PATCH 091/590] build:docker compose readme --- Readme.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/Readme.md b/Readme.md index de1f665b..8eca828f 100644 --- a/Readme.md +++ b/Readme.md @@ -24,6 +24,17 @@ As a last step, set up your config variables by copying the `.env.example`-file - A crawler can be run with `scrapy crawl `. It assumes that you have an edu-sharing 6.0 instance in your `.env` settings configured which can accept the data. - If a crawler has [Scrapy Spider Contracts](https://docs.scrapy.org/en/latest/topics/contracts.html#spiders-contracts) implemented, you can test those by running `scrapy check ` + +## Run via Docker +```bash +git clone https://github.com/openeduhub/oeh-search-etl +cd oeh-search-etl +cp .env.example .env +# modify .env with your edu sharing instance +export CRAWLER=your_crawler_id_spider # i.e. wirlernenonline_spider +docker compose up +``` + ## Building a Crawler - We use Scrapy as a framework. Please check out the guides for Scrapy spider (https://docs.scrapy.org/en/latest/intro/tutorial.html) From f1b39df735c78c343a36f03839437b0dbc730d13 Mon Sep 17 00:00:00 2001 From: tsimon Date: Fri, 24 Jun 2022 14:11:48 +0200 Subject: [PATCH 092/590] build:docker build action --- Readme.md | 1 + docker-compose.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/Readme.md b/Readme.md index 8eca828f..3f39b1cb 100644 --- a/Readme.md +++ b/Readme.md @@ -31,6 +31,7 @@ git clone https://github.com/openeduhub/oeh-search-etl cd oeh-search-etl cp .env.example .env # modify .env with your edu sharing instance +docker compose build scrapy export CRAWLER=your_crawler_id_spider # i.e. wirlernenonline_spider docker compose up ``` diff --git a/docker-compose.yml b/docker-compose.yml index e253a78f..311d6817 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,6 +24,7 @@ services: extra_hosts: host.docker.internal: host-gateway image: openeduhub/oeh-search-etl:develop + build: . environment: - "SPLASH_URL=http://splash:8050" - "CRAWLER=${CRAWLER}" From 2568ba50610089c5a28dd804a38764ea0d728d3a Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 24 Jun 2022 14:25:35 +0200 Subject: [PATCH 093/590] build:docker playwright endpoint --- docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index 311d6817..cd8fd105 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -26,6 +26,8 @@ services: image: openeduhub/oeh-search-etl:develop build: . 
environment: + - "PYPPETEER_WS_ENDPOINT=ws://headless_chrome:3000" + - "PLAYWRIGHT_WS_ENDPOINT=ws://headless_chrome:3000" - "SPLASH_URL=http://splash:8050" - "CRAWLER=${CRAWLER}" - "DRY_RUN=False" From d48beed6d9a94168ae0c0c92963eee8d7560fe4b Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 24 Jun 2022 17:45:47 +0200 Subject: [PATCH 094/590] build:docker compose network + chrome mem fix --- docker-compose.yml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index cd8fd105..a7d37c35 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,8 @@ version: "3.4" +networks: + scrapy: + services: splash: image: scrapinghub/splash:master @@ -7,6 +10,8 @@ services: restart: always ports: - "127.0.0.1:8050:8050" + networks: + - scrapy healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8050/_ping"] interval: 30s @@ -16,15 +21,23 @@ services: headless_chrome: image: browserless/chrome restart: always + environment: + - "DEFAULT_LAUNCH_ARGS:[\"--disable-dev-shm-usage\"]" ports: - "127.0.0.1:3000:3000" + networks: + - scrapy scrapy: # extra_hosts is only required if your need to access an edu-sharing instance on the host that runs docker # host.docker.internal points to the ip address of the host docker network interface extra_hosts: host.docker.internal: host-gateway image: openeduhub/oeh-search-etl:develop - build: . + build: + context: . + network: host + networks: + - scrapy environment: - "PYPPETEER_WS_ENDPOINT=ws://headless_chrome:3000" - "PLAYWRIGHT_WS_ENDPOINT=ws://headless_chrome:3000" From 5918e68a5b3551d76ab4292f7ed27d3b30d41e21 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 27 Jun 2022 10:11:31 +0200 Subject: [PATCH 095/590] build:docker build fixing --- .env.example | 1 + Dockerfile | 8 +++++++- docker-compose.yml | 5 ++--- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.env.example b/.env.example index e5a0dba9..21080049 100644 --- a/.env.example +++ b/.env.example @@ -1,4 +1,5 @@ CRAWLER=wirlernenonline_spider +LOG_LEVEL=INFO EDU_SHARING_BASE_URL=http://host.docker.internal/edu-sharing/ EDU_SHARING_USERNAME=admin EDU_SHARING_PASSWORD=Joint#17# \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 6535f13a..690f7108 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,13 @@ ENV CRAWLER wirlernenonline_spider WORKDIR / -COPY . . 
+COPY requirements.txt requirements.txt +COPY scrapy.cfg scrapy.cfg +COPY setup.cfg setup.cfg +COPY converter/ converter/ +COPY csv/ csv/ +COPY edu_sharing_client/ edu_sharing_client/ +COPY valuespace_converter/ valuespace_converter/ RUN pip3 install -r requirements.txt diff --git a/docker-compose.yml b/docker-compose.yml index a7d37c35..6d2c4792 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -44,8 +44,7 @@ services: - "SPLASH_URL=http://splash:8050" - "CRAWLER=${CRAWLER}" - "DRY_RUN=False" + - "LOG_LEVEL=${LOG_LEVEL:-INFO}" - "EDU_SHARING_BASE_URL=${EDU_SHARING_BASE_URL}" - "EDU_SHARING_USERNAME=${EDU_SHARING_USERNAME}" - - "EDU_SHARING_PASSWORD=${EDU_SHARING_PASSWORD}" - - + - "EDU_SHARING_PASSWORD=${EDU_SHARING_PASSWORD}" \ No newline at end of file From 899f37171b0ffe24ec23ed903815221429779c7d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 3 Jun 2022 17:14:31 +0200 Subject: [PATCH 096/590] science_in_school_spider v0.0.1 - update 'type' and 'origin' documentation in sample_spider_alternative -- remove: no longer used 'type' from BaseItem -- add: documentation for the "origin"-BaseItem-field -- the "origin"-field in our BaseItemLoader is used to control in which sub-folder of 'SYNC_OBJ//' items should be placed -- this wasn't explained previously and is now documented with two different use-cases in youtube_spider and lehreronline_spider --- .../spiders/sample_spider_alternative.py | 7 +- converter/spiders/science_in_school_spider.py | 355 ++++++++++++++++++ 2 files changed, 361 insertions(+), 1 deletion(-) create mode 100644 converter/spiders/science_in_school_spider.py diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index c03a7b3e..8174c238 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -48,7 +48,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - permissions required (see: PermissionItemLoader below) # - license required (see: LicenseItemLoader below) # - lastModified recommended - # - type recommended + # - origin optional (only necessary if items need to be sorted into a specific sub-folder) # - thumbnail recommended # - publisher optional # - binary optional (only needed if you're working with binary files (e.g. .pdf-files), @@ -61,6 +61,11 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: last_modified = None base.add_value('lastModified', last_modified) thumbnail_url: str = "This string should hold the thumbnail URL" + base.add_value('origin', 'premium_only') # the OPTIONAL value for "origin" controls the subfolder-name + # in the edu-sharing repository (e.g. if you need to make a distinction between learning objects that are free + # to access or premium_only). in this example, items that have the "premium_only"-value will be sent to the + # "SYNC_OBJ//premium_only/"-folder. 
+ # (This field is used in two different use-cases, both in "youtube_spider" and "lehreronline_spider") base.add_value('thumbnail', thumbnail_url) lom = LomBaseItemloader() diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py new file mode 100644 index 00000000..4c11bdd2 --- /dev/null +++ b/converter/spiders/science_in_school_spider.py @@ -0,0 +1,355 @@ +import datetime + +import dateparser +import scrapy +import w3lib.html + +from converter.constants import Constants +from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ + LomLifecycleItemloader, LomEducationalItemLoader, LomClassificationItemLoader, ValuespaceItemLoader, \ + LicenseItemLoader, LomAgeRangeItemLoader +from converter.spiders.base_classes import LomBase + + +class ScienceInSchoolSpider(scrapy.Spider, LomBase): + name = "science_in_school_spider" + friendlyName = "Science in School" + start_urls = [ + "https://www.scienceinschool.org/issue/" + ] + version = "0.0.1" + custom_settings = { + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True + } + allowed_domains = [ + "scienceinschool.org" + ] + DEBUG_ALL_ARTICLE_URLS = set() + DEBUG_LANGUAGES_AVAILABLE = set() + + TOPICS_TO_DISCIPLINES_MAPPING = { + "Astronomy / space": "Astronomy", + "Biology": "Biology", + "Chemistry": "Chemistry", + "Health": "Health education", + "Mathematics": "Mathematics", + "Physics": "Physics", + "Sustainability": "Sustainability" + } + LICENSE_MAPPING = { + "CC-BY": "https://creativecommons.org/licenses/by-sa/4.0", + "CC-BY-NC-SA": "https://creativecommons.org/licenses/by-nc-sa/4.0", + "CC-BY-NC-ND": "https://creativecommons.org/licenses/by-nc-nd/4.0" + } + KEYWORD_EXCLUSION_LIST = [ + "Not applicable", "not applicable" + ] + + def start_requests(self): + for start_url in self.start_urls: + yield scrapy.Request(url=start_url, callback=self.parse_issue_overview) + + def parse_issue_overview(self, response: scrapy.http.Response) -> scrapy.Request: + """ + Crawls the overview-page of all published issues and extracts URLs to the individual issue numbers + + :param response: scrapy.http.Response + :return: scrapy.Request + + Scrapy Contracts: + @url https://www.scienceinschool.org/issue/ + @returns requests 51 + """ + issue_urls = response.xpath('//h3[@class="vf-card__heading"]/a[@class="vf-card__link"]/@href').getall() + if issue_urls: + # self.logger.info(f"Found {len(issue_urls)} Issues in the overview") + for issue_url in issue_urls: + yield scrapy.Request(url=issue_url, callback=self.parse_article_overview) + pass + + def parse_article_overview(self, response: scrapy.http.Response) -> scrapy.Request: + """ + Crawls an issue (e.g. Issue #3) for all individual article URLs within that publication. Afterwards yields the + URLs to the parse()-method. 
+ + :param response: scrapy.http.Response + :return: scrapy.Request + + Scrapy Contracts: + @url https://www.scienceinschool.org/issue/issue-3/ + @returns requests 20 + """ + article_urls = response.xpath('//h3[@class="vf-card__heading"]/a[@class="vf-card__link"]/@href').getall() + # self.logger.info(f"Currently on {response.url} // Found {len(article_urls)} individual articles") + self.DEBUG_ALL_ARTICLE_URLS.update(article_urls) + # self.logger.info(f"Total URLs gathered so far: {len(self.DEBUG_ALL_ARTICLE_URLS)}") + for article_url in article_urls: + yield scrapy.Request(url=article_url, callback=self.parse) + pass + + def getId(self, response=None) -> str: + pass + + def getHash(self, response=None) -> str: + pass + + def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + """ + Crawls an individual article and extracts metadata. Afterwards creates a BaseItem by filling up metadata-fields + by calling .load_item() on the respective ItemLoaders. + + :param response: scrapy.http.Response + :param kwargs: + :return: BaseItem via .load_item() + + Scrapy Contracts: + @url https://www.scienceinschool.org/article/2006/birdflu/ + @returns item 1 + """ + multilanguage_article_list: list = response.xpath('//ul[@class="vf-links__list vf-links__list--secondary | ' + 'vf-list"]/li/a/@href').getall() + # on the left side of each article is a list of "Available languages", which holds URLs to all available + # versions of the (currently visited) article, including its own URL. We need to make sure that we're only + # gathering URLs that haven't been parsed before: + # self.logger.info(f"Before gathering article translations: {len(self.DEBUG_ALL_ARTICLE_URLS)}") + if multilanguage_article_list: + for article_translation_url in multilanguage_article_list: + if article_translation_url not in self.DEBUG_ALL_ARTICLE_URLS: + # making sure we're not parsing translated articles more than once or causing loops + if article_translation_url.endswith('.pdf'): + # skipping direct-links to .pdf files because scrapy / splash can't handle these + continue + elif "/sr/" in article_translation_url or article_translation_url.endswith('-sr/'): + # Articles that are translated to Serbian currently aren't supported by the dateparser. + # Since we don't want to deal with ~40 errors from these URLs, we skip them altogether. + continue + else: + yield scrapy.Request(url=article_translation_url, callback=self.parse) + self.DEBUG_ALL_ARTICLE_URLS.update(multilanguage_article_list) + # self.logger.info(f"This message should still be appearing after fetching article translations. 
URLs gathered " + # f"so far: {len(self.DEBUG_ALL_ARTICLE_URLS)}") + + title: str = response.xpath('//meta[@property="og:title"]/@content').get() + if title is None: + title = response.xpath('//head/title/text()').get() + description: str = response.xpath('//meta[@property="og:description"]/@content').get() + thumbnail_url: str = response.xpath('//meta[@property="og:image"]/@content').get() + language: list = response.xpath('//html/@lang').getall() + + date_published_raw: str = response.xpath('//p[@class="vf-meta__date"]/text()').get() + date_published = str() + if date_published_raw: + # using dateparser to get a reusable ISO-format from strings like 'January 28, 2016' + # dateparser will show warnings in Python 3.10 (we're waiting for a new dateparser version) + date_parsed = dateparser.parse(date_string=date_published_raw) + if date_parsed: + # the dateparser library can't parse all languages reliably, throws errors with serbian articles + date_published = date_parsed.isoformat() + else: + date_published = datetime.datetime.now() + + authors_raw: list = response.xpath('//div[@class="vf-author | vf-article-meta-info__author"]/p/text()').getall() + authors_clean = list() + if authors_raw: + for author_raw in authors_raw: + possible_authors: str = w3lib.html.strip_html5_whitespace(author_raw) + if possible_authors: + authors_clean.append(possible_authors) + + # selector for the whole metadata container, in case you want to try it out with Scrapy Shell: + # response.xpath('//aside[@class="vf-article-meta-information"]').getall() + metadata_container_ages_topics_keywords: list = response.xpath('//p[@class="vf-meta__topics"]').getall() + # this metadata container doesn't have individual CSS Selectors for the different types of metadata + # therefore we have to analyze it line-by-line: + age_ranges = list() + disciplines = set() + keywords = set() + if metadata_container_ages_topics_keywords: + for metadata_container_item in metadata_container_ages_topics_keywords: + current_selector = scrapy.Selector(text=metadata_container_item) + current_selector_description = current_selector.xpath('//span/text()').get() + if current_selector_description: + if "Ages:" in current_selector_description: + age_ranges_raw_string: str = current_selector.xpath('//p/text()').get() + # a typical string value can be ' 14-16, 16-19' (including the whitespace around single values) + if age_ranges_raw_string: + # therefore we're splitting up the string by its commas and removing the whitespace around + # each value + potential_age_ranges: list = age_ranges_raw_string.split(',') + if potential_age_ranges: + for age_range_item in potential_age_ranges: + if age_range_item in self.KEYWORD_EXCLUSION_LIST: + # filtering out the 'not applicable' string (which can also appear in topics) + pass + else: + age_range_clean = age_range_item.strip() + age_ranges.append(age_range_clean) + if "Topics:" in current_selector_description: + # there can be several topics per article + topic_description_list_raw = current_selector.xpath('//a/text()').getall() + topic_description_urls = current_selector.xpath('//a/@href').getall() + if topic_description_list_raw and topic_description_urls: + # topic_dict = dict(zip(topic_description_list_raw, topic_description_urls)) + for potential_topic in topic_description_list_raw.copy(): + # topics can either be real disciplines or will be treated as additional keywords + if potential_topic in self.TOPICS_TO_DISCIPLINES_MAPPING: + disciplines.add(self.TOPICS_TO_DISCIPLINES_MAPPING.get(potential_topic)) 
+ elif potential_topic in self.KEYWORD_EXCLUSION_LIST: + topic_description_list_raw.remove(potential_topic) + else: + keywords.add(potential_topic) + if "Keywords:" in current_selector_description: + keyword_description_list_raw: list = current_selector.xpath('//a/text()').getall() + keyword_description_urls: list = current_selector.xpath('//a/@href').getall() + if keyword_description_list_raw and keyword_description_urls: + # keyword_dict = dict(zip(keyword_description_list_raw, keyword_description_urls)) + for potential_keyword in keyword_description_list_raw: + keywords.add(potential_keyword) + + # supporting_materials_selector = response.xpath('//article[@class="sis-materials"]/p/a') + supporting_materials_descriptions: list = \ + response.xpath('//article[@class="sis-materials"]/p/a/text()').getall() + supporting_materials_urls: list = response.xpath('//article[@class="sis-materials"]/p/a/@href').getall() + # on the right-hand side of an article there can (sometimes) be downloadable, additional materials: + # - supporting materials (teachers guides etc.) + # - "Download this article as a PDF"-button + # ToDo: these materials would be suitable as "Serienobjekte" in a future crawler-version, see below + if supporting_materials_descriptions and supporting_materials_urls: + supporting_materials_dict = dict(zip(supporting_materials_descriptions, supporting_materials_urls)) + if "Download this article as a PDF" in supporting_materials_dict.keys(): + # first, we're extracting the PDF Download URL and remove it from the dictionary + article_pdf_download_url = supporting_materials_dict.pop("Download this article as a PDF") + if article_pdf_download_url: + # ToDo: if PDF download is available -> add it to our binary field? + pass + if supporting_materials_dict: + # before we look for "supporting materials", we need to make sure that our dict isn't empty after + # removing the "Download this article as a PDF"-URL + # supporting_materials_url_list = supporting_materials_dict.values() + # ToDo: put these urls into an "edu-sharing"-Serienobjekt as soon as our environment supports it + pass + + base = BaseItemLoader() + + # TODO: fill "base"-keys with values for + # - binary optional (only needed if you're working with binary files (e.g. 
.pdf-files), + # if you want to see an example, check out "niedersachsen_abi_spider.py") + base.add_value('sourceId', response.url) + hash_temp: str = f"{date_published}v{self.version}" + base.add_value('hash', hash_temp) + if thumbnail_url: + base.add_value('thumbnail', thumbnail_url) + lom = LomBaseItemloader() + + general = LomGeneralItemloader() + general.add_value('identifier', response.url) + if title: + general.add_value('title', title) + if keywords: + general.add_value('keyword', keywords) + if description: + general.add_value('description', description) + if language: + general.add_value('language', language) + # depending on the article language, we're creating sub-folders within edu-sharing: + # SYNC_OBJ/science_in_school_spider// + base.add_value('origin', language) + else: + # if no language code is detected, the main part of the website is always available in English + general.add_value('language', 'en') + # noinspection DuplicatedCode + lom.add_value('general', general.load_item()) + + technical = LomTechnicalItemLoader() + technical.add_value('format', 'text/html') + technical.add_value('location', response.url) + lom.add_value('technical', technical.load_item()) + + lifecycle = LomLifecycleItemloader() + lifecycle.add_value('role', 'publisher') + lifecycle.add_value('organization', 'EIROforum') # EIROforum is the intergovernmental organization/publisher + # behind scienceinschool.org + lifecycle.add_value('url', 'https://www.scienceinschool.org/about-eiroforum/') + lifecycle.add_value('email', 'info@eiroforum.org') + lifecycle.add_value('date', date_published) + lom.add_value('lifecycle', lifecycle.load_item()) + + educational = LomEducationalItemLoader() + if language: + educational.add_value('language', language) + # ToDo: the primary website language is always English, but sometimes additional languages are available as well + lom_age_range_loader = LomAgeRangeItemLoader() + # since we already prepared age_ranges above to only hold valid, already whitespace-stripped strings, we can use + # these values to fill our typicalAgeRange. 
According to the "Filter"-function on scienceinschool.org there + # could be these possible values in our list: "< 11", "11-14", "14-16", "16-19" + age_range_total = set() + if age_ranges: + for age_range_item in age_ranges: + if "<" in age_range_item: + # "< 11" + from_range = 0 + to_range = age_range_item.replace('<', '') + to_range = int(to_range) + age_range_total.add(from_range) + age_range_total.add(to_range) + elif "-" in age_range_item: + from_range = int(min(age_range_item.split('-'))) + to_range = int(max(age_range_item.split('-'))) + age_range_total.add(from_range) + age_range_total.add(to_range) + if age_range_total: + lom_age_range_loader.add_value('fromRange', min(age_range_total)) + lom_age_range_loader.add_value('toRange', max(age_range_total)) + educational.add_value('typicalAgeRange', lom_age_range_loader.load_item()) + + lom.add_value('educational', educational.load_item()) + + classification = LomClassificationItemLoader() + lom.add_value('classification', classification.load_item()) + + base.add_value('lom', lom.load_item()) + + vs = ValuespaceItemLoader() + vs.add_value('discipline', disciplines) + vs.add_value('intendedEndUserRole', 'teacher') + vs.add_value('sourceContentType', 'Lehrkräftefortbildung') + vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') + # see: https://www.embl.de/aboutus/privacy_policy/ + vs.add_value('new_lrt', [Constants.NEW_LRT_MATERIAL, + 'b98c0c8c-5696-4537-82fa-dded7236081e', '0f519bd5-069c-4d32-b6d3-a373ac96724c']) + # "Artikel und Einzelpublikation", "Fachliche News" + vs.add_value('containsAdvertisement', 'no') + vs.add_value('conditionsOfAccess', 'no_login') + vs.add_value('price', 'no') + base.add_value('valuespaces', vs.load_item()) + + license_loader = LicenseItemLoader() + if authors_clean: + license_loader.add_value('author', authors_clean) + license_raw: str = response.xpath('//a[@href="/copyright"]/text()').get() + # see: https://www.scienceinschool.org/copyright/ + # the possible string patterns seem to be either "CC-BY", "CC-BY-NC-SA" or "CC-BY-NC-ND" + if license_raw: + if license_raw in self.LICENSE_MAPPING: + license_loader.add_value('url', self.LICENSE_MAPPING.get(license_raw)) + # sometimes there is an additional license description available, which always seems to be in the next + #
<div>
-container after the copyright -element: + license_description = response.xpath('//div[child::a[@href="/copyright"]]/following-sibling::div' + '/text()').get() + if license_description: + license_description = w3lib.html.strip_html5_whitespace(license_description) + license_loader.add_value('description', license_description) + else: + # as a fallback, we try to set the raw license string + license_loader.add_value('description', license_raw) + # noinspection DuplicatedCode + base.add_value('license', license_loader.load_item()) + + permissions = super().getPermissions(response) + base.add_value('permissions', permissions.load_item()) + + response_loader = super().mapResponse(response) + base.add_value('response', response_loader.load_item()) + + yield base.load_item() From d1fa8c25300b57ced76a2389897836b951bbd41c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 27 Jun 2022 14:39:10 +0200 Subject: [PATCH 097/590] grundschulkoenig_spider v0.0.5: - fix: Copyright mapping --- converter/spiders/grundschulkoenig_spider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py index be603ac1..c004d6c3 100644 --- a/converter/spiders/grundschulkoenig_spider.py +++ b/converter/spiders/grundschulkoenig_spider.py @@ -19,7 +19,7 @@ class GrundSchulKoenigSpider(CrawlSpider, LomBase): start_urls = ['https://www.grundschulkoenig.de/sitemap.xml?sitemap=pages&cHash=b8e1a6633393d69093d0ebe93a3d2616'] name = 'grundschulkoenig_spider' - version = "0.0.4" # last update: 2022-05-06 + version = "0.0.5" # last update: 2022-06-27 custom_settings = { "ROBOTSTXT_OBEY": False, # while there is no robots.txt, there is a 404-forward-page that gets misinterpreted by Scrapy @@ -187,7 +187,7 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() - lic.add_value('url', Constants.LICENSE_COPYRIGHT_LAW) + lic.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) base.add_value("license", lic.load_item()) permissions = super().getPermissions(response) From 558b0fe9093e3e652a6a9000aa5d53c3c62a2ab6 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 29 Jun 2022 12:06:15 +0200 Subject: [PATCH 098/590] fix:improve logging for changed items --- converter/spiders/base_classes/lom_base.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index c33444fa..7625f3c0 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -73,7 +73,10 @@ def hasChanged(self, response=None) -> bool: db = EduSharing().findItem(self.getId(response), self) changed = db == None or db[1] != self.getHash(response) if not changed: - logging.info("Item " + db[0] + " has not changed") + logging.info( + "Item " + self.getId(response) + + "(uuid: " + db[0] + ") has not changed" + ) return changed # you might override this method if you don't want to import specific entries From f3617f36ae4620c9dce59868db39efc1ddb5fddd Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 29 Jun 2022 12:06:58 +0200 Subject: [PATCH 099/590] fix:esconnector typicalagerange not mapped + duration ms mapping --- converter/es_connector.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py 
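The hunk below fixes two mappings in transformItem(): the LOM duration is converted to the milliseconds edu-sharing expects, and the typicalAgeRange block is read via its camelCase key so that the ccm:educationaltypicalagerange_* properties are actually filled. A condensed sketch of the duration step, assuming the value arrives as a number of seconds in string or numeric form (the standalone helper is illustrative; in the patch the same logic sits inline in transformItem(), and unparseable values are passed through unchanged):

```python
def duration_to_milliseconds(duration):
    # edu-sharing expects cclom:duration in milliseconds; numeric second
    # values are scaled, anything float() cannot parse is returned unchanged.
    try:
        return int(float(duration) * 1000)
    except (TypeError, ValueError):
        return duration
```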
index 94b7dca8..aa3723c4 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -288,7 +288,13 @@ def transformItem(self, uuid, spider, item): spaces["cclom:general_keyword"] = None if "technical" in item["lom"]: if "duration" in item["lom"]["technical"]: - spaces["cclom:duration"] = item["lom"]["technical"]["duration"] + duration = item["lom"]["technical"]["duration"] + try: + # edusharing requries milliseconds + duration = int(float(duration) * 1000) + except: + pass + spaces["cclom:duration"] = duration # TODO: this does currently not support multiple values per role if "lifecycle" in item["lom"]: @@ -358,13 +364,12 @@ def transformItem(self, uuid, spider, item): } for key in item["valuespaces"]: spaces[valuespaceMapping[key]] = item["valuespaces"][key] - if "typicalagerange" in item["lom"]["educational"]: - spaces["ccm:educationaltypicalagerange_from"] = item["lom"]["educational"][ - "typicalagerange" - ]["fromRange"] - spaces["ccm:educationaltypicalagerange_to"] = item["lom"]["educational"][ - "typicalagerange" - ]["toRange"] + if "typicalAgeRange" in item["lom"]["educational"]: + tar = item["lom"]["educational"]["typicalAgeRange"] + if "fromRange" in tar: + spaces["ccm:educationaltypicalagerange_from"] = tar["fromRange"] + if "toRange" in tar: + spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] # intendedEndUserRole = Field(output_processor=JoinMultivalues()) # discipline = Field(output_processor=JoinMultivalues()) # educationalContext = Field(output_processor=JoinMultivalues()) From 2396bf0c8c54ca3b301ce198297665afff287a3e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 1 Jul 2022 12:55:46 +0200 Subject: [PATCH 100/590] science_in_school_spider v0.0.2 - fix: "general.language" is using underscores instead of dashes -- edu-sharing is expecting underscores in language-codes for this field (to be properly mapped to a "cclom:general_language_DISPLAYNAME") - version bump - rename: "DEBUG_ALL_ARTICLE_URLS"-set to "ALL_ARTICLE_URLS" to make it clearer that this set is needed for functionality of the crawler, not just for debugging --- converter/spiders/science_in_school_spider.py | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index 4c11bdd2..759ebd43 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -17,7 +17,7 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): start_urls = [ "https://www.scienceinschool.org/issue/" ] - version = "0.0.1" + version = "0.0.2" # last update: 2022-07-01 custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True @@ -25,8 +25,7 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): allowed_domains = [ "scienceinschool.org" ] - DEBUG_ALL_ARTICLE_URLS = set() - DEBUG_LANGUAGES_AVAILABLE = set() + ALL_ARTICLE_URLS = set() TOPICS_TO_DISCIPLINES_MAPPING = { "Astronomy / space": "Astronomy", @@ -82,8 +81,8 @@ def parse_article_overview(self, response: scrapy.http.Response) -> scrapy.Reque """ article_urls = response.xpath('//h3[@class="vf-card__heading"]/a[@class="vf-card__link"]/@href').getall() # self.logger.info(f"Currently on {response.url} // Found {len(article_urls)} individual articles") - self.DEBUG_ALL_ARTICLE_URLS.update(article_urls) - # self.logger.info(f"Total URLs gathered so far: {len(self.DEBUG_ALL_ARTICLE_URLS)}") + self.ALL_ARTICLE_URLS.update(article_urls) + 
# self.logger.info(f"Total URLs gathered so far: {len(self.ALL_ARTICLE_URLS)}") for article_url in article_urls: yield scrapy.Request(url=article_url, callback=self.parse) pass @@ -112,10 +111,10 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # on the left side of each article is a list of "Available languages", which holds URLs to all available # versions of the (currently visited) article, including its own URL. We need to make sure that we're only # gathering URLs that haven't been parsed before: - # self.logger.info(f"Before gathering article translations: {len(self.DEBUG_ALL_ARTICLE_URLS)}") + # self.logger.info(f"Before gathering article translations: {len(self.ALL_ARTICLE_URLS)}") if multilanguage_article_list: for article_translation_url in multilanguage_article_list: - if article_translation_url not in self.DEBUG_ALL_ARTICLE_URLS: + if article_translation_url not in self.ALL_ARTICLE_URLS: # making sure we're not parsing translated articles more than once or causing loops if article_translation_url.endswith('.pdf'): # skipping direct-links to .pdf files because scrapy / splash can't handle these @@ -126,9 +125,9 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: continue else: yield scrapy.Request(url=article_translation_url, callback=self.parse) - self.DEBUG_ALL_ARTICLE_URLS.update(multilanguage_article_list) + self.ALL_ARTICLE_URLS.update(multilanguage_article_list) # self.logger.info(f"This message should still be appearing after fetching article translations. URLs gathered " - # f"so far: {len(self.DEBUG_ALL_ARTICLE_URLS)}") + # f"so far: {len(self.ALL_ARTICLE_URLS)}") title: str = response.xpath('//meta[@property="og:title"]/@content').get() if title is None: @@ -251,7 +250,10 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: if description: general.add_value('description', description) if language: - general.add_value('language', language) + for language_item in language: + # edu-sharing expects the base.language value to be using underscores + language_underscore: str = language_item.replace('-', '_') + general.add_value('language', language_underscore) # depending on the article language, we're creating sub-folders within edu-sharing: # SYNC_OBJ/science_in_school_spider// base.add_value('origin', language) From b0c721fde3f55a8c314a0d316ac3e0f4556c9e74 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 1 Jul 2022 12:57:26 +0200 Subject: [PATCH 101/590] add: "general.language" documentation - the edu-sharing repo is expecting underscores for this field to be able to properly map values to 'cclom:general_language_DISPLAYNAME' --- converter/spiders/sample_spider_alternative.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 8174c238..ba362c66 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -82,7 +82,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - title required # - keyword required # - description required - # - language recommended + # - language recommended (edu-sharing expects underscores in language-codes, e.g. 
'en-US' + # needs to be replaced by 'en_US') # - coverage optional # - structure optional # - aggregationLevel optional From 9f96411d6a444c30642bcee83d2ad34faa4344d6 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 29 Jun 2022 12:06:15 +0200 Subject: [PATCH 102/590] fix:improve logging for changed items --- converter/spiders/base_classes/lom_base.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index c33444fa..7625f3c0 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -73,7 +73,10 @@ def hasChanged(self, response=None) -> bool: db = EduSharing().findItem(self.getId(response), self) changed = db == None or db[1] != self.getHash(response) if not changed: - logging.info("Item " + db[0] + " has not changed") + logging.info( + "Item " + self.getId(response) + + "(uuid: " + db[0] + ") has not changed" + ) return changed # you might override this method if you don't want to import specific entries From caa100534ddbd65443e0910bcc02d186e617e471 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 29 Jun 2022 12:06:58 +0200 Subject: [PATCH 103/590] fix:esconnector typicalagerange not mapped + duration ms mapping --- converter/es_connector.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 94b7dca8..aa3723c4 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -288,7 +288,13 @@ def transformItem(self, uuid, spider, item): spaces["cclom:general_keyword"] = None if "technical" in item["lom"]: if "duration" in item["lom"]["technical"]: - spaces["cclom:duration"] = item["lom"]["technical"]["duration"] + duration = item["lom"]["technical"]["duration"] + try: + # edusharing requries milliseconds + duration = int(float(duration) * 1000) + except: + pass + spaces["cclom:duration"] = duration # TODO: this does currently not support multiple values per role if "lifecycle" in item["lom"]: @@ -358,13 +364,12 @@ def transformItem(self, uuid, spider, item): } for key in item["valuespaces"]: spaces[valuespaceMapping[key]] = item["valuespaces"][key] - if "typicalagerange" in item["lom"]["educational"]: - spaces["ccm:educationaltypicalagerange_from"] = item["lom"]["educational"][ - "typicalagerange" - ]["fromRange"] - spaces["ccm:educationaltypicalagerange_to"] = item["lom"]["educational"][ - "typicalagerange" - ]["toRange"] + if "typicalAgeRange" in item["lom"]["educational"]: + tar = item["lom"]["educational"]["typicalAgeRange"] + if "fromRange" in tar: + spaces["ccm:educationaltypicalagerange_from"] = tar["fromRange"] + if "toRange" in tar: + spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] # intendedEndUserRole = Field(output_processor=JoinMultivalues()) # discipline = Field(output_processor=JoinMultivalues()) # educationalContext = Field(output_processor=JoinMultivalues()) From 753ae8e3924bf59f85d68d42a53c71822160a2a7 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 29 Jun 2022 12:08:14 +0200 Subject: [PATCH 104/590] feat:init sodix --- converter/.env.example | 4 + converter/spiders/sodix_spider.py | 206 ++++++++++++++++++++++++++++++ 2 files changed, 210 insertions(+) create mode 100644 converter/spiders/sodix_spider.py diff --git a/converter/.env.example b/converter/.env.example index 6d62e1a2..ab02649b 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -34,3 +34,7 @@ 
YOUTUBE_API_KEY = "" # only for oeh spider: select the sources you want to fetch from oeh (comma seperated) # OEH_IMPORT_SOURCES = 'oeh,wirlernenonline_spider,serlo_spider,youtube_spider' + +# Sodix Spider login data +# SODIX_SPIDER_USERNAME = "" +# SODIX_SPIDER_PASSWORD = "" \ No newline at end of file diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py new file mode 100644 index 00000000..e3b7b486 --- /dev/null +++ b/converter/spiders/sodix_spider.py @@ -0,0 +1,206 @@ +from converter.items import * +from .base_classes import LomBase +from .base_classes import JSONBase +import json +import logging +import requests +import html +from converter.constants import * +import scrapy + +# Spider to fetch RSS from planet schule +from .. import env + + +class SodixSpider(scrapy.Spider, LomBase, JSONBase): + name = "sodix_spider" + friendlyName = "Sodix" + url = "https://sodix.de/" + version = "0.1.3" + apiUrl = "https://api.sodix.de/gql/graphql" + access_token: str = None + page_size = 2500 + + def __init__(self, **kwargs): + self.access_token = requests.post( + "https://api.sodix.de/gql/auth/login", + None, + { + "login": env.get("SODIX_SPIDER_USERNAME"), + "password": env.get("SODIX_SPIDER_PASSWORD"), + } + ).json()['access_token'] + LomBase.__init__(self, **kwargs) + + def mapResponse(self, response): + r = LomBase.mapResponse(self, response, fetchData=False) + r.replace_value("text", "") + r.replace_value("html", "") + r.replace_value("url", response.meta["item"].get("link")) + return r + + def getId(self, response): + return response.meta["item"].get("id") + + def getHash(self, response): + return response.meta["item"].get("updated") + self.version + + def getUri(self, response=None) -> str: + # or media.originalUrl? + return self.get("media.url", json=response.meta["item"]) + + def startRequest(self, offset=0): + return scrapy.Request( + url=self.apiUrl, + callback=self.parseRequest, + body=json.dumps({ + "query": "{\n findAllMetadata(page: " + str(offset) + ", pageSize: " + str( + self.page_size) + ") {\n id\n identifier\n title\n description\n keywords\n language\n creationDate\n updated\n publishedTime\n availableTo\n recordStatus\n author\n authorWebsite\n producer\n publishers{\n id\n title\n description\n imageDetails\n imagePreview\n officialWebsite\n linkToGeneralUseRights \n }\n source{\n id\n name\n description\n imageUrl\n termsOfUse\n generalUseRights\n website\n sourceStatus\n created\n edited \n }\n media {\n size\n dataType\n duration\n thumbDetails\n thumbPreview\n url\n originalUrl \n }\n targetAudience\n learnResourceType\n educationalLevels\n classLevel\n schoolTypes\n eafCode\n subject{\n id\n name\n level\n path\n } \n competencies{\n id\n level\n name\n path \n }\n license{\n name\n version\n country\n url\n text \n }\n additionalLicenseInformation\n downloadRight\n cost\n linkedObjects \n }\n}\n\n", + "operationName": None + }), + method="POST", + headers={ + "Accept": "application/json", + "Content-Type": "application/json", + "Authorization": "Bearer " + self.access_token + }, + meta={"offset": offset}, + ) + + def start_requests(self): + yield self.startRequest() + + def parseRequest(self, response): + results = json.loads(response.body) + if results: + for item in results['data']['findAllMetadata']: + copyResponse = response.copy() + copyResponse.meta["item"] = item + if self.hasChanged(copyResponse): + yield self.handleEntry(copyResponse) + yield self.startRequest(response.meta["offset"] + self.page_size) + + def handleEntry(self, 
response): + return LomBase.parse(self, response) + + # thumbnail is always the same, do not use the one from rss + def getBase(self, response): + base = LomBase.getBase(self, response) + base.replace_value( + "thumbnail", self.get("media.thumbPreview", json=response.meta["item"]) + ) + for publisher in self.get("publishers", json=response.meta["item"]): + base.add_value( + "publisher", publisher['title'] + ) + return base + + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.replace_value( + "title", + self.get("title", json=response.meta["item"]) + ) + general.add_value( + "keyword", + self.get("keywords", json=response.meta["item"]) + ) + general.add_value( + "description", + self.get("description", json=response.meta["item"]) + ) + return general + + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.replace_value("format", self.get("media.dataType", json=response.meta["item"])) + technical.replace_value( + "location", self.getUri(response) + ) + technical.add_value( + "duration", self.get("media.duration", json=response.meta["item"]) + ) + technical.add_value( + "size", self.get("media.size", json=response.meta["item"]) + ) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + licenseId = self.get("license.name", json=response.meta["item"]) + # @TODO: add mappings for the sodix names + url = None + license.add_value("url", url) + return license + + def getLOMEducational(self, response=None) -> LomEducationalItemLoader: + educational = LomBase.getLOMEducational(response) + class_level = self.get('classLevel', json=response.meta['item']) + if class_level and len(class_level.split("-")) == 2: + split = class_level.split("-") + tar = LomAgeRangeItemLoader() + tar.add_value( + "fromRange", + split[0] + ) + tar.add_value( + "toRange", + split[1] + ) + educational.add_value("typicalAgeRange", tar.load_item()) + return educational + + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + subjects = self.get('subject', json=response.meta['item']) + for subject in subjects if subjects else []: + valuespaces.add_value("discipline", subject['name']) + valuespaces.add_value("educationalContext", self.get('educationalLevels', json=response.meta['item'])) + # @TODO: add mappings! 
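The mapping asked for in the TODO above is added later in this series (sodix_spider v0.1.5) as plain dictionary lookups. A minimal sketch of that pattern; the two entries are taken from that later patch, the helper name is illustrative:

MAPPING_EDUCONTEXT = {
    "Primarbereich": "Primarstufe",
    "Fort- und Weiterbildung": "Fortbildung",
}

def map_educational_levels(raw_levels):
    # translate Sodix "educationalLevels" strings into the vocabulary expected by
    # the valuespace pipeline; values without a known mapping pass through unchanged
    return [MAPPING_EDUCONTEXT.get(level, level) for level in raw_levels or []]

# map_educational_levels(["Primarbereich", "Sekundarstufe I"]) -> ["Primarstufe", "Sekundarstufe I"]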
+ valuespaces.add_value("intendedEndUserRole", self.get('targetAudience', json=response.meta['item'])) + if self.get('cost', json=response.meta['item']) == "FREE": + valuespaces.add_value("price", "no") + + # @TODO: mapping required: + # enum LRT { + # APP + # ARBEITSBLATT + # AUDIO + # AUDIOVISUELLES + # BILD + # DATEN + # ENTDECKENDES + # EXPERIMENT + # FALLSTUDIE + # GLOSSAR + # HANDBUCH + # INTERAKTION + # KARTE + # KURS + # LERNKONTROLLE + # LERNSPIEL + # MODELL + # OFFENE + # PRESENTATION + # PROJECT + # QUELLE + # RADIO + # RECHERCHE + # RESSOURCENTYP + # ROLLENSPIEL + # SIMULATION + # SOFTWARE + # SONSTIGES + # TEST + # TEXT + # UBUNG + # UNTERRICHTSBAUSTEIN + # UNTERRICHTSPLANUNG + # VERANSCHAULICHUNG + # VIDEO + # WEBSEITE + # WEBTOOL + # } + valuespaces.add_value("learningResourceType", self.get('learnResourceType', json=response.meta['item'])) + return valuespaces + From c2c2b46edeb498a616d4e280b61e4b64788895e9 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 29 Jun 2022 14:15:00 +0200 Subject: [PATCH 105/590] fix:sodix cleanup + mapping fixes --- converter/spiders/sodix_spider.py | 98 ++++++++++++++++++++++++++++--- 1 file changed, 90 insertions(+), 8 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index e3b7b486..1d998776 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -16,7 +16,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.1.3" + version = "0.1.4" apiUrl = "https://api.sodix.de/gql/graphql" access_token: str = None page_size = 2500 @@ -43,7 +43,7 @@ def getId(self, response): return response.meta["item"].get("id") def getHash(self, response): - return response.meta["item"].get("updated") + self.version + return response.meta["item"].get("updated") + "v" + self.version def getUri(self, response=None) -> str: # or media.originalUrl? 
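The GraphQL exchange that the following hunk rebuilds as a readable f-string can also be exercised outside Scrapy. A rough sketch with requests, assuming valid Sodix credentials and requesting only a few of the many fields the crawler actually asks for:

import requests

def fetch_sodix_page(login: str, password: str, page: int = 0, page_size: int = 2500) -> list:
    # authenticate first; the API hands back a bearer token for the GraphQL endpoint
    token = requests.post(
        "https://api.sodix.de/gql/auth/login",
        json={"login": login, "password": password},
    ).json()["access_token"]
    query = f"{{ findAllMetadata(page: {page}, pageSize: {page_size}) {{ id title updated }} }}"
    response = requests.post(
        "https://api.sodix.de/gql/graphql",
        json={"query": query, "operationName": None},
        headers={"Accept": "application/json", "Authorization": f"Bearer {token}"},
    )
    # later patches in this series page through the results by incrementing `page`
    # until an empty list is returned
    return response.json()["data"]["findAllMetadata"]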
@@ -52,10 +52,85 @@ def getUri(self, response=None) -> str: def startRequest(self, offset=0): return scrapy.Request( url=self.apiUrl, - callback=self.parseRequest, + callback=self.parse_request, body=json.dumps({ - "query": "{\n findAllMetadata(page: " + str(offset) + ", pageSize: " + str( - self.page_size) + ") {\n id\n identifier\n title\n description\n keywords\n language\n creationDate\n updated\n publishedTime\n availableTo\n recordStatus\n author\n authorWebsite\n producer\n publishers{\n id\n title\n description\n imageDetails\n imagePreview\n officialWebsite\n linkToGeneralUseRights \n }\n source{\n id\n name\n description\n imageUrl\n termsOfUse\n generalUseRights\n website\n sourceStatus\n created\n edited \n }\n media {\n size\n dataType\n duration\n thumbDetails\n thumbPreview\n url\n originalUrl \n }\n targetAudience\n learnResourceType\n educationalLevels\n classLevel\n schoolTypes\n eafCode\n subject{\n id\n name\n level\n path\n } \n competencies{\n id\n level\n name\n path \n }\n license{\n name\n version\n country\n url\n text \n }\n additionalLicenseInformation\n downloadRight\n cost\n linkedObjects \n }\n}\n\n", + "query": f"""{{ + findAllMetadata(page: {offset}, pageSize: {self.page_size}) {{ + id + identifier + title + description + keywords + language + creationDate + updated + publishedTime + availableTo + recordStatus + author + authorWebsite + producer + publishers {{ + id + title + description + imageDetails + imagePreview + officialWebsite + linkToGeneralUseRights + }} + source {{ + id + name + description + imageUrl + termsOfUse + generalUseRights + website + sourceStatus + created + edited + }} + media {{ + size + dataType + duration + thumbDetails + thumbPreview + url + originalUrl + }} + targetAudience + learnResourceType + educationalLevels + classLevel + schoolTypes + eafCode + subject {{ + id + name + level + path + }} + competencies {{ + id + level + name + path + }} + license {{ + name + version + country + url + text + }} + additionalLicenseInformation + downloadRight + cost + linkedObjects + }} + }}""", "operationName": None }), method="POST", @@ -70,7 +145,7 @@ def startRequest(self, offset=0): def start_requests(self): yield self.startRequest() - def parseRequest(self, response): + def parse_request(self, response): results = json.loads(response.body) if results: for item in results['data']['findAllMetadata']: @@ -95,6 +170,11 @@ def getBase(self, response): ) return base + def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: + lifecycle = LomBase.getLOMLifecycle(response) + + return lifecycle + def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) general.replace_value( @@ -128,6 +208,7 @@ def getLOMTechnical(self, response): def getLicense(self, response): license = LomBase.getLicense(self, response) licenseId = self.get("license.name", json=response.meta["item"]) + licenseUrl = self.get("license.url", json=response.meta["item"]) # @TODO: add mappings for the sodix names url = None license.add_value("url", url) @@ -139,13 +220,14 @@ def getLOMEducational(self, response=None) -> LomEducationalItemLoader: if class_level and len(class_level.split("-")) == 2: split = class_level.split("-") tar = LomAgeRangeItemLoader() + # mapping from classLevel to ageRange tar.add_value( "fromRange", - split[0] + int(split[0]) + 5 ) tar.add_value( "toRange", - split[1] + int(split[1]) + 5 ) educational.add_value("typicalAgeRange", tar.load_item()) return educational From a7481dce851c2fbc6eca97884bc25f04c4737c02 Mon Sep 17 
00:00:00 2001 From: Torsten Simon Date: Wed, 29 Jun 2022 14:32:22 +0200 Subject: [PATCH 106/590] fix:ending fix, page mapping --- converter/spiders/sodix_spider.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 1d998776..f105596f 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -49,13 +49,13 @@ def getUri(self, response=None) -> str: # or media.originalUrl? return self.get("media.url", json=response.meta["item"]) - def startRequest(self, offset=0): + def startRequest(self, page=0): return scrapy.Request( url=self.apiUrl, callback=self.parse_request, body=json.dumps({ "query": f"""{{ - findAllMetadata(page: {offset}, pageSize: {self.page_size}) {{ + findAllMetadata(page: {page}, pageSize: {self.page_size}) {{ id identifier title @@ -139,7 +139,7 @@ def startRequest(self, offset=0): "Content-Type": "application/json", "Authorization": "Bearer " + self.access_token }, - meta={"offset": offset}, + meta={"page": page}, ) def start_requests(self): @@ -148,12 +148,15 @@ def start_requests(self): def parse_request(self, response): results = json.loads(response.body) if results: - for item in results['data']['findAllMetadata']: + list = results['data']['findAllMetadata'] + if len(list) == 0: + return + for item in list: copyResponse = response.copy() copyResponse.meta["item"] = item if self.hasChanged(copyResponse): yield self.handleEntry(copyResponse) - yield self.startRequest(response.meta["offset"] + self.page_size) + yield self.startRequest(response.meta["page"] + 1) def handleEntry(self, response): return LomBase.parse(self, response) From b1f412a967a9cdf6980994a679627f9435c89247 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 1 Jul 2022 12:07:46 +0200 Subject: [PATCH 107/590] fix:add found values as comment --- converter/spiders/sodix_spider.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index f105596f..9c8f4352 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -213,7 +213,9 @@ def getLicense(self, response): licenseId = self.get("license.name", json=response.meta["item"]) licenseUrl = self.get("license.url", json=response.meta["item"]) # @TODO: add mappings for the sodix names + # {None, 'CC BY-NC-SA', 'Copyright, lizenzpflichtig', 'CC BY-SA', 'CC BY-ND', 'CC BY', 'CC0', 'freie Lizenz', 'CC BY-NC-ND', 'keine Angaben (gesetzliche Regelung)', 'CC BY-NC', 'Gemeinfrei / Public Domain', 'Copyright, freier Zugang'} url = None + # {'', 'https://creativecommons.org/licenses/by-nd/4.0/deed.de', 'https://creativecommons.org/licenses/by-sa/3.0/deed.de', 'https://creativecommons.org/licenses/by/3.0/deed.de', 'https://creativecommons.org/licenses/by-nc-sa/4.0/deed.de', 'https://creativecommons.org/licenses/by-nc-nd/4.0/deed.de', 'https://creativecommons.org/licenses/by/2.0/deed.de', 'https://creativecommons.org/licenses/by/4.0/', 'https://creativecommons.org/licenses/by-nc-nd/2.0/de/', 'https://creativecommons.org/licenses/by-nc-nd/3.0/de/', 'https://creativecommons.org/licenses/by-sa/2.0/deed.de', 'https://creativecommons.org/licenses/by-nd/3.0/deed.de', 'https://creativecommons.org/licenses/by-nd/2.0/de/', 'https://creativecommons.org/licenses/by-nc-sa/3.0/deed.de', 'https://creativecommons.org/licenses/by-sa/4.0/deed.de', 'https://creativecommons.org/licenses/by/2.5/deed.de', 'https://creativecommons.org/licenses/by-sa/2.0/de/', 
'https://creativecommons.org/licenses/by/3.0/de/', 'https://creativecommons.org/licenses/by-nc-nd/3.0/deed.de', 'https://creativecommons.org/licenses/by-nc/3.0/de/', 'https://creativecommons.org/licenses/by-nd/3.0/de/', 'https://creativecommons.org/licenses/by-sa/2.5/deed.de', 'https://creativecommons.org/publicdomain/mark/1.0/deed.de', 'https://creativecommons.org/licenses/by-nc-sa/2.0/deed.de', 'https://creativecommons.org/licenses/by-sa/2.0/fr/deed.de', 'https://creativecommons.org/licenses/by-nc/3.0/deed.de', None, 'https://creativecommons.org/licenses/by-nc-sa/2.5/deed.de', 'https://creativecommons.org/licenses/by-nc/4.0/deed.de', 'https://creativecommons.org/publicdomain/zero/1.0/deed.de', 'https://creativecommons.org/licenses/by-sa/3.0/de/', 'https://creativecommons.org/licenses/by-nc-sa/3.0/de/'} license.add_value("url", url) return license From f310da02a01ec34f9a72abe7a2a2ebae266b0ea5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 30 Jun 2022 17:41:48 +0200 Subject: [PATCH 108/590] sodix_spider v0.1.5 - add: Mapping from Sodix "learnRessourcenTyp" to oeh LRT - add: Mapping for eduContex - add: Mapping for intendedEndUserRole - fix: hash -- string concatenations with "+" are less stable than f-Strings in Python --- converter/spiders/sodix_spider.py | 122 +++++++++++++++++++----------- 1 file changed, 76 insertions(+), 46 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 9c8f4352..536c82c6 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -16,11 +16,61 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.1.4" + version = "0.1.5" apiUrl = "https://api.sodix.de/gql/graphql" access_token: str = None page_size = 2500 + MAPPING_LRT = { + "APP": "application", + "ARBEITSBLATT": "worksheet", + "AUDIO": "audio", + "AUDIOVISUELLES": "audiovisual medium", + "BILD": "image", + "DATEN": "data", + "ENTDECKENDES": "exploration", + "EXPERIMENT": "experiment", + "FALLSTUDIE": "case_study", + "GLOSSAR": "glossary", + "HANDBUCH": "guide", + # "INTERAKTION": "", + "KARTE": "map", + "KURS": "course", + "LERNKONTROLLE": "assessment", + "LERNSPIEL": "educational Game", + "MODELL": "model", + "OFFENE": "open activity", + "PRESENTATION": "presentation", + "PROJECT": "project", + "QUELLE": "reference", + "RADIO": "broadcast", + "RECHERCHE": "enquiry-oriented activity", + "RESSOURCENTYP": "other", # "Anderer Ressourcentyp" + "ROLLENSPIEL": "role play", + "SIMULATION": "simulation", + "SOFTWARE": "application", + "SONSTIGES": "other", + # "TEST": "", + "TEXT": "text", + "UBUNG": "drill and practice", + "UNTERRICHTSBAUSTEIN": "teaching module", + "UNTERRICHTSPLANUNG": "lesson plan", + "VERANSCHAULICHUNG": "demonstration", + "VIDEO": "video", + "WEBSEITE": "web page", + "WEBTOOL": ["web page", "tool"], + + } + MAPPING_EDUCONTEXT = { + "Primarbereich": "Primarstufe", + "Fort- und Weiterbildung": "Fortbildung" + } + + MAPPING_INTENDED_END_USER_ROLE = { + "pupils": "learner", + + } + def __init__(self, **kwargs): self.access_token = requests.post( "https://api.sodix.de/gql/auth/login", @@ -43,7 +93,8 @@ def getId(self, response): return response.meta["item"].get("id") def getHash(self, response): - return response.meta["item"].get("updated") + "v" + self.version + return f"{response.meta['item'].get('updated')}v{self.version}" + # return response.meta["item"].get("updated") + "v" + 
self.version def getUri(self, response=None) -> str: # or media.originalUrl? @@ -242,52 +293,31 @@ def getValuespaces(self, response): subjects = self.get('subject', json=response.meta['item']) for subject in subjects if subjects else []: valuespaces.add_value("discipline", subject['name']) - valuespaces.add_value("educationalContext", self.get('educationalLevels', json=response.meta['item'])) - # @TODO: add mappings! + educational_context_list = self.get('educationalLevels', json=response.meta['item']) + if educational_context_list: + for potential_edu_context in educational_context_list: + if potential_edu_context in self.MAPPING_EDUCONTEXT: + potential_edu_context = self.MAPPING_EDUCONTEXT.get(potential_edu_context) + valuespaces.add_value('educationalContext', potential_edu_context) + target_audience_list = self.get('targetAudience', json=response.meta['item']) + if target_audience_list: + for target_audience_item in target_audience_list: + if target_audience_item in self.MAPPING_INTENDED_END_USER_ROLE: + target_audience_item = self.MAPPING_INTENDED_END_USER_ROLE.get(target_audience_item) + valuespaces.add_value('intendedEndUserRole', target_audience_item) valuespaces.add_value("intendedEndUserRole", self.get('targetAudience', json=response.meta['item'])) + if self.get('cost', json=response.meta['item']) == "FREE": valuespaces.add_value("price", "no") - - # @TODO: mapping required: - # enum LRT { - # APP - # ARBEITSBLATT - # AUDIO - # AUDIOVISUELLES - # BILD - # DATEN - # ENTDECKENDES - # EXPERIMENT - # FALLSTUDIE - # GLOSSAR - # HANDBUCH - # INTERAKTION - # KARTE - # KURS - # LERNKONTROLLE - # LERNSPIEL - # MODELL - # OFFENE - # PRESENTATION - # PROJECT - # QUELLE - # RADIO - # RECHERCHE - # RESSOURCENTYP - # ROLLENSPIEL - # SIMULATION - # SOFTWARE - # SONSTIGES - # TEST - # TEXT - # UBUNG - # UNTERRICHTSBAUSTEIN - # UNTERRICHTSPLANUNG - # VERANSCHAULICHUNG - # VIDEO - # WEBSEITE - # WEBTOOL - # } - valuespaces.add_value("learningResourceType", self.get('learnResourceType', json=response.meta['item'])) + potential_lrts = self.get('learnResourceType', json=response.meta['item']) + # attention: sodix calls their LRT "learnResourceType" + if potential_lrts: + for potential_lrt in potential_lrts: + if potential_lrt in self.MAPPING_LRT: + potential_lrt = self.MAPPING_LRT.get(potential_lrt) + valuespaces.add_value('learningResourceType', potential_lrt) + else: + # ToDo: lrt values that can't get mapped should be put into "keywords" to avoid losing them + pass return valuespaces From c1a3c939568c6a49fa51224f19ecd8bf9e1b436c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 1 Jul 2022 17:54:16 +0200 Subject: [PATCH 109/590] add: 3 missing licenses to constants.py - CC_BY_NC_40, CC_BY_ND_40 and CC_BY_NC_SA_40 was missing -- ToDo: we might have to update the "VALID_LICENSE_URLS"-list and the "LICENSE_MAPPINGS"-dict (need confirmation beforehand) --- converter/constants.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/converter/constants.py b/converter/constants.py index 1e9eea5e..54b8b1cc 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -4,7 +4,10 @@ class Constants: LICENSE_CC_BY_SA_40 = "https://creativecommons.org/licenses/by-sa/4.0/" LICENSE_CC_BY_30 = "https://creativecommons.org/licenses/by/3.0/" LICENSE_CC_BY_40 = "https://creativecommons.org/licenses/by/4.0/" + LICENSE_CC_BY_NC_40 = "https://creativecommons.org/licenses/by-nc/4.0/" + LICENSE_CC_BY_ND_40 = "https://creativecommons.org/licenses/by-nd/4.0/" 
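The learnResourceType handling in the preceding sodix patch leaves a ToDo for values without a mapping. A small sketch of the suggested fallback (keeping unmapped values as keywords); the two dictionary entries are taken from the crawler's MAPPING_LRT, the helper itself is illustrative:

MAPPING_LRT = {
    "ARBEITSBLATT": "worksheet",
    "WEBTOOL": ["web page", "tool"],  # a single Sodix value can map to several LRT values
}

def split_learn_resource_types(raw_values):
    # returns (mapped, keywords): known values are translated (lists are flattened),
    # unknown values are kept as keywords instead of being silently dropped
    mapped, keywords = [], []
    for value in raw_values or []:
        target = MAPPING_LRT.get(value)
        if target is None:
            keywords.append(value)
        elif isinstance(target, list):
            mapped.extend(target)
        else:
            mapped.append(target)
    return mapped, keywords

# split_learn_resource_types(["WEBTOOL", "INTERAKTION"]) -> (["web page", "tool"], ["INTERAKTION"])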
LICENSE_CC_BY_NC_SA_30 = "https://creativecommons.org/licenses/by-nc-sa/3.0/" + LICENSE_CC_BY_NC_SA_40 = "https://creativecommons.org/licenses/by-nc-sa/4.0/" LICENSE_CC_BY_NC_ND_30 = "https://creativecommons.org/licenses/by-nc-nd/3.0/" LICENSE_CC_BY_NC_ND_40 = "https://creativecommons.org/licenses/by-nc-nd/4.0/" LICENSE_PDM = "https://creativecommons.org/publicdomain/mark/1.0/" From b8028eb57faaba19093f8e619d2b05cd8007ebf1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 1 Jul 2022 18:11:12 +0200 Subject: [PATCH 110/590] sodix_spider v0.1.6 - add: license.author, license.description, license.internal, license.url -- add: mapping for license names --- if custom license fields are recognized that don't fit any valid licenses we can map to, those values get put into the "description"-field of the license loader - optimize imports --- converter/spiders/sodix_spider.py | 107 +++++++++++++++++++++++++----- 1 file changed, 89 insertions(+), 18 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 536c82c6..2baa580a 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -1,13 +1,12 @@ -from converter.items import * -from .base_classes import LomBase -from .base_classes import JSONBase import json -import logging + import requests -import html -from converter.constants import * import scrapy +from converter.constants import * +from converter.items import * +from .base_classes import JSONBase +from .base_classes import LomBase # Spider to fetch RSS from planet schule from .. import env @@ -16,7 +15,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.1.5" + version = "0.1.6" apiUrl = "https://api.sodix.de/gql/graphql" access_token: str = None page_size = 2500 @@ -68,7 +67,21 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): MAPPING_INTENDED_END_USER_ROLE = { "pupils": "learner", + } + MAPPING_LICENSE_NAMES = { + 'CC BY': Constants.LICENSE_CC_BY_40, + 'CC BY-NC': Constants.LICENSE_CC_BY_NC_40, + 'CC BY-NC-ND': Constants.LICENSE_CC_BY_NC_ND_40, + 'CC BY-NC-SA': Constants.LICENSE_CC_BY_NC_SA_40, + 'CC BY-ND': Constants.LICENSE_CC_BY_ND_40, + 'CC BY-SA': Constants.LICENSE_CC_BY_SA_40, + 'CC0': Constants.LICENSE_CC_ZERO_10, + 'Copyright, freier Zugang': Constants.LICENSE_COPYRIGHT_LAW, + 'Copyright, lizenzpflichtig': Constants.LICENSE_COPYRIGHT_LAW, + 'Gemeinfrei / Public Domain': Constants.LICENSE_PDM, + 'freie Lizenz': Constants.LICENSE_CUSTOM, + 'keine Angaben (gesetzliche Regelung)': Constants.LICENSE_CUSTOM, } def __init__(self, **kwargs): @@ -94,7 +107,6 @@ def getId(self, response): def getHash(self, response): return f"{response.meta['item'].get('updated')}v{self.version}" - # return response.meta["item"].get("updated") + "v" + self.version def getUri(self, response=None) -> str: # or media.originalUrl? 
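Besides the name mapping above, the getLicense() rewrite in the next hunk normalizes the localized Creative Commons deed URLs that Sodix delivers. The core string handling can be sketched as follows (the function name is illustrative):

def normalize_cc_deed_url(license_url: str) -> str:
    # "https://creativecommons.org/licenses/by-sa/4.0/deed.de" -> ".../by-sa/4.0/"
    # "https://creativecommons.org/licenses/by-nc/3.0/de/"     -> ".../by-nc/3.0/"
    if license_url.endswith("deed.de"):
        return license_url[:-len("deed.de")]
    if license_url.endswith("/de/") or license_url.endswith("/fr/"):
        return license_url[:-3]  # drop the country suffix, keep the trailing slash
    return license_url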
@@ -207,6 +219,8 @@ def parse_request(self, response): copyResponse.meta["item"] = item if self.hasChanged(copyResponse): yield self.handleEntry(copyResponse) + # ToDo: links to binary files (.jpeg) cause errors while building the BaseItem, we might have to filter + # specific media types / URLs yield self.startRequest(response.meta["page"] + 1) def handleEntry(self, response): @@ -260,15 +274,73 @@ def getLOMTechnical(self, response): return technical def getLicense(self, response): - license = LomBase.getLicense(self, response) - licenseId = self.get("license.name", json=response.meta["item"]) - licenseUrl = self.get("license.url", json=response.meta["item"]) - # @TODO: add mappings for the sodix names - # {None, 'CC BY-NC-SA', 'Copyright, lizenzpflichtig', 'CC BY-SA', 'CC BY-ND', 'CC BY', 'CC0', 'freie Lizenz', 'CC BY-NC-ND', 'keine Angaben (gesetzliche Regelung)', 'CC BY-NC', 'Gemeinfrei / Public Domain', 'Copyright, freier Zugang'} - url = None - # {'', 'https://creativecommons.org/licenses/by-nd/4.0/deed.de', 'https://creativecommons.org/licenses/by-sa/3.0/deed.de', 'https://creativecommons.org/licenses/by/3.0/deed.de', 'https://creativecommons.org/licenses/by-nc-sa/4.0/deed.de', 'https://creativecommons.org/licenses/by-nc-nd/4.0/deed.de', 'https://creativecommons.org/licenses/by/2.0/deed.de', 'https://creativecommons.org/licenses/by/4.0/', 'https://creativecommons.org/licenses/by-nc-nd/2.0/de/', 'https://creativecommons.org/licenses/by-nc-nd/3.0/de/', 'https://creativecommons.org/licenses/by-sa/2.0/deed.de', 'https://creativecommons.org/licenses/by-nd/3.0/deed.de', 'https://creativecommons.org/licenses/by-nd/2.0/de/', 'https://creativecommons.org/licenses/by-nc-sa/3.0/deed.de', 'https://creativecommons.org/licenses/by-sa/4.0/deed.de', 'https://creativecommons.org/licenses/by/2.5/deed.de', 'https://creativecommons.org/licenses/by-sa/2.0/de/', 'https://creativecommons.org/licenses/by/3.0/de/', 'https://creativecommons.org/licenses/by-nc-nd/3.0/deed.de', 'https://creativecommons.org/licenses/by-nc/3.0/de/', 'https://creativecommons.org/licenses/by-nd/3.0/de/', 'https://creativecommons.org/licenses/by-sa/2.5/deed.de', 'https://creativecommons.org/publicdomain/mark/1.0/deed.de', 'https://creativecommons.org/licenses/by-nc-sa/2.0/deed.de', 'https://creativecommons.org/licenses/by-sa/2.0/fr/deed.de', 'https://creativecommons.org/licenses/by-nc/3.0/deed.de', None, 'https://creativecommons.org/licenses/by-nc-sa/2.5/deed.de', 'https://creativecommons.org/licenses/by-nc/4.0/deed.de', 'https://creativecommons.org/publicdomain/zero/1.0/deed.de', 'https://creativecommons.org/licenses/by-sa/3.0/de/', 'https://creativecommons.org/licenses/by-nc-sa/3.0/de/'} - license.add_value("url", url) - return license + license_loader = LomBase.getLicense(self, response) + + author: str = self.get('author', json=response.meta['item']) + if author: + license_loader.add_value('author', author) + license_description: str = self.get("license.text", json=response.meta["item"]) + if license_description: + license_loader.add_value('description', license_description) + license_name: str = self.get("license.name", json=response.meta["item"]) + if license_name: + if license_name in self.MAPPING_LICENSE_NAMES: + license_internal_mapped = self.MAPPING_LICENSE_NAMES.get(license_name) + if license_name.startswith("CC"): + # ToDo: for CC-licenses the actual URL is more precise than our 'internal' license mapping + # (you will see differences between the 'internal' value and the actual URL from the API, + # e.g. 
a license pointing to v3.0 and v4.0 at the same time) + pass + else: + license_loader.add_value('internal', license_internal_mapped) + if not license_description: + # "name"-fields with the "Copyright, freier Zugang"-value don't have "text"-fields, therefore + # we're carrying over the custom description, just in case + license_loader.replace_value('description', license_name) + + license_url: str = self.get("license.url", json=response.meta["item"]) + # license_urls_sorted = ['https://creativecommons.org/licenses/by-nc-nd/2.0/de/', + # 'https://creativecommons.org/licenses/by-nc-nd/3.0/de/', + # 'https://creativecommons.org/licenses/by-nc-nd/3.0/deed.de', + # 'https://creativecommons.org/licenses/by-nc-nd/4.0/deed.de', + # 'https://creativecommons.org/licenses/by-nc-sa/2.0/deed.de', + # 'https://creativecommons.org/licenses/by-nc-sa/2.5/deed.de', + # 'https://creativecommons.org/licenses/by-nc-sa/3.0/de/', + # 'https://creativecommons.org/licenses/by-nc-sa/3.0/deed.de', + # 'https://creativecommons.org/licenses/by-nc-sa/4.0/deed.de', + # 'https://creativecommons.org/licenses/by-nc/3.0/de/', + # 'https://creativecommons.org/licenses/by-nc/3.0/deed.de', + # 'https://creativecommons.org/licenses/by-nc/4.0/deed.de', + # 'https://creativecommons.org/licenses/by-nd/2.0/de/', + # 'https://creativecommons.org/licenses/by-nd/3.0/de/', + # 'https://creativecommons.org/licenses/by-nd/3.0/deed.de', + # 'https://creativecommons.org/licenses/by-nd/4.0/deed.de', + # 'https://creativecommons.org/licenses/by-sa/2.0/de/', + # 'https://creativecommons.org/licenses/by-sa/2.0/deed.de', + # 'https://creativecommons.org/licenses/by-sa/2.0/fr/deed.de', + # 'https://creativecommons.org/licenses/by-sa/2.5/deed.de', + # 'https://creativecommons.org/licenses/by-sa/3.0/de/', + # 'https://creativecommons.org/licenses/by-sa/3.0/deed.de', + # 'https://creativecommons.org/licenses/by-sa/4.0/deed.de', + # 'https://creativecommons.org/licenses/by/2.0/deed.de', + # 'https://creativecommons.org/licenses/by/2.5/deed.de', + # 'https://creativecommons.org/licenses/by/3.0/de/', + # 'https://creativecommons.org/licenses/by/3.0/deed.de', + # 'https://creativecommons.org/licenses/by/4.0/', + # 'https://creativecommons.org/publicdomain/mark/1.0/deed.de', + # 'https://creativecommons.org/publicdomain/zero/1.0/deed.de'] + # ToDo: our constants.py doesn't have entries for v2.0 or 2.5 values of CC licenses + if license_url: + # making sure to only handle valid license urls, since the API result can be NoneType or empty string ('') + if license_url.endswith("deed.de"): + license_url = license_url[:-len("deed.de")] + if license_url.endswith("/de/"): + license_url = license_url[:-len("de/")] + # cutting off the "de/"-part of the URL while leaving the rest intact + elif license_url.endswith("/fr/"): + license_url = license_url[:-len("fr/")] + license_loader.replace_value('url', license_url) + return license_loader def getLOMEducational(self, response=None) -> LomEducationalItemLoader: educational = LomBase.getLOMEducational(response) @@ -320,4 +392,3 @@ def getValuespaces(self, response): # ToDo: lrt values that can't get mapped should be put into "keywords" to avoid losing them pass return valuespaces - From 133c85772ce2dccd94627d4d31198dd33a9891a9 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 5 Jul 2022 09:25:34 +0200 Subject: [PATCH 111/590] fix:sodix fetch token everytime to prevent timeouts --- converter/spiders/sodix_spider.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git 
a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 2baa580a..e0a1b474 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -17,7 +17,6 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): url = "https://sodix.de/" version = "0.1.6" apiUrl = "https://api.sodix.de/gql/graphql" - access_token: str = None page_size = 2500 MAPPING_LRT = { @@ -85,14 +84,6 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): } def __init__(self, **kwargs): - self.access_token = requests.post( - "https://api.sodix.de/gql/auth/login", - None, - { - "login": env.get("SODIX_SPIDER_USERNAME"), - "password": env.get("SODIX_SPIDER_PASSWORD"), - } - ).json()['access_token'] LomBase.__init__(self, **kwargs) def mapResponse(self, response): @@ -113,6 +104,14 @@ def getUri(self, response=None) -> str: return self.get("media.url", json=response.meta["item"]) def startRequest(self, page=0): + access_token = requests.post( + "https://api.sodix.de/gql/auth/login", + None, + { + "login": env.get("SODIX_SPIDER_USERNAME"), + "password": env.get("SODIX_SPIDER_PASSWORD"), + } + ).json()['access_token'] return scrapy.Request( url=self.apiUrl, callback=self.parse_request, @@ -200,7 +199,7 @@ def startRequest(self, page=0): headers={ "Accept": "application/json", "Content-Type": "application/json", - "Authorization": "Bearer " + self.access_token + "Authorization": "Bearer " + access_token }, meta={"page": page}, ) From ad3c53a137ef6a85d5420370968b60174562ecd1 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 6 Jul 2022 12:28:24 +0200 Subject: [PATCH 112/590] fix:sodix fetch token everytime to prevent timeouts --- converter/spiders/sodix_spider.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 2baa580a..e0a1b474 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -17,7 +17,6 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): url = "https://sodix.de/" version = "0.1.6" apiUrl = "https://api.sodix.de/gql/graphql" - access_token: str = None page_size = 2500 MAPPING_LRT = { @@ -85,14 +84,6 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): } def __init__(self, **kwargs): - self.access_token = requests.post( - "https://api.sodix.de/gql/auth/login", - None, - { - "login": env.get("SODIX_SPIDER_USERNAME"), - "password": env.get("SODIX_SPIDER_PASSWORD"), - } - ).json()['access_token'] LomBase.__init__(self, **kwargs) def mapResponse(self, response): @@ -113,6 +104,14 @@ def getUri(self, response=None) -> str: return self.get("media.url", json=response.meta["item"]) def startRequest(self, page=0): + access_token = requests.post( + "https://api.sodix.de/gql/auth/login", + None, + { + "login": env.get("SODIX_SPIDER_USERNAME"), + "password": env.get("SODIX_SPIDER_PASSWORD"), + } + ).json()['access_token'] return scrapy.Request( url=self.apiUrl, callback=self.parse_request, @@ -200,7 +199,7 @@ def startRequest(self, page=0): headers={ "Accept": "application/json", "Content-Type": "application/json", - "Authorization": "Bearer " + self.access_token + "Authorization": "Bearer " + access_token }, meta={"page": page}, ) From 6fa87683e80861ef973656235bdc6e9104683608 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 6 Jul 2022 12:30:12 +0200 Subject: [PATCH 113/590] feat:es support mulitple technical location urls --- converter/es_connector.py | 6 ++++-- converter/items.py | 3 ++- 2 files changed, 6 
insertions(+), 3 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index aa3723c4..4729d444 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -263,8 +263,10 @@ def transformItem(self, uuid, spider, item): "ccm:replicationsourcehash": item["hash"], "ccm:replicationsourceuuid": uuid, "cm:name": item["lom"]["general"]["title"], - "ccm:wwwurl": item["lom"]["technical"]["location"] if "location" in item["lom"]["technical"] else None, - "cclom:location": item["lom"]["technical"]["location"] if "location" in item["lom"]["technical"] else None, + "ccm:wwwurl": item["lom"]["technical"]["location"][0] + if "location" in item["lom"]["technical"] else None, + "cclom:location": item["lom"]["technical"]["location"] + if "location" in item["lom"]["technical"] else None, "cclom:format": item["lom"]["technical"]["format"] if "format" in item["lom"]["technical"] else None, "cclom:title": item["lom"]["general"]["title"], } diff --git a/converter/items.py b/converter/items.py index 60764e7a..c9fd2299 100644 --- a/converter/items.py +++ b/converter/items.py @@ -56,7 +56,8 @@ class LomLifecycleItem(Item): class LomTechnicalItem(Item): format = Field() size = Field() - location = Field() + location = Field(output_processor=JoinMultivalues()) + "URI/location of the element, multiple values are supported, the first entry is the primary location, while all others are secondary locations" requirement = Field() installationRemarks = Field() otherPlatformRequirements = Field() From 9d886cc3c316a73a70e6cbe85b7a93864f849f35 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 6 Jul 2022 12:30:21 +0200 Subject: [PATCH 114/590] sodix:multiple tech locations --- converter/spiders/sodix_spider.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index e0a1b474..81edf654 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -264,6 +264,11 @@ def getLOMTechnical(self, response): technical.replace_value( "location", self.getUri(response) ) + original = self.get("media.originalUrl", json=response.meta["item"]) + if original: + technical.add_value( + "location", original + ) technical.add_value( "duration", self.get("media.duration", json=response.meta["item"]) ) From dd50427fdf5db20b9fb4145789035c6d90307309 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 7 Jul 2022 09:56:45 +0200 Subject: [PATCH 115/590] fix:type casting issue when logging --- converter/spiders/base_classes/lom_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 7625f3c0..2a08c507 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -74,7 +74,7 @@ def hasChanged(self, response=None) -> bool: changed = db == None or db[1] != self.getHash(response) if not changed: logging.info( - "Item " + self.getId(response) + + "Item " + str(self.getId(response)) + "(uuid: " + db[0] + ") has not changed" ) return changed From fd66e21b5a8a022dcc704390c236332f9cf7d4b8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Jul 2022 01:24:50 +0200 Subject: [PATCH 116/590] fix: rudimentary binary file filtering for Splash requests - currently only .pdf and .docx files are skipped during Splash requests -- we need to build a proper exclusion list of filetypes which might crash Splash or only return LUA 
errors --- converter/web_tools.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index 2e7a0c16..7f45ac36 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -47,10 +47,9 @@ def __getUrlDataPlaywright(url: str): def __getUrlDataSplash(url: str): settings = get_project_settings() # html = None - if settings.get("SPLASH_URL") and not url.endswith((".pdf", ".docx")): + if settings.get("SPLASH_URL") and not url.endswith(".pdf") and not url.endswith(".docx"): # Splash can't handle some binary direct-links (Splash will throw "LUA Error 400: Bad Request" as a result) - # ToDo: which additional filetypes need to be added to the exclusion list? - # ToDo: find general solution for extracting metadata from .pdf-files? + # ToDo: which additional filetypes need to be added to the exclusion list? - media files (.mp3, mp4 etc.?) result = requests.post( settings.get("SPLASH_URL") + "/render.json", json={ From f920a3266db8780cdc0af51ff63d91a284834bc4 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Jul 2022 01:33:19 +0200 Subject: [PATCH 117/590] fix: pipeline Splash handling of multiple "technical.location" fields when there is no "base.thumbnail"-field - in the edge-case where a spider source didn't serve any "thumbnail"-metadata, the pipeline would fail to get a thumbnail via the Splash container -- fixed it by using the first URL found within "technical.location" (primary URL) --- converter/pipelines.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index fb5f1261..48a33535 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -324,7 +324,8 @@ def process_item(self, raw_item, spider): response = requests.post( settings.get("SPLASH_URL") + "/render.png", json={ - "url": item["lom"]["technical"]["location"], + "url": item["lom"]["technical"]["location"][0], + # since there can be multiple "technical.location"-values, the first URL is used for thumbnails "wait": settings.get("SPLASH_WAIT"), "html5_media": 1, "headers": settings.get("SPLASH_HEADERS"), From f88a64871e1564db9c990dff22712236eb3acaf4 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 13 Jul 2022 12:25:58 +0200 Subject: [PATCH 118/590] fix:check if technical location array has more than zero entries --- converter/pipelines.py | 1 + 1 file changed, 1 insertion(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 48a33535..71299489 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -317,6 +317,7 @@ def process_item(self, raw_item, spider): ) elif ( "location" in item["lom"]["technical"] + and len(item["lom"]["technical"]["location"]) > 0 and "format" in item["lom"]["technical"] and item["lom"]["technical"]["format"] == "text/html" ): From 5bbddde71a579c72d06889265931328d32c22348 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 29 Jul 2022 12:54:01 +0200 Subject: [PATCH 119/590] extend ProcessThumbnailPipeline for Playwright screenshots - by using the spider class attribute WEB_TOOLS we are able to control which technology is used for screenshotting websites -- by default, if the spider class attribute is missing, it will always default to Splash -- there's 2 different use-cases for Playwright screenshots: --- if the crawler is already using Playwright, we can save the screenshot byte-stream to base.screenshot_bytes and therefore skip one HTTP Request to 
the target domain --- in the edge-case of serlo_spider we are only using Playwright for screenshots since Splash's screenshots are too unreliable - refactor: create thumbnails from image byte-streams - add: documentation regarding the expected pipeline behaviour - optimize imports items.py: - add optional "BaseItem.screenshot_bytes"-field web_tools.py: - extended the getUrlDataPlaywright()-method to also take a screenshot while we're already visiting a target-website (so we can save one unnecessary HTTP Request in the thumbnail pipeline) serlo_spider v0.2.1: - version bump (to fetch new thumbnails for the edu-sharing repo) --- converter/items.py | 2 + converter/pipelines.py | 102 +++++++++++++++++++++--------- converter/spiders/serlo_spider.py | 7 +- converter/web_tools.py | 19 +++++- 4 files changed, 94 insertions(+), 36 deletions(-) diff --git a/converter/items.py b/converter/items.py index c9fd2299..959c16c2 100644 --- a/converter/items.py +++ b/converter/items.py @@ -194,6 +194,8 @@ class BaseItem(Item): "editorial notes" binary = Field() "binary data which should be uploaded (raw data)" + screenshot_bytes = Field() + # this is a (temporary) field that gets deleted after the thumbnail pipeline processed its byte-data class BaseItemLoader(ItemLoader): diff --git a/converter/pipelines.py b/converter/pipelines.py index 71299489..17ec58ae 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -1,33 +1,35 @@ # -*- coding: utf-8 -*- from __future__ import annotations + import base64 # Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html import csv -import dateparser -import io -import isodate import logging import time +from abc import ABCMeta from io import BytesIO from typing import BinaryIO, TextIO, Optional -from abc import ABCMeta + +import dateparser import dateutil.parser +import isodate import requests -from PIL import Image import scrapy import scrapy.crawler +from PIL import Image +from itemadapter import ItemAdapter from scrapy.exceptions import DropItem from scrapy.exporters import JsonItemExporter from scrapy.utils.project import get_project_settings -from itemadapter import ItemAdapter from converter import env from converter.constants import * from converter.es_connector import EduSharing +from converter.web_tools import WebTools, WebEngine from valuespace_converter.app.valuespaces import Valuespaces log = logging.getLogger(__name__) @@ -94,6 +96,7 @@ class LOMFillupPipeline(BasicPipeline): """ fillup missing props by "guessing" or loading them if possible """ + def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) if "fulltext" not in item and "text" in item["response"]: @@ -150,6 +153,7 @@ def process_item(self, raw_item, spider): except KeyError: raise DropItem(f'Item {item} was dropped for not providing enough metadata') + class NormLicensePipeline(BasicPipeline): def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) @@ -197,6 +201,7 @@ class ConvertTimePipeline(BasicPipeline): convert typicalLearningTime into an integer representing seconds + convert duration into an integer """ + def process_item(self, raw_item, spider): # map lastModified item = ItemAdapter(raw_item) @@ -238,7 +243,7 @@ def process_item(self, raw_item, spider): if duration: if len(duration.split(":")) == 3: duration = isodate.parse_time(duration) - duration = duration.hour*60*60 + duration.minute*60 + duration.second + duration = duration.hour * 
60 * 60 + duration.minute * 60 + duration.second elif duration.startswith("PT"): duration = int(isodate.parse_duration(duration).total_seconds()) else: @@ -255,6 +260,7 @@ class ProcessValuespacePipeline(BasicPipeline): """ generate de_DE / i18n strings for valuespace fields """ + def __init__(self): self.valuespaces = Valuespaces() @@ -305,10 +311,26 @@ def scale_image(img, max_size): return img.resize((int(w), int(h)), Image.ANTIALIAS).convert("RGB") def process_item(self, raw_item, spider): + """ + By default the thumbnail-pipeline handles several cases: + - if there is a URL-string inside the "BaseItem.thumbnail"-field: + -- download image from URL; rescale it into different sizes (small/large); + --- save the thumbnails as base64 within + ---- "BaseItem.thumbnail.small", "BaseItem.thumbnail.large" + --- (afterwards delete the URL from "BaseItem.thumbnail") + + - if there is NO "BaseItem.thumbnail"-field: + -- default: take a screenshot of the URL from "technical.location" with Splash, rescale and save (as above) + -- alternatively, on-demand: use Playwright to take a screenshot, rescale and save (as above) + """ item = ItemAdapter(raw_item) response = None url = None settings = get_project_settings() + # checking if the (optional) attribute WEB_TOOLS exists within the specific spider class: + web_tools_spider_attribute = getattr(spider, "WEB_TOOLS", WebEngine.Splash) + # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, + # it will default back to "splash" if "thumbnail" in item: url = item["thumbnail"] response = requests.get(url) @@ -321,7 +343,7 @@ def process_item(self, raw_item, spider): and "format" in item["lom"]["technical"] and item["lom"]["technical"]["format"] == "text/html" ): - if settings.get("SPLASH_URL"): + if settings.get("SPLASH_URL") and web_tools_spider_attribute == WebEngine.Splash: response = requests.post( settings.get("SPLASH_URL") + "/render.png", json={ @@ -332,6 +354,23 @@ def process_item(self, raw_item, spider): "headers": settings.get("SPLASH_HEADERS"), }, ) + if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools_spider_attribute == WebEngine.Playwright: + if "screenshot_bytes" in item: + # in case we are already using playwright in a spider, we can skip one additional HTTP Request by + # accessing the (temporary available) "screenshot_bytes"-field + img = Image.open(BytesIO(item["screenshot_bytes"])) + self.create_thumbnails_from_image_bytes(img, item, settings) + del item["screenshot_bytes"] + # the final BaseItem data model doesn't use screenshot_bytes, + # therefore we delete it after we're done processing it + else: + # this edge-case is necessary for spiders that only need playwright to gather a screenshot, + # but don't use playwright within the spider itself (e.g. 
serlo_spider) + playwright_dict = WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], + engine=WebEngine.Playwright) + screenshot_bytes = playwright_dict.get("screenshot_bytes") + img = Image.open(BytesIO(screenshot_bytes)) + self.create_thumbnails_from_image_bytes(img, item, settings) else: if settings.get("DISABLE_SPLASH") is False: log.warning( @@ -360,28 +399,7 @@ def process_item(self, raw_item, spider): ).decode() else: img = Image.open(BytesIO(response.content)) - small = BytesIO() - self.scale_image(img, settings.get("THUMBNAIL_SMALL_SIZE")).save( - small, - "JPEG", - mode="RGB", - quality=settings.get("THUMBNAIL_SMALL_QUALITY"), - ) - large = BytesIO() - self.scale_image(img, settings.get("THUMBNAIL_LARGE_SIZE")).save( - large, - "JPEG", - mode="RGB", - quality=settings.get("THUMBNAIL_LARGE_QUALITY"), - ) - item["thumbnail"] = {} - item["thumbnail"]["mimetype"] = "image/jpeg" - item["thumbnail"]["small"] = base64.b64encode( - small.getvalue() - ).decode() - item["thumbnail"]["large"] = base64.b64encode( - large.getvalue() - ).decode() + self.create_thumbnails_from_image_bytes(img, item, settings) except Exception as e: if url is not None: log.warning( @@ -401,6 +419,30 @@ def process_item(self, raw_item, spider): ) return raw_item + def create_thumbnails_from_image_bytes(self, image, item, settings): + small = BytesIO() + self.scale_image(image, settings.get("THUMBNAIL_SMALL_SIZE")).save( + small, + "JPEG", + mode="RGB", + quality=settings.get("THUMBNAIL_SMALL_QUALITY"), + ) + large = BytesIO() + self.scale_image(image, settings.get("THUMBNAIL_LARGE_SIZE")).save( + large, + "JPEG", + mode="RGB", + quality=settings.get("THUMBNAIL_LARGE_QUALITY"), + ) + item["thumbnail"] = {} + item["thumbnail"]["mimetype"] = "image/jpeg" + item["thumbnail"]["small"] = base64.b64encode( + small.getvalue() + ).decode() + item["thumbnail"]["large"] = base64.b64encode( + large.getvalue() + ).decode() + class EduSharingCheckPipeline(EduSharing, BasicPipeline): def process_item(self, raw_item, spider): diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index d80ad75f..5d99fda8 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -2,21 +2,22 @@ import requests import scrapy -from scrapy.spiders import CrawlSpider from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader from converter.spiders.base_classes import LomBase +from converter.web_tools import WebEngine -class SerloSpider(CrawlSpider, LomBase): +class SerloSpider(scrapy.Spider, LomBase): name = "serlo_spider" friendlyName = "serlo_spider" # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2" # last update: 2022-03-14 + version = "0.2.1" # last update: 2022-07-29 + WEB_TOOLS = WebEngine.Playwright graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: diff --git a/converter/web_tools.py b/converter/web_tools.py index 7f45ac36..998c839d 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -40,8 +40,14 @@ def __getUrlDataPyppeteer(url: str): @staticmethod def __getUrlDataPlaywright(url: str): - html = asyncio.run(WebTools.fetchDataPlaywright(url)) - return {"html": html, "text": 
WebTools.html2Text(html), "cookies": None, "har": None} + playwright_dict = asyncio.run(WebTools.fetchDataPlaywright(url)) + html = playwright_dict.get("content") + screenshot_bytes = playwright_dict.get("screenshot_bytes") + return {"html": html, + "text": WebTools.html2Text(html), + "cookies": None, + "har": None, + "screenshot_bytes": screenshot_bytes} @staticmethod def __getUrlDataSplash(url: str): @@ -99,8 +105,15 @@ async def fetchDataPlaywright(url: str): # waits for page to fully load (= no network traffic for 500ms), # maximum timeout: 90s content = await page.content() + screenshot_bytes = await page.screenshot() + # ToDo: HAR / text / cookies + # if we are able to replicate the Splash response with all its fields, we could save traffic/Requests + # that are currently still being handled by Splash # await page.close() - return content + return { + "content": content, + "screenshot_bytes": screenshot_bytes + } @staticmethod def html2Text(html: str): From e9f63d4e167b3bb1718c2aaf3bb042603f824b43 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 29 Jul 2022 13:14:16 +0200 Subject: [PATCH 120/590] add: documentation for Playwright to sample_spider_alternative - add: base.screenshot_bytes explanation in contrast to base.thumbnail -- base.thumbnail expects a URL as a string -- while base.screenshot_bytes (temporarily) stores a byte-stream of a website screenshot taken by Playwright, which is consumed and deleted from the BaseItem after processing the byte-date --- .../spiders/sample_spider_alternative.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index ba362c66..33198f9f 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -6,18 +6,23 @@ LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, \ PermissionItemLoader, LomClassificationItemLoader from converter.spiders.base_classes import LomBase +from converter.web_tools import WebEngine, WebTools # This is an alternative approach to our previous "sample_spider.py" that might be easier to read and understand # for web crawling beginners. Use whichever approach is more convenient for you. # LAST UPDATE: 2021-08-20 # please also consult converter/items.py for all currently available keys/values in our crawler data model + + class SampleSpiderAlternative(CrawlSpider, LomBase): name = "sample_spider_alternative" friendlyName = "Sample Source (alternative Method)" # how your crawler should appear in the "Supplier"-list start_urls = ["https://edu-sharing.com"] # starting point of your crawler, e.g. a sitemap, index, rss-feed etc. 
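# A minimal stand-alone sketch (not taken from the patches above) of the "screenshot_bytes"
# hand-off that this patch documents: "thumbnail" expects a URL string, while
# "screenshot_bytes" temporarily carries raw image bytes until the thumbnail pipeline
# consumes and deletes the field. It only relies on calls that appear in this patch
# series (WebTools.getUrlData, WebEngine, BaseItemLoader); the helper function and the
# placeholder URL are hypothetical.
from converter.items import BaseItemLoader
from converter.web_tools import WebEngine, WebTools


def build_base_with_screenshot(url: str) -> BaseItemLoader:
    # fetch html/text plus a screenshot byte-stream via Playwright
    playwright_dict: dict = WebTools.getUrlData(url, WebEngine.Playwright)
    base = BaseItemLoader()
    screenshot_bytes = playwright_dict.get("screenshot_bytes")
    if screenshot_bytes:
        # temporary field: the thumbnail pipeline scales it into small/large
        # base64 thumbnails and removes it from the item afterwards
        base.add_value("screenshot_bytes", screenshot_bytes)
    else:
        # fallback: the "thumbnail" field expects a URL (as a string), not raw bytes
        base.add_value("thumbnail", "https://example.org/placeholder-thumbnail.png")
    return base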
version = "0.0.1" # this is used for timestamping your crawler results (if a source changes its layout/data, # make sure to increment this value to force a clear distinction between old and new crawler results) + WEB_TOOLS = WebEngine.Playwright # OPTIONAL: this attribute controls which tool is used for taking Screenshots + # you can skip this attribute altogether if you want to use the default Settings (Splash) def getId(self, response=None) -> str: # You have two choices here: @@ -37,6 +42,12 @@ def start_requests(self): yield scrapy.Request(url=start_url, callback=self.parse) def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + # OPTIONAL: If you need to use playwright to crawl a website, this is how you can access the data provided + # by Playwright's headless browser + playwright_dict: dict = WebTools.getUrlData(response.url, WebEngine.Playwright) + html_body = playwright_dict.get("html") + screenshot_bytes = playwright_dict.get("screenshot_bytes") # to be used in base.screenshot_bytes + base = BaseItemLoader() # ALL possible keys for the different Item and ItemLoader-classes can be found inside converter/items.py @@ -66,7 +77,9 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # to access or premium_only). in this example, items that have the "premium_only"-value will be sent to the # "SYNC_OBJ//premium_only/"-folder. # (This field is used in two different use-cases, both in "youtube_spider" and "lehreronline_spider") - base.add_value('thumbnail', thumbnail_url) + base.add_value('thumbnail', thumbnail_url) # the thumbnail field expects an URL (as a String) + base.add_value('screenshot_bytes', screenshot_bytes) # this is an OPTIONAL field that will be CONSUMED within + # the thumbnail pipeline to create a small/large thumbnail of the website itself lom = LomBaseItemloader() # TODO: afterwards fill up the LomBaseItem with @@ -108,7 +121,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # or replaced with: # technical.replace_value('key', 'value') technical.add_value('format', 'text/html') # e.g. 
if the learning object is a web-page - technical.add_value('location', response.url) # if the the learning object has a unique URL that's being + technical.add_value('location', response.url) # if the the learning object has a unique URL that's being # navigated by the crawler lom.add_value('technical', technical.load_item()) @@ -122,7 +135,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - organization optional # - email optional # - uuid optional - lifecycle.add_value('role', 'author') # supported roles: "author" / "editor" / "publisher" + lifecycle.add_value('role', 'author') # supported roles: "author" / "editor" / "publisher" # for available roles mapping, please take a look at converter/es_connector.py lom.add_value('lifecycle', lifecycle.load_item()) From c83c0dc4ba94f5f3a43b15876edf742d5888e0ee Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 29 Jul 2022 16:16:54 +0200 Subject: [PATCH 121/590] serlo_spider v0.2.2 - fix: text, html_body, screenshot -- since Splash can't render Serlo properly, the html body and fulltext were showing ApiExceptions instead of actually useful information -- we now use Playwright for all three metadata fields instead --- converter/spiders/serlo_spider.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 5d99fda8..c55383f9 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -7,7 +7,7 @@ from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader from converter.spiders.base_classes import LomBase -from converter.web_tools import WebEngine +from converter.web_tools import WebEngine, WebTools class SerloSpider(scrapy.Spider, LomBase): @@ -16,7 +16,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.1" # last update: 2022-07-29 + version = "0.2.2" # last update: 2022-07-29 WEB_TOOLS = WebEngine.Playwright graphql_items = list() @@ -112,11 +112,16 @@ def parse(self, response, **kwargs): json_ld = response.xpath('//*[@type="application/ld+json"]/text()').get() json_ld = json.loads(json_ld) + playwright_dict = WebTools.getUrlData(response.url, WebEngine.Playwright) + html_body = playwright_dict.get("html") + screenshot_bytes = playwright_dict.get("screenshot_bytes") + html_text = playwright_dict.get("text") + base = BaseItemLoader() # # ALL possible keys for the different Item and ItemLoader-classes can be found inside converter/items.py # # TODO: fill "base"-keys with values for # # - thumbnail recommended - + base.add_value('screenshot_bytes', screenshot_bytes) # The actual URL of a learning material is dynamic and can change at any given time # (e.g. 
when the title gets changed by a serlo editor), therefore we use the "id"-field # or the identifier number as a stable ID @@ -325,6 +330,8 @@ def parse(self, response, **kwargs): base.add_value('permissions', permissions.load_item()) response_loader = super().mapResponse(response) + response_loader.replace_value('html', html_body) + response_loader.replace_value('text', html_text) base.add_value('response', response_loader.load_item()) yield base.load_item() From 830cccb283985466ec208c605ff126ab0dc07a8b Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 1 Aug 2022 11:38:31 +0200 Subject: [PATCH 122/590] fix:wrap configs as custom_settings, always allow to provide screenshot as bytes regardless of web tool --- converter/pipelines.py | 66 ++++++++++++++-------- converter/spiders/base_classes/lom_base.py | 8 +++ converter/spiders/serlo_spider.py | 6 +- 3 files changed, 55 insertions(+), 25 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 17ec58ae..e0a9e8fb 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -326,24 +326,35 @@ def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) response = None url = None - settings = get_project_settings() - # checking if the (optional) attribute WEB_TOOLS exists within the specific spider class: - web_tools_spider_attribute = getattr(spider, "WEB_TOOLS", WebEngine.Splash) - # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, - # it will default back to "splash" - if "thumbnail" in item: + settings = self.get_settings_for_crawler(spider) + # checking if the (optional) attribute WEB_TOOLS exists: + web_tools = settings.get("WEB_TOOLS", WebEngine.Splash) + # if screenshot_bytes is provided (the crawler has already a binary representation of the image + # the pipeline will convert/scale the given image + if "screenshot_bytes" in item: + # in case we are already using playwright in a spider, we can skip one additional HTTP Request by + # accessing the (temporary available) "screenshot_bytes"-field + img = Image.open(BytesIO(item["screenshot_bytes"])) + self.create_thumbnails_from_image_bytes(img, item, settings) + # the final BaseItem data model doesn't use screenshot_bytes, + # therefore we delete it after we're done processing it + del item["screenshot_bytes"] + + # a thumbnail (url) is given - we will try to fetch it from the url + elif "thumbnail" in item: url = item["thumbnail"] response = requests.get(url) log.debug( "Loading thumbnail took " + str(response.elapsed.total_seconds()) + "s" ) + # nothing was given, we try to screenshot the page either via Splash or Playwright elif ( "location" in item["lom"]["technical"] and len(item["lom"]["technical"]["location"]) > 0 and "format" in item["lom"]["technical"] and item["lom"]["technical"]["format"] == "text/html" ): - if settings.get("SPLASH_URL") and web_tools_spider_attribute == WebEngine.Splash: + if settings.get("SPLASH_URL") and web_tools == WebEngine.Splash: response = requests.post( settings.get("SPLASH_URL") + "/render.png", json={ @@ -354,23 +365,17 @@ def process_item(self, raw_item, spider): "headers": settings.get("SPLASH_HEADERS"), }, ) - if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools_spider_attribute == WebEngine.Playwright: - if "screenshot_bytes" in item: - # in case we are already using playwright in a spider, we can skip one additional HTTP Request by - # accessing the (temporary available) "screenshot_bytes"-field - img = Image.open(BytesIO(item["screenshot_bytes"])) - 
self.create_thumbnails_from_image_bytes(img, item, settings) - del item["screenshot_bytes"] - # the final BaseItem data model doesn't use screenshot_bytes, - # therefore we delete it after we're done processing it - else: - # this edge-case is necessary for spiders that only need playwright to gather a screenshot, - # but don't use playwright within the spider itself (e.g. serlo_spider) - playwright_dict = WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], - engine=WebEngine.Playwright) - screenshot_bytes = playwright_dict.get("screenshot_bytes") - img = Image.open(BytesIO(screenshot_bytes)) - self.create_thumbnails_from_image_bytes(img, item, settings) + if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright: + # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, + # it will default back to "splash" + + # this edge-case is necessary for spiders that only need playwright to gather a screenshot, + # but don't use playwright within the spider itself (e.g. serlo_spider) + playwright_dict = WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], + engine=WebEngine.Playwright) + screenshot_bytes = playwright_dict.get("screenshot_bytes") + img = Image.open(BytesIO(screenshot_bytes)) + self.create_thumbnails_from_image_bytes(img, item, settings) else: if settings.get("DISABLE_SPLASH") is False: log.warning( @@ -419,6 +424,19 @@ def process_item(self, raw_item, spider): ) return raw_item + # override the project settings with the given ones from the current spider + # see PR 56 for details + def get_settings_for_crawler(self, spider): + all_settings = get_project_settings() + crawler_settings = getattr(spider, "custom_settings", {}) + for key in crawler_settings.keys(): + if ( + all_settings.get(key) and crawler_settings.get(key).priority > all_settings.get(key).priority + or not all_settings.get(key) + ): + all_settings.set(key, crawler_settings.get(key)) + return all_settings + def create_thumbnails_from_image_bytes(self, image, item, settings): small = BytesIO() self.scale_image(image, settings.get("THUMBNAIL_SMALL_SIZE")).save( diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 7625f3c0..afe07f2e 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -1,5 +1,7 @@ import html2text import logging + +import scrapy.settings from scrapy.utils.project import get_project_settings from converter.constants import Constants @@ -20,6 +22,12 @@ class LomBase: remoteId = None forceUpdate = False + # you can specify custom settings which will later influence the behaviour of the pipelines for your crawler + custom_settings: scrapy.settings.Settings = { + # web tools to use, relevant for screenshots/thumbnails + "WEB_TOOLS": scrapy.settings.SettingsAttribute(WebEngine.Splash, scrapy.settings.SETTINGS_PRIORITIES["spider"]) + } + def __init__(self, **kwargs): if self.name is None: raise NotImplementedError(f'{self.__class__.__name__}.name is not defined on crawler') diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index c55383f9..1817bd7b 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -2,6 +2,7 @@ import requests import scrapy +import scrapy.settings from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ @@ -17,7 +18,10 @@ class SerloSpider(scrapy.Spider, 
LomBase): API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api version = "0.2.2" # last update: 2022-07-29 - WEB_TOOLS = WebEngine.Playwright + custom_settings: scrapy.settings.Settings = { + # playwright cause of issues with thumbnails+text for serlo + "WEB_TOOLS": scrapy.settings.SettingsAttribute(WebEngine.Playwright, scrapy.settings.SETTINGS_PRIORITIES["spider"]) + } graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: From 4d8c1e8b6f79240398f42d40254dd35c35c189da Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 1 Aug 2022 11:55:35 +0200 Subject: [PATCH 123/590] fix: init settings via scrapy method + fix override issues --- converter/pipelines.py | 24 ++++++++++++---------- converter/spiders/base_classes/lom_base.py | 8 ++++---- converter/spiders/serlo_spider.py | 10 ++++----- 3 files changed, 22 insertions(+), 20 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index e0a9e8fb..74014fa2 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -326,7 +326,7 @@ def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) response = None url = None - settings = self.get_settings_for_crawler(spider) + settings = get_settings_for_crawler(spider) # checking if the (optional) attribute WEB_TOOLS exists: web_tools = settings.get("WEB_TOOLS", WebEngine.Splash) # if screenshot_bytes is provided (the crawler has already a binary representation of the image @@ -426,16 +426,6 @@ def process_item(self, raw_item, spider): # override the project settings with the given ones from the current spider # see PR 56 for details - def get_settings_for_crawler(self, spider): - all_settings = get_project_settings() - crawler_settings = getattr(spider, "custom_settings", {}) - for key in crawler_settings.keys(): - if ( - all_settings.get(key) and crawler_settings.get(key).priority > all_settings.get(key).priority - or not all_settings.get(key) - ): - all_settings.set(key, crawler_settings.get(key)) - return all_settings def create_thumbnails_from_image_bytes(self, image, item, settings): small = BytesIO() @@ -462,6 +452,18 @@ def create_thumbnails_from_image_bytes(self, image, item, settings): ).decode() +def get_settings_for_crawler(spider): + all_settings = get_project_settings() + crawler_settings = getattr(spider, "custom_settings", {}) + for key in crawler_settings.keys(): + if ( + all_settings.get(key) and crawler_settings.getpriority(key) > all_settings.getpriority(key) + or not all_settings.get(key) + ): + all_settings.set(key, crawler_settings.get(key), crawler_settings.getpriority(key)) + return all_settings + + class EduSharingCheckPipeline(EduSharing, BasicPipeline): def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index afe07f2e..4e0a5179 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -1,7 +1,7 @@ import html2text import logging -import scrapy.settings +from scrapy import settings from scrapy.utils.project import get_project_settings from converter.constants import Constants @@ -23,10 +23,10 @@ class LomBase: forceUpdate = False # you can specify custom settings which will later influence the behaviour of the pipelines for your crawler - custom_settings: scrapy.settings.Settings = { + custom_settings = settings.BaseSettings({ # web tools to use, 
relevant for screenshots/thumbnails - "WEB_TOOLS": scrapy.settings.SettingsAttribute(WebEngine.Splash, scrapy.settings.SETTINGS_PRIORITIES["spider"]) - } + "WEB_TOOLS": WebEngine.Splash, + }, 'spider') def __init__(self, **kwargs): if self.name is None: diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 1817bd7b..e365a5b9 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -2,7 +2,7 @@ import requests import scrapy -import scrapy.settings +from scrapy import settings from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ @@ -18,10 +18,10 @@ class SerloSpider(scrapy.Spider, LomBase): API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api version = "0.2.2" # last update: 2022-07-29 - custom_settings: scrapy.settings.Settings = { - # playwright cause of issues with thumbnails+text for serlo - "WEB_TOOLS": scrapy.settings.SettingsAttribute(WebEngine.Playwright, scrapy.settings.SETTINGS_PRIORITIES["spider"]) - } + custom_settings = settings.BaseSettings({ + # playwright cause of issues with thumbnails+text for serlo + "WEB_TOOLS": WebEngine.Playwright + }, 'spider') graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: From 18539e04e9d85a7aa59e3254d0ccf74d9ca6cea2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 11 Jul 2022 14:46:40 +0200 Subject: [PATCH 124/590] science_in_school_spider v0.0.3 - fix: "license.url"-mapping -- the previous mapping was missing a trailing "/", which while still being a valid URL, didn't resolve properly on the edu-sharing side of things (instead of CC-BY-NC-ND the license was recognized as "None") --- converter/spiders/science_in_school_spider.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index 759ebd43..6ead6ec2 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -17,7 +17,7 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): start_urls = [ "https://www.scienceinschool.org/issue/" ] - version = "0.0.2" # last update: 2022-07-01 + version = "0.0.3" # last update: 2022-07-11 custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True @@ -37,9 +37,9 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): "Sustainability": "Sustainability" } LICENSE_MAPPING = { - "CC-BY": "https://creativecommons.org/licenses/by-sa/4.0", - "CC-BY-NC-SA": "https://creativecommons.org/licenses/by-nc-sa/4.0", - "CC-BY-NC-ND": "https://creativecommons.org/licenses/by-nc-nd/4.0" + "CC-BY": Constants.LICENSE_CC_BY_40, + "CC-BY-NC-SA": Constants.LICENSE_CC_BY_NC_SA_40, + "CC-BY-NC-ND": Constants.LICENSE_CC_BY_NC_ND_40 } KEYWORD_EXCLUSION_LIST = [ "Not applicable", "not applicable" @@ -230,10 +230,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: pass base = BaseItemLoader() - - # TODO: fill "base"-keys with values for - # - binary optional (only needed if you're working with binary files (e.g. 
.pdf-files), - # if you want to see an example, check out "niedersachsen_abi_spider.py") base.add_value('sourceId', response.url) hash_temp: str = f"{date_published}v{self.version}" base.add_value('hash', hash_temp) From 48e5223b156972c97a4bc125bf6ecc4ca8185f2c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 24 May 2022 18:55:29 +0200 Subject: [PATCH 125/590] lehreronline_spider v0.0.1 (squashed) - this crawler uses XMLFeedSpider to parse through an "Elixier system XML" - add: distinction between content types (free/paid) -- by using the base.origin field to create different sub-folders for "frei_und_kostenlos", "free_account_required" and "premium_only" learning objects - add: documentation / DocStrings / typehints - add: detection for -elements with empty -tags -- these would previously cause parse_node() to fail and skip the (thousands) of URLs after the mis-behaving entry - harden metadata detection for individual fields that are missing in the Lehrer-Online API (even though some fields were expected to be always available, they actually were not) - add: distinction between actual disciplines and additional_keywords - add: Mapping for material_type to new_lrt - add: Mapping for educationalContext - add: complete dataset API (enable this optionally, only if you want to crawl everything at once) - successful crawl (periodic): 275 items (if using the "?type=3030" start_url) -- currently holds 5685 unique URLs to crawl --- converter/spiders/lehreronline_spider.py | 535 +++++++++++++++++++++++ 1 file changed, 535 insertions(+) create mode 100644 converter/spiders/lehreronline_spider.py diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py new file mode 100644 index 00000000..5eba35cf --- /dev/null +++ b/converter/spiders/lehreronline_spider.py @@ -0,0 +1,535 @@ +from datetime import datetime + +import scrapy.selector.unified +import w3lib.html +from scrapy.spiders import XMLFeedSpider + +from converter.constants import Constants +from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, \ + LicenseItemLoader +from converter.spiders.base_classes import LomBase + + +class LehrerOnlineSpider(XMLFeedSpider, LomBase): + name = "lehreronline_spider" + friendlyName = "Lehrer-Online" + start_urls = [ + "https://www.lehrer-online.de/?type=3030", # only the 25 newest items per category (275 in total) + # "https://www.lehrer-online.de/?type=4040&limit=10000" # complete data-set (~5688 URLs in total, the initial + # API response is currently ~18MB. The initial loading of the API response takes about ~56s to complete) + ] + version = "0.0.1" # last update: 2022-05-31 + custom_settings = { + "ROBOTSTXT_OBEY": False, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True, + # "DUPEFILTER_DEBUG": True + } + iterator = 'iternodes' + itertag = 'datensatz' + + MAPPING_EDU_CONTEXT = { + 'Elementarbildung': 'Elementarbereich', + 'Fort- und Weiterbildung': 'Fortbildung', + 'Spezieller Förderbedarf': 'Förderschule' + } + + MAPPING_MATERIAL_TYPE_TO_NEW_LRT = { + 'Blog': '5204fc81-5dac-4cc4-a28b-aad5c241fa19', # "Webblog (dynamisch)" + 'Cartoon': '667f5063-70b9-400c-b1f7-7702ec9487f1', # "Cartoon, Comic" + 'Dossier': '7381f17f-50a6-4ce1-b3a0-9d85a482eec0', # "Unterrichtsplanung" + # Dossiers are hard to categorize, they typically consist of several types (news, "Unterrichtseinheit" etc.) 
+ # that are put together as a "Fokusthema", similar to how Umwelt-im-Unterricht.de groups together several + # articles into a "Thema der Woche" + 'Fachartikel': 'b98c0c8c-5696-4537-82fa-dded7236081e', # "Artikel und Einzelpublikation" + 'Fundstueck': 'dc5763ab-6f47-4aa3-9ff3-1303efbeef6e', # "Nachrichten und Neuigkeiten + 'Interaktives': '4665caac-99d7-4da3-b9fb-498d8ece034f', # "Interaktives Medium" + 'Kopiervorlage': '6a15628c-0e59-43e3-9fc5-9a7f7fa261c4', # "Skript, Handout und Handreichung" + 'News': 'dc5763ab-6f47-4aa3-9ff3-1303efbeef6e', # "Nachrichten und Neuigkeiten" + 'Rechtsfall': 'dc5763ab-6f47-4aa3-9ff3-1303efbeef6e', # "Nachrichten und Neuigkeiten" + # ToDo: could this be mapped to either "Fachliche News", "Alltags News" or "Pädagogische News"? + 'Unterrichtseinheit': 'ef58097d-c1de-4e6a-b4da-6f10e3716d3d', # "Unterrichtseinheit" + 'Videos': '7a6e9608-2554-4981-95dc-47ab9ba924de' # "Video (Material)" + } + + MAPPING_RIGHTS_TO_URLS = { + 'CC-by': 'https://creativecommons.org/licenses/by/3.0', + 'CC-by-nc': 'https://creativecommons.org/licenses/by-nc/3.0', + 'CC-by-nc-nd': 'https://creativecommons.org/licenses/by-nc-nd/3.0', + 'CC-by-nc-nd 4.0': 'https://creativecommons.org/licenses/by-nc-nd/4.0', + 'CC-by-nc-sa': 'https://creativecommons.org/licenses/by-nc-sa/3.0/', + 'CC-by-nc-sa 4.0': 'https://creativecommons.org/licenses/by-nc-sa/4.0', + 'CC-by-nd': 'https://creativecommons.org/licenses/by-nd/3.0', + 'CC-by-sa': 'https://creativecommons.org/licenses/by-sa/3.0', + 'CC-by-sa 4.0': 'https://creativecommons.org/licenses/by-sa/4.0/', + } + + FACH_IS_ACTUALLY_A_KEYWORD = [ + 'Besondere Förderung', + 'Computer, Internet & Co.', + 'Deutsch / Kommunikation', # "Deutsch / Kommunikation" is part of "Berufsbildung", not "Deutschunterricht" + # therefore we need to treat it as a keyword + 'Fachcurricula', + 'Feste und Feiertage', + 'Früher und Heute', + 'Ich und meine Welt', + 'Kulturelle Bildung', + 'Jahreszeiten', + 'Lehrerbildung und Schulentwicklung', + 'Lesen und Schreiben', + 'Mediennutzung und Medienkompetenz: Analysieren und Reflektieren', + 'Mediennutzung und Medienkompetenz: Kommunizieren und Kooperieren', + 'Mediennutzung und Medienkompetenz: Problemlösen und Handeln', + 'Mediennutzung und Medienkompetenz: Produzieren und Präsentieren', + 'Mediennutzung und Medienkompetenz: Schützen und sicher agieren', + 'Mediennutzung und Medienkompetenz: Suchen, Verarbeiten und Aufbewahren', + 'Orga und Bürowirtschaft', + 'Pflege, Therapie, Pharmazie', + 'Rechnen und Logik', + 'Rechnungswesen', + 'Sache und Technik', + 'Schuleingangsphase', + 'Schulrecht, Schulorganisation, Schulentwicklung', + 'Sprache und Literatur', + 'Technik', + 'Wirtschaftsinformatik', + 'Kunst, Musik und Kultur', + ] + + MAPPING_FACH_TO_DISCIPLINES = { + 'Arbeitsschutz und Arbeitssicherheit': 'Arbeitssicherheit', + 'Astronomie': 'Astronomie', + 'Berufs- und Arbeitswelt': 'Arbeitslehre', + 'Berufsvorbereitung, Berufsalltag, Arbeitsrecht': 'Arbeitslehre', + 'Biologie': 'Biologie', + 'Chemie': 'Chemie', + 'DaF / DaZ': 'Deutsch als Zweitsprache', + 'Deutsch': 'Deutsch', + 'Elektrotechnik': 'Elektrotechnik', + 'Englisch': 'Englisch', + 'Ernährung und Gesundheit': ['Ernährung und Hauswirtschaft', 'Gesundheit'], + 'Französisch': 'Französisch', + 'Fächerübergreifender Unterricht': 'Allgemein', + 'Geographie': 'Geographie', + 'Geschichte': 'Geschichte', + 'Geschichte, Politik und Gesellschaftswissenschaften': ['Geschichte', 'Politik', 'Gesellschaftskunde'], + 'Gesundheit und Gesundheitsschutz': 'Gesundheit', + 'Informatik': 
'Informatik', + 'Informationstechnik': 'Informatik', + 'Klima, Umwelt, Nachhaltigkeit': 'Nachhaltigkeit', + 'Kunst': 'Kunst', + 'Latein': 'Latein', + 'MINT: Mathematik, Informatik, Naturwissenschaften und Technik': 'MINT', + 'Mathematik': 'Mathematik', + 'Metalltechnik': 'Metalltechnik', + 'Musik': 'Musik', + 'Natur und Umwelt': 'Environmental education', + 'Physik': 'Physik', + 'Politik / SoWi': ['Politik', 'Social education'], + 'Pädagogik': 'Pädagogik', + 'Religion / Ethik': ['Religion', 'Ethik'], + 'Religion und Ethik': ['Religion', 'Ethik'], + 'Spanisch': 'Spanisch', + 'Sport': 'Sport', + 'Sport und Bewegung': 'Sport', + 'WiSo / Politik': ['Economics', 'Social education', 'Politik'], + 'Wirtschaftslehre': 'Economics' + } + + def getId(self, response=None) -> str: + pass + + def getHash(self, response=None) -> str: + pass + + def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> scrapy.Request: + """ + Parses the Lehrer-Online API for individual -nodes and yields URLs found within -tags + to the parse()-method. Additionally this method builds a "cleaned up" metadata_dict that gets handed over within + cb_kwargs. + :param response: + :param selector: scrapy.selector.unified.Selector + :return: scrapy.Request + + Scrapy Contracts: + @url https://www.lehrer-online.de/?type=3030 + @returns item 250 + """ + # an individual can hold the following elements: + # availability + # - titel always + # - sprache always (currently: 100% "Deutsch") + # - beschreibung always + # - beschreibung_lang sometimes (>50%) + # - schlagwort sometimes (unpredictable) + # - kostenpflichtig always + # - autor sometimes + # - autor_email always ("redaktion@lehrer-online.de") + # - anbieter_herkunft always (Impressum) + # - einsteller always ("Redaktion Lehrer-Online") + # - einsteller_email always ("redaktion@lehrer-online.de") + # - letzte_aenderung sometimes ("2022-02-18") + # - publikationsdatum always ("2022-02-18") + # - verfallsdatum never + # - fach sometimes (often: multiple -elements) + # - bildungsebene sometimes (>50%, sometimes completely empty) + # - material_type always + # - material_id_location always + # - url_ressource always + # - lernressourcentyp never + # - zielgruppe always + # - rechte sometimes + # - frei_zugaenglich always + # - quelle_id always (currently holds "LO" 100% of the time) + # - quelle_logo_url always + # - quelle_homepage_url always + # - quelle_pfad always + + # self.logger.info(f"Currently crawling {self.itertag.join(selector.getall())}") + metadata_dict = dict() + + title_raw: str = selector.xpath('titel/text()').get() + # self.logger.info(f"the title is: {title_raw}") + if title_raw: + metadata_dict.update({'title': title_raw}) + + in_language: str = selector.xpath('sprache/text()').get() + if in_language: + if in_language == "Deutsch": + metadata_dict.update({'language': 'de'}) + + description_short: str = selector.xpath('beschreibung/text()').get() + if description_short: + metadata_dict.update({'description_short': description_short}) + + description_long: str = selector.xpath('beschreibung_lang/text()').get() + if description_long: + metadata_dict.update({'description_long': description_long}) + + thumbnail_url: str = selector.xpath('bild_url/text()').get() + if thumbnail_url: + metadata_dict.update({'thumbnail_url': thumbnail_url}) + + keyword_list: list = selector.xpath('schlagwort/text()').getall() + if keyword_list: + metadata_dict.update({'keywords': keyword_list}) + # self.logger.info(f"the keywords are: {keyword_list}") + + 
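# A self-contained sketch (not taken from the patch above) of the xpath pattern that
# parse_node() applies to each <datensatz> node. Inside parse_node() the paths are
# relative (e.g. 'titel/text()') because the XMLFeedSpider already hands over the node
# selector; here the fragment is wrapped in its own Selector, so '//' is used instead.
# Element names follow the Elixier format documented above; the sample values are invented.
from scrapy.selector import Selector

sample_node = """
<datensatz>
    <titel>Beispielmaterial</titel>
    <sprache>Deutsch</sprache>
    <schlagwort>Physik</schlagwort>
    <schlagwort>Mechanik</schlagwort>
</datensatz>
"""

selector = Selector(text=sample_node, type="xml")
metadata_dict = dict()
title_raw = selector.xpath("//titel/text()").get()            # .get() returns the first match or None
if title_raw:
    metadata_dict.update({"title": title_raw})
if selector.xpath("//sprache/text()").get() == "Deutsch":     # map the German label to an ISO code
    metadata_dict.update({"language": "de"})
keyword_list = selector.xpath("//schlagwort/text()").getall() # .getall() returns a list of all matches
if keyword_list:
    metadata_dict.update({"keywords": keyword_list})
# metadata_dict == {"title": "Beispielmaterial", "language": "de", "keywords": ["Physik", "Mechanik"]}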
with_costs_string: str = selector.xpath('kostenpflichtig/text()').get() + # with_costs_string can be either "ja" or "nein" + if with_costs_string == "ja": + metadata_dict.update({'price': 'yes'}) + elif with_costs_string == "nein": + metadata_dict.update({'price': 'no'}) + + author_raw: str = selector.xpath('autor/text()').get() + if author_raw: + metadata_dict.update({'author': author_raw}) + + author_email: str = selector.xpath('autor_email/text()').get() + if author_email: + metadata_dict.update({'author_email': author_email}) + + provider_address: str = selector.xpath('anbieter_herkunft/text()').get() + # provider_address is (currently?) always the address found in the Impressum + if provider_address: + metadata_dict.update({'provider_address': provider_address}) + provider_name: str = selector.xpath('einsteller/text()').get() + # the value for "einsteller" is currently "Redaktion Lehrer-Online" in 100% of cases + if provider_name: + metadata_dict.update({'provider_name': provider_name}) + provider_email: str = selector.xpath('einsteller_email/text()').get() + # the value for "einsteller_email" is currently "redaktion@lehrer-online.de" in 100% of cases + if provider_email: + metadata_dict.update({'provider_email': provider_email}) + + # both last_modified and date_published will be surrounded by lots of whitespace, tabs and newlines + # therefore we need to clean up the string before saving it into our dictionary + last_modified: str = selector.xpath('letzte_aenderung/text()').get() + if last_modified is not None: + last_modified = w3lib.html.strip_html5_whitespace(last_modified) + if last_modified: + # last_modified is not always available, sometimes it's an empty string + last_modified_datetime: datetime = datetime.strptime(last_modified, '%Y-%m-%d') + last_modified = last_modified_datetime.isoformat() + metadata_dict.update({'last_modified': last_modified}) + + date_published: str = selector.xpath('publikationsdatum/text()').get() + if date_published is not None: + date_published = w3lib.html.strip_html5_whitespace(date_published) + if date_published: + # date_published is not always available in the API, but when it is, it follows a strict syntax + date_published: str = w3lib.html.strip_html5_whitespace(date_published) + date_published_datetime: datetime = datetime.strptime(date_published, '%Y-%m-%d') + date_published = date_published_datetime.isoformat() + metadata_dict.update({'date_published': date_published}) + else: + # since date_published is used for our hash, we need this fallback in case it isn't available in the API + metadata_dict.update({'date_published': datetime.now().isoformat()}) + + # ToDo: there is a -Element, that is (in the API) currently empty 100% of the time, check again + # during the next crawler-update if this data is available in the API by then + # expiration_date = selector.xpath('verfallsdatum/text()').get() + # if expiration_date: + # metadata_dict.update({'expiration_date': expiration_date}) + + # can either be completely empty or there can be several -elements within a + disciplines_or_additional_keywords_raw: list = selector.xpath('fach/text()').getall() + actual_disciplines = list() + additional_keywords = list() + if disciplines_or_additional_keywords_raw: + for potential_discipline_item in disciplines_or_additional_keywords_raw: + if potential_discipline_item in self.MAPPING_FACH_TO_DISCIPLINES: + # since not every "fach"-value is the same as our discipline-vocabs, mapping is necessary + discipline = 
self.MAPPING_FACH_TO_DISCIPLINES.get(potential_discipline_item) + if type(discipline) is list: + actual_disciplines.extend(discipline) + else: + actual_disciplines.append(discipline) + continue + elif potential_discipline_item in self.FACH_IS_ACTUALLY_A_KEYWORD or potential_discipline_item: + # not all "fach"-values are valid disciplines, but they can be used as additional keywords + # basically: everything that's not a correct discipline is treated as an additional keyword + additional_keywords.append(potential_discipline_item) + continue + # once we iterated through all -elements, we can set/update the actual fields in metadata_dict + if actual_disciplines: + metadata_dict.update({'discipline': actual_disciplines}) + if additional_keywords: + keyword_list.extend(additional_keywords) + metadata_dict.update({'keywords': keyword_list}) + + educational_context_raw: str = selector.xpath('bildungsebene/text()').get() + educational_context_cleaned_up = set() + if educational_context_raw is not None: + # if this metadata-field is left empty by Lehrer-Online, it will hold a string full of whitespaces + # '\n\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t' gets filtered out here: + educational_context_raw: str = w3lib.html.strip_html5_whitespace(educational_context_raw) + if ";" in educational_context_raw: + # if there's multiple values, they are surrounded by whitespaces and separated by a semicolon + educational_level_list: list = educational_context_raw.split(sep=";") + for educational_level_item in educational_level_list: + edu_level_temp: str = w3lib.html.strip_html5_whitespace(educational_level_item) + educational_context_cleaned_up.add(edu_level_temp) + elif educational_context_raw: + # if there's only one entry it needs to be longer than an empty string + educational_context_raw: str = w3lib.html.strip_html5_whitespace(educational_context_raw) + educational_context_cleaned_up.add(educational_context_raw) + if educational_context_cleaned_up: + educational_context_cleaned_up = list(educational_context_cleaned_up) + educational_context = list() + # we need to map some values to our educatonalContext vocabulary + for edu_context_item in educational_context_cleaned_up: + if edu_context_item in self.MAPPING_EDU_CONTEXT.keys(): + edu_context_temp = self.MAPPING_EDU_CONTEXT.get(edu_context_item) + educational_context.append(edu_context_temp) + else: + educational_context.append(edu_context_item) + metadata_dict.update({'educational_context': educational_context}) + + material_type_raw: str = selector.xpath('material_type/text()').get() + if material_type_raw: + if material_type_raw in self.MAPPING_MATERIAL_TYPE_TO_NEW_LRT.keys(): + new_lrt = self.MAPPING_MATERIAL_TYPE_TO_NEW_LRT.get(material_type_raw) + metadata_dict.update({'new_lrt': new_lrt}) + metadata_dict.update({'material_type_raw': material_type_raw}) + + material_id_local: str = selector.xpath('material_id_local/text()').get() + if material_id_local: + # the material_id_local seems to be a stable string (including an uuid) that is suitable for our sourceId + metadata_dict.update({'source_id': material_id_local}) + + material_url: str = selector.xpath('url_ressource/text()').get() + if material_url is not None: + material_url = w3lib.html.strip_html5_whitespace(material_url) + if material_url: + # checking explicitly for an empty URL-string (2 out of 5688 -tags were empty) + # see: https://docs.python.org/3/library/stdtypes.html#truth-value-testing + metadata_dict.update({'url': material_url}) + + # ToDo: lernressourcentyp - currently: always 
(100%!) empty, needs to be implemented in a future version + # when the API actually has data for us. + # this might become useful for mapping additional values to new_lrt in the future + # lrt_raw = selector.xpath('lernressourcentyp/text()').get() + + intended_end_user_role: str = selector.xpath('zielgruppe/text()').get() + if intended_end_user_role: + metadata_dict.update({'intended_end_user': intended_end_user_role}) + + rights_raw: str = selector.xpath('rechte/text()').get() + if rights_raw: + rights_raw: str = w3lib.html.strip_html5_whitespace(rights_raw) + if rights_raw: + # after stripping the whitespace characters, we need to make sure that strings aren't empty + if rights_raw in self.MAPPING_RIGHTS_TO_URLS: + license_url = self.MAPPING_RIGHTS_TO_URLS.get(rights_raw) + if license_url: + metadata_dict.update({'license_url': license_url}) + else: + metadata_dict.update({'license_description': rights_raw}) + + free_to_access: str = selector.xpath('frei_zugaenglich/text()').get() + # ToDo: Confirm if behaviour is still correct after LO implemented filtering for free materials into the API + # can be either 'ja' or 'nein', but it has a different meaning when "kostenpflichtig"-element is set to "ja": + # frei_zugaenglich (ja) & kostenpflichtig (nein) = truly free to access, no log-in required + # frei_zugaenglich (ja) & kostenpflichtig (ja) = available for free, but log-in required (free) + # frei_zugaenglich (nein) & kostenpflichtig (ja) = login required, paywalled content + # frei_zugaenglich (nein) & kostenpflichtig (nein) = Premium-Account only, paywalled content (this might + # be an oversight in the API, could change in the future) + if free_to_access == "ja": + if metadata_dict.get("price") == "yes": + metadata_dict.update({'conditions_of_access': 'login_for_additional_features'}) + metadata_dict.update({'origin_folder_name': 'free_account_required'}) + elif metadata_dict.get("price") == "no": + metadata_dict.update({'conditions_of_access': 'no_login'}) + metadata_dict.update({'origin_folder_name': 'frei_und_kostenlos'}) + elif free_to_access == "nein": + metadata_dict.update({'conditions_of_access': 'login'}) + metadata_dict.update({'origin_folder_name': 'premium_only'}) + + # quelle_id currently holds just the abbreviation "LO" for all elements, check again later + # quelle_logo_url is different from bild_url, always holds (the same) URL to the Lehrer-Online logo + # quelle_homepage_url always holds a link to "https://www.lehrer-online.de" + + # self.logger.info(f"metadata_dict = {metadata_dict}") + if material_url: + # not every -element actually holds a valid URL to parse for us - we need to skip those empty + # strings otherwise the parse_node() method throws an error on that entry (and skips the rest) + yield scrapy.Request(url=material_url, callback=self.parse, cb_kwargs={'metadata_dict': metadata_dict}) + else: + pass + + def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + """ + Uses the metadata_dict that was built in parse_node() and extracts additional metadata from the DOM itself to + create and fill a BaseItem with the gathered metadata. 
+ :param response: scrapy.http.Response + :param kwargs: a dictionary that always holds a "metadata_dict"-key (which itself holds a dictionary) + :return: BaseItemLoader + """ + metadata_dict: dict = kwargs.get("metadata_dict") + # self.logger.info(f"Metadata inside PARSE-METHOD for {response.url}: {metadata_dict.keys()}") + + base = BaseItemLoader() + + base.add_value('sourceId', metadata_dict.get("source_id")) + hash_temp: str = metadata_dict.get("date_published") + self.version + base.add_value('hash', hash_temp) + if "last_modified" in metadata_dict.keys(): + last_modified = metadata_dict.get("last_modified") + base.add_value('lastModified', last_modified) + else: + # if last_modified is not available in the API, we use the publication date instead as a workaround + base.add_value('lastModified', metadata_dict.get("date_published")) + if "provider_address" in metadata_dict.keys(): + base.add_value('publisher', metadata_dict.get("provider_address")) + base.add_value('type', Constants.TYPE_MATERIAL) + if "thumbnail_url" in metadata_dict.keys(): + thumbnail_url: str = metadata_dict.get("thumbnail_url") + if thumbnail_url: + base.add_value('thumbnail', thumbnail_url) + if "origin_folder_name" in metadata_dict.keys(): + base.add_value('origin', metadata_dict.get("origin_folder_name")) + + lom = LomBaseItemloader() + + general = LomGeneralItemloader() + general.add_value('identifier', response.url) + general.add_value('title', metadata_dict.get("title")) + if "keywords" in metadata_dict.keys(): + general.add_value('keyword', metadata_dict.get("keywords")) + if "description_long" in metadata_dict.keys(): + general.add_value('description', metadata_dict.get("description_long")) + elif "description_short" in metadata_dict.keys(): + general.add_value('description', metadata_dict.get("description_short")) + if "language" in metadata_dict.keys(): + general.add_value('language', metadata_dict.get("language")) + + # noinspection DuplicatedCode + lom.add_value('general', general.load_item()) + + technical = LomTechnicalItemLoader() + technical.add_value('format', 'text/html') + technical.add_value('location', response.url) + lom.add_value('technical', technical.load_item()) + + lifecycle = LomLifecycleItemloader() + lifecycle.add_value('role', 'publisher') # supported roles: "author" / "editor" / "publisher" + lifecycle.add_value('date', metadata_dict.get("date_published")) + if "provider_name" in metadata_dict.keys(): + lifecycle.add_value('organization', metadata_dict.get("provider_name")) + if "provider_email" in metadata_dict.keys(): + lifecycle.add_value('email', metadata_dict.get("provider_email")) + lom.add_value('lifecycle', lifecycle.load_item()) + + educational = LomEducationalItemLoader() + if "description_short" in metadata_dict.keys(): + educational.add_value('description', metadata_dict.get("description_short")) + # - typicalLearningTime optional + if "language" in metadata_dict.keys(): + educational.add_value('language', metadata_dict.get("language")) + # ToDo: RegEx-extract typicalLearningTime? 
(needs to be a duration; LO serves this metadata as a string) + # the time-format on the DOM is a wildly irregular String (from "3 Unterrichtsstunden" to "3x90 Minuten", + # "mindestens 12 Unterrichtsstunden plus Lektüre" etc.); maybe consider this for later crawler-versions + # learning_time_string = response.xpath('//li[@class="icon-count-hours"]/span/text()').get() + lom.add_value('educational', educational.load_item()) + + # classification = super().getLOMClassification() + # lom.add_value('classification', classification.load_item()) + + base.add_value('lom', lom.load_item()) + + vs = ValuespaceItemLoader() + vs.add_value('containsAdvertisement', 'yes') + # vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') + # see: https://www.eduversum.de/datenschutz/ - ToDo: can this be considered "Datensparsam" or not? + if "conditions_of_access" in metadata_dict.keys(): + vs.add_value('conditionsOfAccess', metadata_dict.get("conditions_of_access")) + if "discipline" in metadata_dict.keys(): + vs.add_value('discipline', metadata_dict.get("discipline")) + if "educational_context" in metadata_dict.keys(): + vs.add_value('educationalContext', metadata_dict.get("educational_context")) + if "intended_end_user" in metadata_dict.keys(): + vs.add_value('intendedEndUserRole', metadata_dict.get("intended_end_user")) + if "new_lrt" in metadata_dict.keys(): + vs.add_value('new_lrt', metadata_dict.get("new_lrt")) + if "price" in metadata_dict.keys(): + vs.add_value('price', metadata_dict.get("price")) + vs.add_value('sourceContentType', '004') # "Unterrichtsmaterial- und Aufgaben-Sammlung" + base.add_value('valuespaces', vs.load_item()) + + license_loader = LicenseItemLoader() + if "license_url" in metadata_dict.keys(): + license_url = metadata_dict.get("license_url") + license_loader.add_value('url', license_url) + elif "license_description" in metadata_dict.keys(): + license_description = metadata_dict.get("license_description") + if license_description == 'Frei nutzbares Material': + # just in case the license-description changes over time, we're gathering the description from the DOM + license_title: str = response.xpath('//div[@class="license-title"]/text()').get() + license_text: str = response.xpath('//div[@class="license-text"]/text()').get() + if license_text and license_title: + license_full_desc: str = license_text.join(license_title) + license_loader.add_value('description', license_full_desc) + else: + license_loader.add_value('description', license_description) + if not license_description or license_description == 'Keine Angabe': + license_loader.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) + # noinspection DuplicatedCode + if "author" in metadata_dict.keys(): + license_loader.add_value('author', metadata_dict.get("author")) + # if "expiration_date" in metadata_dict.keys(): + # # ToDo: activate gathering of expiration_date once the data is available in the API + # # - make sure that the dateparser correctly recognizes the date + # expiration_date = metadata_dict.get("expiration_date") + # license_loader.add_value('expirationDate', expiration_date) + base.add_value('license', license_loader.load_item()) + + permissions = super().getPermissions(response) + base.add_value('permissions', permissions.load_item()) + + response_loader = super().mapResponse(response) + base.add_value('response', response_loader.load_item()) + + yield base.load_item() From e87ce868f554381dbe7d306e44f57bbf14ed06fd Mon Sep 17 00:00:00 2001 From: criamos 
<981166+Criamos@users.noreply.github.com> Date: Wed, 29 Jun 2022 17:46:12 +0200 Subject: [PATCH 126/590] lehreronline_spider v0.0.2 - replace: old base.type value -- if no mapping for new_lrt is possible, new_lrt defaults to "Material" - add: mapping for Lehrer-Online "lernressourcentyp" to new_lrt -- values that can't get mapped are put into the keyword-field, so we don't lose that information - fix: distinction between free/login-required/premium content, according to Lehrer-Online's specifications --- converter/spiders/lehreronline_spider.py | 109 +++++++++++++++++------ 1 file changed, 84 insertions(+), 25 deletions(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index 5eba35cf..31421c40 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -15,11 +15,11 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): name = "lehreronline_spider" friendlyName = "Lehrer-Online" start_urls = [ - "https://www.lehrer-online.de/?type=3030", # only the 25 newest items per category (275 in total) - # "https://www.lehrer-online.de/?type=4040&limit=10000" # complete data-set (~5688 URLs in total, the initial - # API response is currently ~18MB. The initial loading of the API response takes about ~56s to complete) + "https://www.lehrer-online.de/?type=3030&limit=10000" + # the limit parameter controls the amount of results PER CATEGORY (NOT the total amount of results) + # API response with a "limit"-value set to 10.000 might take more than 90s (17.7 MB, 5912 URLs to crawl) ] - version = "0.0.1" # last update: 2022-05-31 + version = "0.0.2" # last update: 2022-06-29 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -35,6 +35,50 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): 'Spezieller Förderbedarf': 'Förderschule' } + MAPPING_LO_LRT_TO_NEW_LRT = { + # Lehrer-Online uses a different vocabulary for their "lernressourcentyp" + "Ablaufplan": "7381f17f-50a6-4ce1-b3a0-9d85a482eec0", # "Unterrichtsplanung" # ToDo: confirm this mapping + "Arbeitsblatt": "36e68792-6159-481d-a97b-2c00901f4f78", # Arbeitsblatt + # "Arbeitsblatt interaktiv": "36e68792-6159-481d-a97b-2c00901f4f78", # Arbeitsblatt # ToDo: missing vocab? + # "Arbeitsheft": "", + # "Außerschulischer Lernort": "", + # "Didaktik/Methodik": "", + "Diskussion": "61462395-8303-44bf-95a4-6a4297013283", + # "Argumentation, Plattformen für strukturierte Diskussion" # ToDo: this is a "Tool" + # "Einzelarbeit": "", + "Experiment": "4735c61a-429b-4909-9f3c-cbf975e2aa0e", # "Experiment" + # "Folien": "", # ToDo: missing vocab? slides? + # "Hausaufgabe": "", + # "Interaktives Quiz": "", # ToDo: missing vocab? 
+ # "Internetressource": "", + "Kurs": "4e16015a-7862-49ed-9b5e-6c1c6e0ffcd1", # "Kurs" + # "Lehrer-Begleitheft": "", + "Lehrerhandreichung": "6a15628c-0e59-43e3-9fc5-9a7f7fa261c4", # "Skript, Handout und Handreichung" + # "Lehrerheft": "", + # "Lernkontrolle": "", + "Lernspiel": "b0495f44-b05d-4bde-9dc5-34d7b5234d76", # "Lernspiel" + "Nachrichten": "dc5763ab-6f47-4aa3-9ff3-1303efbeef6e", # "Nachrichten und Neuigkeiten" + "Nachschlagewerk": "c022c920-c236-4234-bae1-e264a3e2bdf6", # "Nachschlagewerk und Glossar" + "Poster": "c382a478-74e0-42f1-96dd-fcfb5c27f746", # "Poster und Plakat" + "Primärmaterial": "ab5b99ea-551c-42f3-995b-e4b5f469ad7e", # "Primärmaterial und Quelle" + "Projekt": "22823ca9-7175-4b24-892e-19ebbf5fe0e7", # "Projekt (Lehr- und Lernmaterial)" + "Präsentation": "92c7a50c-6243-45d9-8b11-e79cbbda6305", # "Präsentation" + "Quiz": "7d591b84-9171-47cb-809a-74ef07f07261", # "Quiz" # ToDo: this is a "Tool", not a "Material" + "Recherche-Auftrag": "1cac68e6-dafe-4ce4-a52f-f33cde26da59", # "Recherche und Lernauftrag" + "Rollenspiel": "ac82dc13-3be1-464d-9cdc-88e608d99c39", # "Rollenspiel" + "Schaubild": "1dc4ed81-718c-4b76-86cb-947a86875973", # "Veranschaulichung, Schaubild und Tafelbild" + # "Schülerheft": "", + # "Schülermagazin": "", + # "Software": "", + "Stationenlernen": "ee738203-44af-4150-986f-ef01fb883f00", # "Stationenlernen" + "Tondokument": "ec2682af-08a9-4ab1-a324-9dca5151e99f", # "Audio" + "Video": "7a6e9608-2554-4981-95dc-47ab9ba924de", # Video + # "Webquest": "", + "entdeckendes Lernen": "9a86beb5-1a65-48ca-99c8-e8c789cfe2f8", # "Entdeckendes Lernen (Lehr- und Lernmaterial)" + # "kooperatives Lernen": "", + "Übung": "a33ef73d-9210-4305-97f9-7357bbf43486", # Übungsmaterial + } + MAPPING_MATERIAL_TYPE_TO_NEW_LRT = { 'Blog': '5204fc81-5dac-4cc4-a28b-aad5c241fa19', # "Webblog (dynamisch)" 'Cartoon': '667f5063-70b9-400c-b1f7-7702ec9487f1', # "Cartoon, Comic" @@ -66,6 +110,7 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): } FACH_IS_ACTUALLY_A_KEYWORD = [ + # ToDo: remove usage of this list after checking the Vocabs if any of these values could be used for altLabels 'Besondere Förderung', 'Computer, Internet & Co.', 'Deutsch / Kommunikation', # "Deutsch / Kommunikation" is part of "Berufsbildung", not "Deutschunterricht" @@ -188,6 +233,7 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # self.logger.info(f"Currently crawling {self.itertag.join(selector.getall())}") metadata_dict = dict() + new_lrts = set() title_raw: str = selector.xpath('titel/text()').get() # self.logger.info(f"the title is: {title_raw}") @@ -277,7 +323,7 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # can either be completely empty or there can be several -elements within a disciplines_or_additional_keywords_raw: list = selector.xpath('fach/text()').getall() actual_disciplines = list() - additional_keywords = list() + additional_keywords_from_disciplines = set() if disciplines_or_additional_keywords_raw: for potential_discipline_item in disciplines_or_additional_keywords_raw: if potential_discipline_item in self.MAPPING_FACH_TO_DISCIPLINES: @@ -291,13 +337,13 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc elif potential_discipline_item in self.FACH_IS_ACTUALLY_A_KEYWORD or potential_discipline_item: # not all "fach"-values are valid disciplines, but they can be used as additional keywords # basically: everything that's not a correct discipline is treated as an additional keyword - 
additional_keywords.append(potential_discipline_item) + additional_keywords_from_disciplines.add(potential_discipline_item) continue # once we iterated through all -elements, we can set/update the actual fields in metadata_dict if actual_disciplines: metadata_dict.update({'discipline': actual_disciplines}) - if additional_keywords: - keyword_list.extend(additional_keywords) + if additional_keywords_from_disciplines: + keyword_list.extend(additional_keywords_from_disciplines) metadata_dict.update({'keywords': keyword_list}) educational_context_raw: str = selector.xpath('bildungsebene/text()').get() @@ -330,8 +376,9 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc material_type_raw: str = selector.xpath('material_type/text()').get() if material_type_raw: - if material_type_raw in self.MAPPING_MATERIAL_TYPE_TO_NEW_LRT.keys(): + if material_type_raw in self.MAPPING_MATERIAL_TYPE_TO_NEW_LRT: new_lrt = self.MAPPING_MATERIAL_TYPE_TO_NEW_LRT.get(material_type_raw) + new_lrts.add(new_lrt) metadata_dict.update({'new_lrt': new_lrt}) metadata_dict.update({'material_type_raw': material_type_raw}) @@ -348,10 +395,20 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # see: https://docs.python.org/3/library/stdtypes.html#truth-value-testing metadata_dict.update({'url': material_url}) - # ToDo: lernressourcentyp - currently: always (100%!) empty, needs to be implemented in a future version - # when the API actually has data for us. - # this might become useful for mapping additional values to new_lrt in the future - # lrt_raw = selector.xpath('lernressourcentyp/text()').get() + # ToDo: lernressourcentyp + lrt_raw = selector.xpath('lernressourcentyp/text()').getall() + # there can be SEVERAL "lernressourcentyp"-elements per item + if lrt_raw: + additional_keywords_from_lo_lrt = set() + for lrt_possible_value in lrt_raw: + if lrt_possible_value in self.MAPPING_LO_LRT_TO_NEW_LRT: + new_lrt = self.MAPPING_LO_LRT_TO_NEW_LRT.get(lrt_possible_value) + new_lrts.add(new_lrt) + else: + additional_keywords_from_lo_lrt.add(lrt_possible_value) + metadata_dict.update({'new_lrt': list(new_lrts)}) + keyword_list.extend(additional_keywords_from_lo_lrt) + metadata_dict.update({'keywords': keyword_list}) intended_end_user_role: str = selector.xpath('zielgruppe/text()').get() if intended_end_user_role: @@ -373,20 +430,21 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # ToDo: Confirm if behaviour is still correct after LO implemented filtering for free materials into the API # can be either 'ja' or 'nein', but it has a different meaning when "kostenpflichtig"-element is set to "ja": # frei_zugaenglich (ja) & kostenpflichtig (nein) = truly free to access, no log-in required + # frei_zugaenglich (nein) & kostenpflichtig (nein) = available for free, but log-in required (free) + # frei_zugaenglich (nein) & kostenpflichtig (ja) = login required, paywalled (premium) content + # ToDo: this is now obsolete # frei_zugaenglich (ja) & kostenpflichtig (ja) = available for free, but log-in required (free) - # frei_zugaenglich (nein) & kostenpflichtig (ja) = login required, paywalled content - # frei_zugaenglich (nein) & kostenpflichtig (nein) = Premium-Account only, paywalled content (this might - # be an oversight in the API, could change in the future) if free_to_access == "ja": + if metadata_dict.get("price") == "no": + metadata_dict.update({'conditions_of_access': 'no_login'}) + metadata_dict.update({'origin_folder_name': 'free'}) 
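    # A hedged aside on the branches above and below (ACCESS_MAP and its name are illustrative,
    # not Lehrer-Online API fields; the string values are the ones used in this patch): the
    # frei_zugaenglich/kostenpflichtig combinations could equally be kept in one lookup table:
    #     ACCESS_MAP = {
    #         ("ja", "no"): ("no_login", "free"),
    #         ("nein", "no"): ("login_for_additional_features", "free_account_required"),
    #         ("nein", "yes"): ("login", "premium_only"),
    #     }
    #     access = ACCESS_MAP.get((free_to_access, metadata_dict.get("price")))
    #     if access:
    #         metadata_dict.update({'conditions_of_access': access[0],
    #                               'origin_folder_name': access[1]})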
+ elif free_to_access == "nein": if metadata_dict.get("price") == "yes": + metadata_dict.update({'conditions_of_access': 'login'}) + metadata_dict.update({'origin_folder_name': 'premium_only'}) + elif metadata_dict.get("price") == "no": metadata_dict.update({'conditions_of_access': 'login_for_additional_features'}) metadata_dict.update({'origin_folder_name': 'free_account_required'}) - elif metadata_dict.get("price") == "no": - metadata_dict.update({'conditions_of_access': 'no_login'}) - metadata_dict.update({'origin_folder_name': 'frei_und_kostenlos'}) - elif free_to_access == "nein": - metadata_dict.update({'conditions_of_access': 'login'}) - metadata_dict.update({'origin_folder_name': 'premium_only'}) # quelle_id currently holds just the abbreviation "LO" for all elements, check again later # quelle_logo_url is different from bild_url, always holds (the same) URL to the Lehrer-Online logo @@ -424,7 +482,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: base.add_value('lastModified', metadata_dict.get("date_published")) if "provider_address" in metadata_dict.keys(): base.add_value('publisher', metadata_dict.get("provider_address")) - base.add_value('type', Constants.TYPE_MATERIAL) if "thumbnail_url" in metadata_dict.keys(): thumbnail_url: str = metadata_dict.get("thumbnail_url") if thumbnail_url: @@ -482,8 +539,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: vs = ValuespaceItemLoader() vs.add_value('containsAdvertisement', 'yes') - # vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') - # see: https://www.eduversum.de/datenschutz/ - ToDo: can this be considered "Datensparsam" or not? + vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') + # see: https://www.eduversum.de/datenschutz/ if "conditions_of_access" in metadata_dict.keys(): vs.add_value('conditionsOfAccess', metadata_dict.get("conditions_of_access")) if "discipline" in metadata_dict.keys(): @@ -494,6 +551,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: vs.add_value('intendedEndUserRole', metadata_dict.get("intended_end_user")) if "new_lrt" in metadata_dict.keys(): vs.add_value('new_lrt', metadata_dict.get("new_lrt")) + else: + vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) if "price" in metadata_dict.keys(): vs.add_value('price', metadata_dict.get("price")) vs.add_value('sourceContentType', '004') # "Unterrichtsmaterial- und Aufgaben-Sammlung" From 8efc465f89d08d8de0ce6dbc59d3cb7dd66d5421 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Jul 2022 01:15:00 +0200 Subject: [PATCH 127/590] add: missing CC licenses to constants.py - add: CC_BY_NC_30 and CC_BY_ND_30 to constants.py - add: missing licenses to "VALID_LICENSE_URLS"-list --- converter/constants.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/converter/constants.py b/converter/constants.py index 54b8b1cc..cb10cbd9 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -4,7 +4,9 @@ class Constants: LICENSE_CC_BY_SA_40 = "https://creativecommons.org/licenses/by-sa/4.0/" LICENSE_CC_BY_30 = "https://creativecommons.org/licenses/by/3.0/" LICENSE_CC_BY_40 = "https://creativecommons.org/licenses/by/4.0/" + LICENSE_CC_BY_NC_30 = "https://creativecommons.org/licenses/by-nc/3.0/" LICENSE_CC_BY_NC_40 = "https://creativecommons.org/licenses/by-nc/4.0/" + LICENSE_CC_BY_ND_30 = "https://creativecommons.org/licenses/by-nd/3.0/" LICENSE_CC_BY_ND_40 = 
"https://creativecommons.org/licenses/by-nd/4.0/" LICENSE_CC_BY_NC_SA_30 = "https://creativecommons.org/licenses/by-nc-sa/3.0/" LICENSE_CC_BY_NC_SA_40 = "https://creativecommons.org/licenses/by-nc-sa/4.0/" @@ -18,6 +20,12 @@ class Constants: LICENSE_CC_BY_SA_40, LICENSE_CC_BY_30, LICENSE_CC_BY_40, + LICENSE_CC_BY_NC_30, + LICENSE_CC_BY_NC_40, + LICENSE_CC_BY_ND_30, + LICENSE_CC_BY_ND_40, + LICENSE_CC_BY_NC_SA_30, + LICENSE_CC_BY_NC_SA_40, LICENSE_CC_BY_NC_ND_30, LICENSE_CC_BY_NC_ND_40, LICENSE_PDM, From 244c4f978d96fcf186c6834a0fa18c122ae695d7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Jul 2022 12:47:23 +0200 Subject: [PATCH 128/590] lehreronline_spider v0.0.3 - update: mapping from Lehrer-Online LRT to "new_lrt"-values (according to feedback) - rework: license mapping -- while CC-BY-NC licenses are correctly recognized and mapped in the crawler, the repo doesn't render the license icons (yet) - rework: LO's "fach" to "discipline"-mapping -- remove: redundant (work-in-progress) entries that are no longer needed - fix: "general.description" string formatting --- converter/spiders/lehreronline_spider.py | 124 +++++++---------------- 1 file changed, 37 insertions(+), 87 deletions(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index 31421c40..6d8f9f68 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -1,3 +1,4 @@ +import urllib.parse from datetime import datetime import scrapy.selector.unified @@ -19,7 +20,7 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): # the limit parameter controls the amount of results PER CATEGORY (NOT the total amount of results) # API response with a "limit"-value set to 10.000 might take more than 90s (17.7 MB, 5912 URLs to crawl) ] - version = "0.0.2" # last update: 2022-06-29 + version = "0.0.3" # last update: 2022-07-11 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -39,23 +40,23 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): # Lehrer-Online uses a different vocabulary for their "lernressourcentyp" "Ablaufplan": "7381f17f-50a6-4ce1-b3a0-9d85a482eec0", # "Unterrichtsplanung" # ToDo: confirm this mapping "Arbeitsblatt": "36e68792-6159-481d-a97b-2c00901f4f78", # Arbeitsblatt - # "Arbeitsblatt interaktiv": "36e68792-6159-481d-a97b-2c00901f4f78", # Arbeitsblatt # ToDo: missing vocab? + "Arbeitsblatt interaktiv": "36e68792-6159-481d-a97b-2c00901f4f78", # Arbeitsblatt # "Arbeitsheft": "", - # "Außerschulischer Lernort": "", - # "Didaktik/Methodik": "", + "Außerschulischer Lernort": "92dcc3ec-fe94-451c-95ac-ea305e0e7597", # "außerschulisches Angebot" + "Didaktik/Methodik": "477115fd-5042-4174-ac39-7c05f8a24766", # "pädagogische Methode, Konzept" "Diskussion": "61462395-8303-44bf-95a4-6a4297013283", # "Argumentation, Plattformen für strukturierte Diskussion" # ToDo: this is a "Tool" # "Einzelarbeit": "", "Experiment": "4735c61a-429b-4909-9f3c-cbf975e2aa0e", # "Experiment" - # "Folien": "", # ToDo: missing vocab? slides? + "Folien": "92c7a50c-6243-45d9-8b11-e79cbbda6305", # "Präsentation" # "Hausaufgabe": "", - # "Interaktives Quiz": "", # ToDo: missing vocab? 
+ "Interaktives Quiz": "a120ce77-59f5-4564-8d49-73f4a0de1594", "Lernen, Quiz und Spiel" # "Internetressource": "", "Kurs": "4e16015a-7862-49ed-9b5e-6c1c6e0ffcd1", # "Kurs" # "Lehrer-Begleitheft": "", "Lehrerhandreichung": "6a15628c-0e59-43e3-9fc5-9a7f7fa261c4", # "Skript, Handout und Handreichung" # "Lehrerheft": "", - # "Lernkontrolle": "", + "Lernkontrolle": "9cf3c183-f37c-4b6b-8beb-65f530595dff", # "Klausur, Klassenarbeit und Test" "Lernspiel": "b0495f44-b05d-4bde-9dc5-34d7b5234d76", # "Lernspiel" "Nachrichten": "dc5763ab-6f47-4aa3-9ff3-1303efbeef6e", # "Nachrichten und Neuigkeiten" "Nachschlagewerk": "c022c920-c236-4234-bae1-e264a3e2bdf6", # "Nachschlagewerk und Glossar" @@ -98,92 +99,40 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): } MAPPING_RIGHTS_TO_URLS = { - 'CC-by': 'https://creativecommons.org/licenses/by/3.0', - 'CC-by-nc': 'https://creativecommons.org/licenses/by-nc/3.0', - 'CC-by-nc-nd': 'https://creativecommons.org/licenses/by-nc-nd/3.0', - 'CC-by-nc-nd 4.0': 'https://creativecommons.org/licenses/by-nc-nd/4.0', - 'CC-by-nc-sa': 'https://creativecommons.org/licenses/by-nc-sa/3.0/', - 'CC-by-nc-sa 4.0': 'https://creativecommons.org/licenses/by-nc-sa/4.0', - 'CC-by-nd': 'https://creativecommons.org/licenses/by-nd/3.0', - 'CC-by-sa': 'https://creativecommons.org/licenses/by-sa/3.0', - 'CC-by-sa 4.0': 'https://creativecommons.org/licenses/by-sa/4.0/', + 'CC-by': Constants.LICENSE_CC_BY_30, + 'CC-by-nc': Constants.LICENSE_CC_BY_NC_30, + 'CC-by-nc-nd': Constants.LICENSE_CC_BY_NC_ND_30, + 'CC-by-nc-nd 4.0': Constants.LICENSE_CC_BY_NC_ND_40, + 'CC-by-nc-sa': Constants.LICENSE_CC_BY_NC_SA_30, + 'CC-by-nc-sa 4.0': Constants.LICENSE_CC_BY_NC_SA_40, + 'CC-by-nd': Constants.LICENSE_CC_BY_ND_30, + 'CC-by-sa': Constants.LICENSE_CC_BY_SA_30, + 'CC-by-sa 4.0': Constants.LICENSE_CC_BY_SA_40, } - FACH_IS_ACTUALLY_A_KEYWORD = [ - # ToDo: remove usage of this list after checking the Vocabs if any of these values could be used for altLabels - 'Besondere Förderung', - 'Computer, Internet & Co.', - 'Deutsch / Kommunikation', # "Deutsch / Kommunikation" is part of "Berufsbildung", not "Deutschunterricht" - # therefore we need to treat it as a keyword - 'Fachcurricula', - 'Feste und Feiertage', - 'Früher und Heute', - 'Ich und meine Welt', - 'Kulturelle Bildung', - 'Jahreszeiten', - 'Lehrerbildung und Schulentwicklung', - 'Lesen und Schreiben', - 'Mediennutzung und Medienkompetenz: Analysieren und Reflektieren', - 'Mediennutzung und Medienkompetenz: Kommunizieren und Kooperieren', - 'Mediennutzung und Medienkompetenz: Problemlösen und Handeln', - 'Mediennutzung und Medienkompetenz: Produzieren und Präsentieren', - 'Mediennutzung und Medienkompetenz: Schützen und sicher agieren', - 'Mediennutzung und Medienkompetenz: Suchen, Verarbeiten und Aufbewahren', - 'Orga und Bürowirtschaft', - 'Pflege, Therapie, Pharmazie', - 'Rechnen und Logik', - 'Rechnungswesen', - 'Sache und Technik', - 'Schuleingangsphase', - 'Schulrecht, Schulorganisation, Schulentwicklung', - 'Sprache und Literatur', - 'Technik', - 'Wirtschaftsinformatik', - 'Kunst, Musik und Kultur', - ] - MAPPING_FACH_TO_DISCIPLINES = { 'Arbeitsschutz und Arbeitssicherheit': 'Arbeitssicherheit', - 'Astronomie': 'Astronomie', 'Berufs- und Arbeitswelt': 'Arbeitslehre', 'Berufsvorbereitung, Berufsalltag, Arbeitsrecht': 'Arbeitslehre', - 'Biologie': 'Biologie', - 'Chemie': 'Chemie', 'DaF / DaZ': 'Deutsch als Zweitsprache', - 'Deutsch': 'Deutsch', - 'Elektrotechnik': 'Elektrotechnik', - 'Englisch': 'Englisch', 'Ernährung und Gesundheit': ['Ernährung 
und Hauswirtschaft', 'Gesundheit'], - 'Französisch': 'Französisch', 'Fächerübergreifender Unterricht': 'Allgemein', - 'Geographie': 'Geographie', - 'Geschichte': 'Geschichte', 'Geschichte, Politik und Gesellschaftswissenschaften': ['Geschichte', 'Politik', 'Gesellschaftskunde'], 'Gesundheit und Gesundheitsschutz': 'Gesundheit', - 'Informatik': 'Informatik', 'Informationstechnik': 'Informatik', 'Klima, Umwelt, Nachhaltigkeit': 'Nachhaltigkeit', - 'Kunst': 'Kunst', - 'Latein': 'Latein', 'MINT: Mathematik, Informatik, Naturwissenschaften und Technik': 'MINT', - 'Mathematik': 'Mathematik', - 'Metalltechnik': 'Metalltechnik', - 'Musik': 'Musik', 'Natur und Umwelt': 'Environmental education', - 'Physik': 'Physik', 'Politik / SoWi': ['Politik', 'Social education'], - 'Pädagogik': 'Pädagogik', 'Religion / Ethik': ['Religion', 'Ethik'], 'Religion und Ethik': ['Religion', 'Ethik'], - 'Spanisch': 'Spanisch', - 'Sport': 'Sport', 'Sport und Bewegung': 'Sport', 'WiSo / Politik': ['Economics', 'Social education', 'Politik'], 'Wirtschaftslehre': 'Economics' } def getId(self, response=None) -> str: - pass + return response.url def getHash(self, response=None) -> str: pass @@ -206,7 +155,8 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # - titel always # - sprache always (currently: 100% "Deutsch") # - beschreibung always - # - beschreibung_lang sometimes (>50%) + # - beschreibung_lang sometimes (>50 %) + # - bild_url sometimes (<5 %) # - schlagwort sometimes (unpredictable) # - kostenpflichtig always # - autor sometimes @@ -218,11 +168,11 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # - publikationsdatum always ("2022-02-18") # - verfallsdatum never # - fach sometimes (often: multiple -elements) - # - bildungsebene sometimes (>50%, sometimes completely empty) + # - bildungsebene sometimes (>50 %, sometimes completely empty) # - material_type always # - material_id_location always # - url_ressource always - # - lernressourcentyp never + # - lernressourcentyp always? 
# - zielgruppe always # - rechte sometimes # - frei_zugaenglich always @@ -251,9 +201,12 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc description_long: str = selector.xpath('beschreibung_lang/text()').get() if description_long: + description_long = w3lib.html.replace_tags(description_long) + description_long = w3lib.html.replace_entities(description_long) metadata_dict.update({'description_long': description_long}) thumbnail_url: str = selector.xpath('bild_url/text()').get() + # ToDo: the "bild_url"-field is rarely useful and only appears in <5% of items, revisit this later if thumbnail_url: metadata_dict.update({'thumbnail_url': thumbnail_url}) @@ -322,7 +275,7 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # can either be completely empty or there can be several -elements within a disciplines_or_additional_keywords_raw: list = selector.xpath('fach/text()').getall() - actual_disciplines = list() + disciplines_mapped = set() additional_keywords_from_disciplines = set() if disciplines_or_additional_keywords_raw: for potential_discipline_item in disciplines_or_additional_keywords_raw: @@ -330,18 +283,18 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # since not every "fach"-value is the same as our discipline-vocabs, mapping is necessary discipline = self.MAPPING_FACH_TO_DISCIPLINES.get(potential_discipline_item) if type(discipline) is list: - actual_disciplines.extend(discipline) + disciplines_mapped.update(discipline) else: - actual_disciplines.append(discipline) - continue - elif potential_discipline_item in self.FACH_IS_ACTUALLY_A_KEYWORD or potential_discipline_item: + disciplines_mapped.add(discipline) + elif potential_discipline_item: + disciplines_mapped.add(potential_discipline_item) # not all "fach"-values are valid disciplines, but they can be used as additional keywords # basically: everything that's not a correct discipline is treated as an additional keyword additional_keywords_from_disciplines.add(potential_discipline_item) - continue + # values that don't need to be mapped (or can't be mapped) end up in the additional keywords list # once we iterated through all -elements, we can set/update the actual fields in metadata_dict - if actual_disciplines: - metadata_dict.update({'discipline': actual_disciplines}) + if disciplines_mapped: + metadata_dict.update({'discipline': list(disciplines_mapped)}) if additional_keywords_from_disciplines: keyword_list.extend(additional_keywords_from_disciplines) metadata_dict.update({'keywords': keyword_list}) @@ -349,7 +302,7 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc educational_context_raw: str = selector.xpath('bildungsebene/text()').get() educational_context_cleaned_up = set() if educational_context_raw is not None: - # if this metadata-field is left empty by Lehrer-Online, it will hold a string full of whitespaces + # if this metadata-field is left empty by Lehrer-Online, it will hold a string full of whitespaces, e.g. 
# '\n\t\t\t\t\n\t\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t' gets filtered out here: educational_context_raw: str = w3lib.html.strip_html5_whitespace(educational_context_raw) if ";" in educational_context_raw: @@ -395,7 +348,6 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # see: https://docs.python.org/3/library/stdtypes.html#truth-value-testing metadata_dict.update({'url': material_url}) - # ToDo: lernressourcentyp lrt_raw = selector.xpath('lernressourcentyp/text()').getall() # there can be SEVERAL "lernressourcentyp"-elements per item if lrt_raw: @@ -427,13 +379,10 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc metadata_dict.update({'license_description': rights_raw}) free_to_access: str = selector.xpath('frei_zugaenglich/text()').get() - # ToDo: Confirm if behaviour is still correct after LO implemented filtering for free materials into the API # can be either 'ja' or 'nein', but it has a different meaning when "kostenpflichtig"-element is set to "ja": # frei_zugaenglich (ja) & kostenpflichtig (nein) = truly free to access, no log-in required # frei_zugaenglich (nein) & kostenpflichtig (nein) = available for free, but log-in required (free) # frei_zugaenglich (nein) & kostenpflichtig (ja) = login required, paywalled (premium) content - # ToDo: this is now obsolete - # frei_zugaenglich (ja) & kostenpflichtig (ja) = available for free, but log-in required (free) if free_to_access == "ja": if metadata_dict.get("price") == "no": metadata_dict.update({'conditions_of_access': 'no_login'}) @@ -565,6 +514,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: elif "license_description" in metadata_dict.keys(): license_description = metadata_dict.get("license_description") if license_description == 'Frei nutzbares Material': + license_loader.add_value('internal', Constants.LICENSE_CUSTOM) # just in case the license-description changes over time, we're gathering the description from the DOM license_title: str = response.xpath('//div[@class="license-title"]/text()').get() license_text: str = response.xpath('//div[@class="license-text"]/text()').get() @@ -573,8 +523,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: license_loader.add_value('description', license_full_desc) else: license_loader.add_value('description', license_description) - if not license_description or license_description == 'Keine Angabe': - license_loader.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) + else: + license_loader.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # noinspection DuplicatedCode if "author" in metadata_dict.keys(): license_loader.add_value('author', metadata_dict.get("author")) From 990e9d13f91f02b5db9e90d2153824cfbf74f2a0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 5 Jul 2022 17:19:40 +0200 Subject: [PATCH 129/590] add: sodix_spider run/debug configuration --- .run/sodix_spider.run.xml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .run/sodix_spider.run.xml diff --git a/.run/sodix_spider.run.xml b/.run/sodix_spider.run.xml new file mode 100644 index 00000000..c169a780 --- /dev/null +++ b/.run/sodix_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file From 598746529237fab3e3303df63be68dd0b3868fcb Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 5 Jul 2022 19:01:30 +0200 Subject: [PATCH 130/590] sodix_spider v0.1.7 - fix: 
ResponseItemLoader.url was using an invalid field to fetch the URL - fix: technical.duration -- checking if the duration value is actually a valid string since, previously, "null" values would get mapped to 0 (instead of being left out) - fix: valuespaces.discipline error -- an additional if-check skips over empty subject lists - fix: valuespaces.intendedEndUserRole -- values were accidentally entered twice to the "valuespaces"-field after mapping them (once mapped, once raw) - add: custom_settings (ignore robots.txt) -- there's no robots.txt anyway, so we might as well skip this 401 error - add: general.keyword filter for empty strings -- the API returns some keyword lists with empty strings which need to be filtered out --- converter/spiders/sodix_spider.py | 71 ++++++++++++++++++++----------- 1 file changed, 46 insertions(+), 25 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 81edf654..fb24d77f 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -15,9 +15,12 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.1.6" + version = "0.1.7" apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 + custom_settings = { + "ROBOTSTXT_OBEY": False # returns an 401-error anyway, we might as well skip this scrapy.Request + } MAPPING_LRT = { "APP": "application", @@ -48,7 +51,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): "SIMULATION": "simulation", "SOFTWARE": "application", "SONSTIGES": "other", - # "TEST": "", + "TEST": "assessment", "TEXT": "text", "UBUNG": "drill and practice", "UNTERRICHTSBAUSTEIN": "teaching module", @@ -83,6 +86,8 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): 'keine Angaben (gesetzliche Regelung)': Constants.LICENSE_CUSTOM, } + # DEBUG_SUBJECTS = set() + def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) @@ -90,7 +95,7 @@ def mapResponse(self, response): r = LomBase.mapResponse(self, response, fetchData=False) r.replace_value("text", "") r.replace_value("html", "") - r.replace_value("url", response.meta["item"].get("link")) + r.replace_value("url", response.meta["item"].get("media").get("url")) return r def getId(self, response): @@ -210,17 +215,19 @@ def start_requests(self): def parse_request(self, response): results = json.loads(response.body) if results: - list = results['data']['findAllMetadata'] - if len(list) == 0: - return - for item in list: - copyResponse = response.copy() - copyResponse.meta["item"] = item - if self.hasChanged(copyResponse): - yield self.handleEntry(copyResponse) - # ToDo: links to binary files (.jpeg) cause errors while building the BaseItem, we might have to filter - # specific media types / URLs - yield self.startRequest(response.meta["page"] + 1) + metadata_items: dict = results['data']['findAllMetadata'] + # if len(metadata_items) == 0: + # return + if metadata_items: + # lists and dictionaries only become True if they have >0 entries, empty lists are considered False + for item in metadata_items: + response_copy = response.copy() + response_copy.meta["item"] = item + if self.hasChanged(response_copy): + yield self.handleEntry(response_copy) + # ToDo: links to binary files (.jpeg) cause errors while building the BaseItem, we might have to filter + # specific media types / URLs + yield self.startRequest(response.meta["page"] + 1) def handleEntry(self, response): return LomBase.parse(self, response) @@ -248,10 +255,14 @@ def 
getLOMGeneral(self, response): "title", self.get("title", json=response.meta["item"]) ) - general.add_value( - "keyword", - self.get("keywords", json=response.meta["item"]) - ) + if "keywords" in response.meta["item"]: + keywords: list = self.get("keywords", json=response.meta["item"]) + if keywords: + # making sure that we're not receiving an empty list + for individual_keyword in keywords: + if individual_keyword.strip(): + # we're only adding valid keywords, none of the empty (whitespace) strings + general.add_value('keyword', individual_keyword) general.add_value( "description", self.get("description", json=response.meta["item"]) @@ -269,9 +280,10 @@ def getLOMTechnical(self, response): technical.add_value( "location", original ) - technical.add_value( - "duration", self.get("media.duration", json=response.meta["item"]) - ) + duration: str = self.get("media.duration", json=response.meta["item"]) + if duration and duration != 0: + # the API response contains "null"-values, we're making sure to only add valid duration values to our item + technical.add_value("duration", duration) technical.add_value( "size", self.get("media.size", json=response.meta["item"]) ) @@ -366,9 +378,19 @@ def getLOMEducational(self, response=None) -> LomEducationalItemLoader: def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) - subjects = self.get('subject', json=response.meta['item']) - for subject in subjects if subjects else []: - valuespaces.add_value("discipline", subject['name']) + if "subject" in response.meta['item'] is not None: + # the "subject"-field does not exist in every item returned by the sodix API + subjects = self.get('subject', json=response.meta['item']) + if subjects: + # the "subject"-key might exist in the API, but still be 'none' + for subject in subjects: + # ToDo: there are (currently) 837 unique subjects across all 50.697 Items + # - these values would be suitable as additional keywords + subject_name = subject['name'] + # self.DEBUG_SUBJECTS.add(subject_name) + # print(f"Amount of Subjects: {len(self.DEBUG_SUBJECTS)} // SUBJECT SET: \n {self.DEBUG_SUBJECTS}") + valuespaces.add_value('discipline', subject_name) + educational_context_list = self.get('educationalLevels', json=response.meta['item']) if educational_context_list: for potential_edu_context in educational_context_list: @@ -381,7 +403,6 @@ def getValuespaces(self, response): if target_audience_item in self.MAPPING_INTENDED_END_USER_ROLE: target_audience_item = self.MAPPING_INTENDED_END_USER_ROLE.get(target_audience_item) valuespaces.add_value('intendedEndUserRole', target_audience_item) - valuespaces.add_value("intendedEndUserRole", self.get('targetAudience', json=response.meta['item'])) if self.get('cost', json=response.meta['item']) == "FREE": valuespaces.add_value("price", "no") From 60efbd060443c74ccd7b26db05651e358f7b7c91 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 5 Jul 2022 19:05:29 +0200 Subject: [PATCH 131/590] fix: double logging output in LomBase - the parse()-method within lom_base called logging.debug on main.load_item() an additional time -- the Scrapy pipeline already prints out the assembled BaseItem once, we don't need it clogging up the .log file twice -- this should reduce the .log filesize for crawlers that are inheriting from LomBase --- converter/spiders/base_classes/lom_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/lom_base.py 
b/converter/spiders/base_classes/lom_base.py index 4e0a5179..fec95a7c 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -105,7 +105,7 @@ def parse(self, response): main.add_value("valuespaces", self.getValuespaces(response).load_item()) main.add_value("license", self.getLicense(response).load_item()) main.add_value("permissions", self.getPermissions(response).load_item()) - logging.debug(main.load_item()) + # logging.debug(main.load_item()) main.add_value("response", self.mapResponse(response).load_item()) return main.load_item() From 127ef5f3d6935aa0b652c371e7f717448cd7ef82 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 11 Jul 2022 11:50:26 +0200 Subject: [PATCH 132/590] sodix_spider v0.1.8 - add: get_subjects()-method which is used for both "valuespaces.discipline" and "general.keywords" so no metadata gets lost -- this is necessary since there are (currently) 837 unique values within the "subject"-field in the Sodix API --- converter/spiders/sodix_spider.py | 40 +++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 12 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index fb24d77f..a0330ff0 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -1,4 +1,5 @@ import json +from typing import Any import requests import scrapy @@ -15,7 +16,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.1.7" + version = "0.1.8" # last update: 2022-07-11 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -233,7 +234,7 @@ def handleEntry(self, response): return LomBase.parse(self, response) # thumbnail is always the same, do not use the one from rss - def getBase(self, response): + def getBase(self, response) -> BaseItemLoader: base = LomBase.getBase(self, response) base.replace_value( "thumbnail", self.get("media.thumbPreview", json=response.meta["item"]) @@ -249,7 +250,7 @@ def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: return lifecycle - def getLOMGeneral(self, response): + def getLOMGeneral(self, response) -> LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) general.replace_value( "title", @@ -257,19 +258,25 @@ def getLOMGeneral(self, response): ) if "keywords" in response.meta["item"]: keywords: list = self.get("keywords", json=response.meta["item"]) + keywords_cleaned_up: list = list() if keywords: # making sure that we're not receiving an empty list for individual_keyword in keywords: if individual_keyword.strip(): # we're only adding valid keywords, none of the empty (whitespace) strings + keywords_cleaned_up.append(individual_keyword) general.add_value('keyword', individual_keyword) + subjects = self.get_subjects(response) + if subjects: + keywords_cleaned_up.extend(subjects) + general.replace_value('keyword', keywords_cleaned_up) general.add_value( "description", self.get("description", json=response.meta["item"]) ) return general - def getLOMTechnical(self, response): + def getLOMTechnical(self, response) -> LomTechnicalItemLoader: technical = LomBase.getLOMTechnical(self, response) technical.replace_value("format", self.get("media.dataType", json=response.meta["item"])) technical.replace_value( @@ -289,7 +296,7 @@ def getLOMTechnical(self, response): ) return technical - def getLicense(self, response): + def getLicense(self, response) -> 
LicenseItemLoader: license_loader = LomBase.getLicense(self, response) author: str = self.get('author', json=response.meta['item']) @@ -376,20 +383,30 @@ def getLOMEducational(self, response=None) -> LomEducationalItemLoader: educational.add_value("typicalAgeRange", tar.load_item()) return educational - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) + def get_subjects(self, response) -> list[Any] | None: + # there are (currently) 837 unique subjects across all 50.697 Items, which are suitable to be used as additional + # keyword values. + subject_set = set() if "subject" in response.meta['item'] is not None: # the "subject"-field does not exist in every item returned by the sodix API subjects = self.get('subject', json=response.meta['item']) if subjects: - # the "subject"-key might exist in the API, but still be 'none' + # the "subject"-key might exist in the API, but still be of 'None'-value for subject in subjects: - # ToDo: there are (currently) 837 unique subjects across all 50.697 Items - # - these values would be suitable as additional keywords subject_name = subject['name'] # self.DEBUG_SUBJECTS.add(subject_name) # print(f"Amount of Subjects: {len(self.DEBUG_SUBJECTS)} // SUBJECT SET: \n {self.DEBUG_SUBJECTS}") - valuespaces.add_value('discipline', subject_name) + subject_set.add(subject_name) + return list(subject_set) + else: + return None + + def getValuespaces(self, response) -> ValuespaceItemLoader: + valuespaces = LomBase.getValuespaces(self, response) + subjects = self.get_subjects(response) + if subjects: + for subject in subjects: + valuespaces.add_value('discipline', subject) educational_context_list = self.get('educationalLevels', json=response.meta['item']) if educational_context_list: @@ -414,6 +431,5 @@ def getValuespaces(self, response): potential_lrt = self.MAPPING_LRT.get(potential_lrt) valuespaces.add_value('learningResourceType', potential_lrt) else: - # ToDo: lrt values that can't get mapped should be put into "keywords" to avoid losing them pass return valuespaces From dd20873eb492baa3479f3836889b08a2a0ec711a Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 1 Aug 2022 12:27:34 +0200 Subject: [PATCH 133/590] fix: wrapper for settings --- converter/pipelines.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 74014fa2..fa9cd2b7 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -22,6 +22,7 @@ import scrapy.crawler from PIL import Image from itemadapter import ItemAdapter +from scrapy import settings from scrapy.exceptions import DropItem from scrapy.exporters import JsonItemExporter from scrapy.utils.project import get_project_settings @@ -455,6 +456,8 @@ def create_thumbnails_from_image_bytes(self, image, item, settings): def get_settings_for_crawler(spider): all_settings = get_project_settings() crawler_settings = getattr(spider, "custom_settings", {}) + if type(crawler_settings) == dict: + crawler_settings = settings.BaseSettings(crawler_settings, 'spider') for key in crawler_settings.keys(): if ( all_settings.get(key) and crawler_settings.getpriority(key) > all_settings.getpriority(key) From da576131da1c49a0055112659d6b190ca8bc49a0 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 1 Aug 2022 13:29:58 +0200 Subject: [PATCH 134/590] build:bump python to 3.10 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 690f7108..579af2a5 100644 --- a/Dockerfile +++ b/Dockerfile @@ 
-1,4 +1,4 @@ -FROM python:3.9.1-slim-buster +FROM python:3.10.0-slim-buster ENV CRAWLER wirlernenonline_spider From 9880f8c554c89136d962249bd4cbc7bc6236eebf Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 Aug 2022 18:23:46 +0200 Subject: [PATCH 135/590] update: license mapping in es_connector.py - add: missing licenses to mapLicense()-method -- items in the edu-sharing repo were missing licenses even though they appeared correctly in the crawler logs -- alphabetically sorted for easier comparison between constants.py and the if-clauses that use them - optimize imports - alphabetically sort the constants.py attributes and "VALID_LICENSE_URLS"-list --- converter/constants.py | 28 +++++----- converter/es_connector.py | 105 ++++++++++++++++++++++---------------- 2 files changed, 74 insertions(+), 59 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index cb10cbd9..6bc45252 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -1,33 +1,33 @@ class Constants: - LICENSE_CC_ZERO_10 = "https://creativecommons.org/publicdomain/zero/1.0/" - LICENSE_CC_BY_SA_30 = "https://creativecommons.org/licenses/by-sa/3.0/" - LICENSE_CC_BY_SA_40 = "https://creativecommons.org/licenses/by-sa/4.0/" LICENSE_CC_BY_30 = "https://creativecommons.org/licenses/by/3.0/" LICENSE_CC_BY_40 = "https://creativecommons.org/licenses/by/4.0/" LICENSE_CC_BY_NC_30 = "https://creativecommons.org/licenses/by-nc/3.0/" LICENSE_CC_BY_NC_40 = "https://creativecommons.org/licenses/by-nc/4.0/" - LICENSE_CC_BY_ND_30 = "https://creativecommons.org/licenses/by-nd/3.0/" - LICENSE_CC_BY_ND_40 = "https://creativecommons.org/licenses/by-nd/4.0/" - LICENSE_CC_BY_NC_SA_30 = "https://creativecommons.org/licenses/by-nc-sa/3.0/" - LICENSE_CC_BY_NC_SA_40 = "https://creativecommons.org/licenses/by-nc-sa/4.0/" LICENSE_CC_BY_NC_ND_30 = "https://creativecommons.org/licenses/by-nc-nd/3.0/" LICENSE_CC_BY_NC_ND_40 = "https://creativecommons.org/licenses/by-nc-nd/4.0/" + LICENSE_CC_BY_NC_SA_30 = "https://creativecommons.org/licenses/by-nc-sa/3.0/" + LICENSE_CC_BY_NC_SA_40 = "https://creativecommons.org/licenses/by-nc-sa/4.0/" + LICENSE_CC_BY_ND_30 = "https://creativecommons.org/licenses/by-nd/3.0/" + LICENSE_CC_BY_ND_40 = "https://creativecommons.org/licenses/by-nd/4.0/" + LICENSE_CC_BY_SA_30 = "https://creativecommons.org/licenses/by-sa/3.0/" + LICENSE_CC_BY_SA_40 = "https://creativecommons.org/licenses/by-sa/4.0/" + LICENSE_CC_ZERO_10 = "https://creativecommons.org/publicdomain/zero/1.0/" LICENSE_PDM = "https://creativecommons.org/publicdomain/mark/1.0/" VALID_LICENSE_URLS = [ - LICENSE_CC_ZERO_10, - LICENSE_CC_BY_SA_30, - LICENSE_CC_BY_SA_40, LICENSE_CC_BY_30, LICENSE_CC_BY_40, LICENSE_CC_BY_NC_30, LICENSE_CC_BY_NC_40, - LICENSE_CC_BY_ND_30, - LICENSE_CC_BY_ND_40, - LICENSE_CC_BY_NC_SA_30, - LICENSE_CC_BY_NC_SA_40, LICENSE_CC_BY_NC_ND_30, LICENSE_CC_BY_NC_ND_40, + LICENSE_CC_BY_NC_SA_30, + LICENSE_CC_BY_NC_SA_40, + LICENSE_CC_BY_ND_30, + LICENSE_CC_BY_ND_40, + LICENSE_CC_BY_SA_30, + LICENSE_CC_BY_SA_40, + LICENSE_CC_ZERO_10, LICENSE_PDM, ] LICENSE_MAPPINGS = { diff --git a/converter/es_connector.py b/converter/es_connector.py index 4729d444..3bdcc828 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -1,28 +1,26 @@ +import base64 +import json +import logging import time import uuid +from enum import Enum +from typing import List + import requests -import json -import base64 import vobject -from scrapy.utils.project import get_project_settings from 
requests.auth import HTTPBasicAuth -from io import BytesIO -import logging - +from scrapy.utils.project import get_project_settings from vobject.vcard import VCardBehavior from converter import env from converter.constants import Constants -from edu_sharing_client.api_client import ApiClient -from edu_sharing_client.configuration import Configuration from edu_sharing_client.api.bulk_v1_api import BULKV1Api from edu_sharing_client.api.iam_v1_api import IAMV1Api -from edu_sharing_client.api.node_v1_api import NODEV1Api from edu_sharing_client.api.mediacenter_v1_api import MEDIACENTERV1Api +from edu_sharing_client.api.node_v1_api import NODEV1Api +from edu_sharing_client.api_client import ApiClient +from edu_sharing_client.configuration import Configuration from edu_sharing_client.rest import ApiException -from edu_sharing_client.models import GroupEntry -from typing import List -from enum import Enum class EduSharingConstants: @@ -76,19 +74,17 @@ def __getattribute__(self, name): def newfunc(*args, **kwargs): if time.time() - ESApiClient.lastRequestTime > ESApiClient.COOKIE_REBUILD_THRESHOLD: EduSharing.initCookie() - self.cookie = EduSharing.cookie + self.cookie = EduSharing.cookie # store last request time ESApiClient.lastRequestTime = time.time() return attr(*args, **kwargs) - return newfunc else: return attr - class EduSharing: class CreateGroupType(Enum): Regular = 1 @@ -132,7 +128,8 @@ def syncNode(self, spider, type, properties): except ApiException as e: jsonError = json.loads(e.body) if jsonError["error"] == "java.lang.IllegalStateException": - logging.warning("Node '" + properties['cm:name'][0] + "' probably blocked for sync: " + jsonError["message"]) + logging.warning( + "Node '" + properties['cm:name'][0] + "' probably blocked for sync: " + jsonError["message"]) return None raise e return response["node"] @@ -168,13 +165,14 @@ def setPermissions(self, uuid, permissions) -> bool: return True except ApiException as e: return False + def setNodeBinaryData(self, uuid, item) -> bool: if "binary" in item: logging.info(get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/content?mimetype=" - + item["lom"]["technical"]["format"] + + "rest/node/v1/nodes/-home-/" + + uuid + + "/content?mimetype=" + + item["lom"]["technical"]["format"] ) files = {"file": item["binary"]} response = requests.post( @@ -216,20 +214,17 @@ def setNodePreview(self, uuid, item) -> bool: def mapLicense(self, spaces, license): if "url" in license: - if license["url"] == Constants.LICENSE_CC_BY_40: - spaces["ccm:commonlicense_key"] = "CC_BY" - spaces["ccm:commonlicense_cc_version"] = "4.0" if license["url"] == Constants.LICENSE_CC_BY_30: spaces["ccm:commonlicense_key"] = "CC_BY" spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_SA_30: - spaces["ccm:commonlicense_key"] = "CC_BY_SA" - spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_NC_SA_30: - spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + if license["url"] == Constants.LICENSE_CC_BY_40: + spaces["ccm:commonlicense_key"] = "CC_BY" + spaces["ccm:commonlicense_cc_version"] = "4.0" + if license["url"] == Constants.LICENSE_CC_BY_NC_30: + spaces["ccm:commonlicense_key"] = "CC_BY_NC" spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_SA_40: - spaces["ccm:commonlicense_key"] = "CC_BY_SA" + if license["url"] == Constants.LICENSE_CC_BY_NC_40: + spaces["ccm:commonlicense_key"] = "CC_BY_NC" 
spaces["ccm:commonlicense_cc_version"] = "4.0" if license["url"] == Constants.LICENSE_CC_BY_NC_ND_30: spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" @@ -237,6 +232,24 @@ def mapLicense(self, spaces, license): if license["url"] == Constants.LICENSE_CC_BY_NC_ND_40: spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" spaces["ccm:commonlicense_cc_version"] = "4.0" + if license["url"] == Constants.LICENSE_CC_BY_NC_SA_30: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + spaces["ccm:commonlicense_cc_version"] = "3.0" + if license["url"] == Constants.LICENSE_CC_BY_NC_SA_40: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + spaces["ccm:commonlicense_cc_version"] = "4.0" + if license["url"] == Constants.LICENSE_CC_BY_ND_30: + spaces["ccm:commonlicense_key"] = "CC_BY_ND" + spaces["ccm:commonlicense_cc_version"] = "3.0" + if license["url"] == Constants.LICENSE_CC_BY_ND_40: + spaces["ccm:commonlicense_key"] = "CC_BY_ND" + spaces["ccm:commonlicense_cc_version"] = "4.0" + if license["url"] == Constants.LICENSE_CC_BY_SA_30: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "3.0" + if license["url"] == Constants.LICENSE_CC_BY_SA_40: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "4.0" if license["url"] == Constants.LICENSE_CC_ZERO_10: spaces["ccm:commonlicense_key"] = "CC_0" spaces["ccm:commonlicense_cc_version"] = "1.0" @@ -304,8 +317,8 @@ def transformItem(self, uuid, spider, item): if not "role" in person: continue if ( - not person["role"].lower() - in EduSharingConstants.LIFECYCLE_ROLES_MAPPING + not person["role"].lower() + in EduSharingConstants.LIFECYCLE_ROLES_MAPPING ): logging.warning( "The lifecycle role " @@ -391,9 +404,9 @@ def createGroupsIfNotExists(self, groups, type: CreateGroupType): for group in groups: if type == EduSharing.CreateGroupType.MediaCenter: uuid = ( - EduSharingConstants.GROUP_PREFIX - + EduSharingConstants.MEDIACENTER_PREFIX - + group + EduSharingConstants.GROUP_PREFIX + + EduSharingConstants.MEDIACENTER_PREFIX + + group ) else: uuid = EduSharingConstants.GROUP_PREFIX + group @@ -440,8 +453,8 @@ def setNodePermissions(self, uuid, item): public = item["permissions"]["public"] if public == True: if ( - "groups" in item["permissions"] - or "mediacenters" in item["permissions"] + "groups" in item["permissions"] + or "mediacenters" in item["permissions"] ): logging.error( "Invalid state detected: Permissions public is set to true but groups or mediacenters are also set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!" 
@@ -467,8 +480,8 @@ def setNodePermissions(self, uuid, item): mergedGroups = [] if "groups" in item["permissions"]: if ( - "autoCreateGroups" in item["permissions"] - and item["permissions"]["autoCreateGroups"] == True + "autoCreateGroups" in item["permissions"] + and item["permissions"]["autoCreateGroups"] == True ): self.createGroupsIfNotExists( item["permissions"]["groups"], @@ -482,8 +495,8 @@ def setNodePermissions(self, uuid, item): ) if "mediacenters" in item["permissions"]: if ( - "autoCreateMediacenters" in item["permissions"] - and item["permissions"]["autoCreateMediacenters"] == True + "autoCreateMediacenters" in item["permissions"] + and item["permissions"]["autoCreateMediacenters"] == True ): self.createGroupsIfNotExists( item["permissions"]["mediacenters"], @@ -492,8 +505,8 @@ def setNodePermissions(self, uuid, item): mergedGroups = mergedGroups + list( map( lambda x: EduSharingConstants.GROUP_PREFIX - + EduSharingConstants.MEDIACENTER_PROXY_PREFIX - + x, + + EduSharingConstants.MEDIACENTER_PROXY_PREFIX + + x, item["permissions"]["mediacenters"], ) ) @@ -525,6 +538,7 @@ def insertItem(self, spider, uuid, item): def updateItem(self, spider, uuid, item): self.insertItem(spider, uuid, item) + @staticmethod def initCookie(): settings = get_project_settings() @@ -541,6 +555,7 @@ def initCookie(): if isAdmin: EduSharing.cookie = auth.headers["SET-COOKIE"].split(";")[0] return auth + def initApiClient(self): if EduSharing.cookie == None: settings = get_project_settings() @@ -601,8 +616,8 @@ def findItem(self, id, spider): response = EduSharing.bulkApi.find(properties) properties = response["node"]["properties"] if ( - "ccm:replicationsourcehash" in properties - and "ccm:replicationsourceuuid" in properties + "ccm:replicationsourcehash" in properties + and "ccm:replicationsourceuuid" in properties ): return [ properties["ccm:replicationsourceuuid"][0], From 885150affd470674564450622c21e1adc13cd4a2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 Aug 2022 19:57:49 +0200 Subject: [PATCH 136/590] add: typehints to Constants class attributes - in accordance to PEP 591 and PEP 484 - replace "http"-links with "https" - questions in regard to LICENSE_MAPPINGS remain --- converter/constants.py | 66 +++++++++++++++++++++++------------------- 1 file changed, 37 insertions(+), 29 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index 6bc45252..cfa6164d 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -1,20 +1,23 @@ +from typing import Final, Any + + class Constants: - LICENSE_CC_BY_30 = "https://creativecommons.org/licenses/by/3.0/" - LICENSE_CC_BY_40 = "https://creativecommons.org/licenses/by/4.0/" - LICENSE_CC_BY_NC_30 = "https://creativecommons.org/licenses/by-nc/3.0/" - LICENSE_CC_BY_NC_40 = "https://creativecommons.org/licenses/by-nc/4.0/" - LICENSE_CC_BY_NC_ND_30 = "https://creativecommons.org/licenses/by-nc-nd/3.0/" - LICENSE_CC_BY_NC_ND_40 = "https://creativecommons.org/licenses/by-nc-nd/4.0/" - LICENSE_CC_BY_NC_SA_30 = "https://creativecommons.org/licenses/by-nc-sa/3.0/" - LICENSE_CC_BY_NC_SA_40 = "https://creativecommons.org/licenses/by-nc-sa/4.0/" - LICENSE_CC_BY_ND_30 = "https://creativecommons.org/licenses/by-nd/3.0/" - LICENSE_CC_BY_ND_40 = "https://creativecommons.org/licenses/by-nd/4.0/" - LICENSE_CC_BY_SA_30 = "https://creativecommons.org/licenses/by-sa/3.0/" - LICENSE_CC_BY_SA_40 = "https://creativecommons.org/licenses/by-sa/4.0/" - LICENSE_CC_ZERO_10 = 
"https://creativecommons.org/publicdomain/zero/1.0/" - LICENSE_PDM = "https://creativecommons.org/publicdomain/mark/1.0/" + LICENSE_CC_BY_30: Final[str] = "https://creativecommons.org/licenses/by/3.0/" + LICENSE_CC_BY_40: Final[str] = "https://creativecommons.org/licenses/by/4.0/" + LICENSE_CC_BY_NC_30: Final[str] = "https://creativecommons.org/licenses/by-nc/3.0/" + LICENSE_CC_BY_NC_40: Final[str] = "https://creativecommons.org/licenses/by-nc/4.0/" + LICENSE_CC_BY_NC_ND_30: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/3.0/" + LICENSE_CC_BY_NC_ND_40: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/4.0/" + LICENSE_CC_BY_NC_SA_30: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/3.0/" + LICENSE_CC_BY_NC_SA_40: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/4.0/" + LICENSE_CC_BY_ND_30: Final[str] = "https://creativecommons.org/licenses/by-nd/3.0/" + LICENSE_CC_BY_ND_40: Final[str] = "https://creativecommons.org/licenses/by-nd/4.0/" + LICENSE_CC_BY_SA_30: Final[str] = "https://creativecommons.org/licenses/by-sa/3.0/" + LICENSE_CC_BY_SA_40: Final[str] = "https://creativecommons.org/licenses/by-sa/4.0/" + LICENSE_CC_ZERO_10: Final[str] = "https://creativecommons.org/publicdomain/zero/1.0/" + LICENSE_PDM: Final[str] = "https://creativecommons.org/publicdomain/mark/1.0/" - VALID_LICENSE_URLS = [ + VALID_LICENSE_URLS: list[str | Any] = [ LICENSE_CC_BY_30, LICENSE_CC_BY_40, LICENSE_CC_BY_NC_30, @@ -30,16 +33,21 @@ class Constants: LICENSE_CC_ZERO_10, LICENSE_PDM, ] - LICENSE_MAPPINGS = { - "https://creativecommons.org/publicdomain/zero/": LICENSE_CC_ZERO_10, - "https://creativecommons.org/licenses/by/": LICENSE_CC_BY_40, - "https://creativecommons.org/licenses/by-sa/": LICENSE_CC_BY_SA_40, - # wrong mapping (currently from edu-sharing) - "https://creativecommons.org/licenses/pdm/": LICENSE_PDM, + LICENSE_MAPPINGS: dict[str, str] = { + "https://creativecommons.org/licenses/by/": LICENSE_CC_BY_40, # ToDo: outdated approximation? + # ToDo: - CC_BY_NC (3.0 + 4.0) "https://creativecommons.org/licenses/by-nc-nd/3.0/": LICENSE_CC_BY_NC_ND_30, "https://creativecommons.org/licenses/by-nc-nd/4.0/": LICENSE_CC_BY_NC_ND_40, + # ToDo: + # - CC_BY_NC_SA (3.0 + 4.0) + # - CC_BY_ND (3.0 + 4.0) + # - CC_BY_SA (3.0) + "https://creativecommons.org/licenses/by-sa/": LICENSE_CC_BY_SA_40, # Todo: outdated approximation? 
+ # wrong mapping (currently from edu-sharing) + "https://creativecommons.org/publicdomain/zero/": LICENSE_CC_ZERO_10, + "https://creativecommons.org/licenses/pdm/": LICENSE_PDM, } - LICENSE_MAPPINGS_INTERNAL = { + LICENSE_MAPPINGS_INTERNAL: dict[str, list[str]] = { "CC_0": [LICENSE_CC_ZERO_10], "CC_BY": [LICENSE_CC_BY_40, LICENSE_CC_BY_30], "CC_BY_SA": [LICENSE_CC_BY_SA_40, LICENSE_CC_BY_SA_30], @@ -47,15 +55,15 @@ class Constants: "PDM": [LICENSE_PDM], } - LICENSE_COPYRIGHT_LAW = "COPYRIGHT_LAW" - LICENSE_CUSTOM = "CUSTOM" # Custom License, use the license description field for arbitrary values - LICENSE_NONPUBLIC = "NONPUBLIC" + LICENSE_COPYRIGHT_LAW: Final[str] = "COPYRIGHT_LAW" + LICENSE_CUSTOM: Final[str] = "CUSTOM" # Custom License, use the license description field for arbitrary values + LICENSE_NONPUBLIC: Final[str] = "NONPUBLIC" - NEW_LRT_MATERIAL = "http://w3id.org/openeduhub/vocabs/new_lrt/1846d876-d8fd-476a-b540-b8ffd713fedb" - NEW_LRT_TOOL = "http://w3id.org/openeduhub/vocabs/new_lrt/cefccf75-cba3-427d-9a0f-35b4fedcbba1" + NEW_LRT_MATERIAL: Final[str] = "https://w3id.org/openeduhub/vocabs/new_lrt/1846d876-d8fd-476a-b540-b8ffd713fedb" + NEW_LRT_TOOL: Final[str] = "https://w3id.org/openeduhub/vocabs/new_lrt/cefccf75-cba3-427d-9a0f-35b4fedcbba1" - SOURCE_TYPE_SPIDER = 1 - SOURCE_TYPE_EDITORIAL = 2 + SOURCE_TYPE_SPIDER: int = 1 + SOURCE_TYPE_EDITORIAL: int = 2 class OerType: From 96cf2eef6789dc11093d1c0027c27a7aed7f1769 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 Aug 2022 21:42:42 +0200 Subject: [PATCH 137/590] lehreronline_spider v0.0.4 - fix: duplicated keywords -- since we have to accept additional keywords from either potential "discipline"- or "LRT"-candidates, we're now making sure that no duplicate entries are within the final keyword_list - feat: "discipline"-mapping now splits grouped strings into its individual parts before mapping -- this was necessary because Lehrer-Online serves strings like "Politik / WiSo / SoWi / Wirtschaft" instead of individual items --- converter/spiders/lehreronline_spider.py | 38 ++++++++++++++++-------- 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index 6d8f9f68..5a48ecc9 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -1,4 +1,3 @@ -import urllib.parse from datetime import datetime import scrapy.selector.unified @@ -20,7 +19,7 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): # the limit parameter controls the amount of results PER CATEGORY (NOT the total amount of results) # API response with a "limit"-value set to 10.000 might take more than 90s (17.7 MB, 5912 URLs to crawl) ] - version = "0.0.3" # last update: 2022-07-11 + version = "0.0.4" # last update: 2022-08-02 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -114,7 +113,6 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): 'Arbeitsschutz und Arbeitssicherheit': 'Arbeitssicherheit', 'Berufs- und Arbeitswelt': 'Arbeitslehre', 'Berufsvorbereitung, Berufsalltag, Arbeitsrecht': 'Arbeitslehre', - 'DaF / DaZ': 'Deutsch als Zweitsprache', 'Ernährung und Gesundheit': ['Ernährung und Hauswirtschaft', 'Gesundheit'], 'Fächerübergreifender Unterricht': 'Allgemein', 'Geschichte, Politik und Gesellschaftswissenschaften': ['Geschichte', 'Politik', 'Gesellschaftskunde'], @@ -123,11 +121,10 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): 'Klima, Umwelt, 
Nachhaltigkeit': 'Nachhaltigkeit', 'MINT: Mathematik, Informatik, Naturwissenschaften und Technik': 'MINT', 'Natur und Umwelt': 'Environmental education', - 'Politik / SoWi': ['Politik', 'Social education'], - 'Religion / Ethik': ['Religion', 'Ethik'], 'Religion und Ethik': ['Religion', 'Ethik'], 'Sport und Bewegung': 'Sport', - 'WiSo / Politik': ['Economics', 'Social education', 'Politik'], + 'SoWi': ['Social education', 'Economics'], + 'WiSo': ['Economics', 'Social education'], 'Wirtschaftslehre': 'Economics' } @@ -274,11 +271,24 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # metadata_dict.update({'expiration_date': expiration_date}) # can either be completely empty or there can be several -elements within a - disciplines_or_additional_keywords_raw: list = selector.xpath('fach/text()').getall() + disciplines_or_additional_keywords: list = selector.xpath('fach/text()').getall() + individual_disciplines_or_keywords = set() + for potential_discipline_or_keyword in disciplines_or_additional_keywords: + # to make mapping more precise, we're separating strings like "Politik / WiSo / SoWi / Wirtschaft" into its + # individual parts + if " / " in potential_discipline_or_keyword: + disciplines_or_keywords_separated = potential_discipline_or_keyword.split(" / ") + for each_string in disciplines_or_keywords_separated: + each_string_stripped = each_string.strip() + individual_disciplines_or_keywords.add(each_string_stripped) + else: + individual_disciplines_or_keywords.add(potential_discipline_or_keyword) + disciplines_or_additional_keywords = list(individual_disciplines_or_keywords) + disciplines_mapped = set() additional_keywords_from_disciplines = set() - if disciplines_or_additional_keywords_raw: - for potential_discipline_item in disciplines_or_additional_keywords_raw: + if disciplines_or_additional_keywords: + for potential_discipline_item in disciplines_or_additional_keywords: if potential_discipline_item in self.MAPPING_FACH_TO_DISCIPLINES: # since not every "fach"-value is the same as our discipline-vocabs, mapping is necessary discipline = self.MAPPING_FACH_TO_DISCIPLINES.get(potential_discipline_item) @@ -296,7 +306,9 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc if disciplines_mapped: metadata_dict.update({'discipline': list(disciplines_mapped)}) if additional_keywords_from_disciplines: - keyword_list.extend(additional_keywords_from_disciplines) + keyword_set = set(keyword_list) + keyword_set.update(additional_keywords_from_disciplines) + keyword_list = list(keyword_set) metadata_dict.update({'keywords': keyword_list}) educational_context_raw: str = selector.xpath('bildungsebene/text()').get() @@ -318,7 +330,7 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc if educational_context_cleaned_up: educational_context_cleaned_up = list(educational_context_cleaned_up) educational_context = list() - # we need to map some values to our educatonalContext vocabulary + # we need to map some values to our educationalContext vocabulary for edu_context_item in educational_context_cleaned_up: if edu_context_item in self.MAPPING_EDU_CONTEXT.keys(): edu_context_temp = self.MAPPING_EDU_CONTEXT.get(edu_context_item) @@ -359,7 +371,9 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc else: additional_keywords_from_lo_lrt.add(lrt_possible_value) metadata_dict.update({'new_lrt': list(new_lrts)}) - keyword_list.extend(additional_keywords_from_lo_lrt) + keyword_set = 
set(keyword_list) + keyword_set.update(additional_keywords_from_lo_lrt) + keyword_list = list(keyword_set) metadata_dict.update({'keywords': keyword_list}) intended_end_user_role: str = selector.xpath('zielgruppe/text()').get() From f218744498385049468b31944a47ef3ca39d9267 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 3 Aug 2022 12:03:53 +0200 Subject: [PATCH 138/590] refactor: es_connector mapLicense()-method - refactor: license mapping in accordance to PEP 634 (match - case) -- docs: improve logging in the edge-case that a license.url was provided that isn't mapped by es_connector --- previously, URLs that weren't recognized were just (silently) dropped - docs: in case that license.internal can't be mapped, log a clear warning to make troubleshooting easier --- converter/es_connector.py | 104 +++++++++++++++++++++----------------- 1 file changed, 57 insertions(+), 47 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 3bdcc828..e5fe4394 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -214,54 +214,64 @@ def setNodePreview(self, uuid, item) -> bool: def mapLicense(self, spaces, license): if "url" in license: - if license["url"] == Constants.LICENSE_CC_BY_30: - spaces["ccm:commonlicense_key"] = "CC_BY" - spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_40: - spaces["ccm:commonlicense_key"] = "CC_BY" - spaces["ccm:commonlicense_cc_version"] = "4.0" - if license["url"] == Constants.LICENSE_CC_BY_NC_30: - spaces["ccm:commonlicense_key"] = "CC_BY_NC" - spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_NC_40: - spaces["ccm:commonlicense_key"] = "CC_BY_NC" - spaces["ccm:commonlicense_cc_version"] = "4.0" - if license["url"] == Constants.LICENSE_CC_BY_NC_ND_30: - spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" - spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_NC_ND_40: - spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" - spaces["ccm:commonlicense_cc_version"] = "4.0" - if license["url"] == Constants.LICENSE_CC_BY_NC_SA_30: - spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" - spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_NC_SA_40: - spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" - spaces["ccm:commonlicense_cc_version"] = "4.0" - if license["url"] == Constants.LICENSE_CC_BY_ND_30: - spaces["ccm:commonlicense_key"] = "CC_BY_ND" - spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_ND_40: - spaces["ccm:commonlicense_key"] = "CC_BY_ND" - spaces["ccm:commonlicense_cc_version"] = "4.0" - if license["url"] == Constants.LICENSE_CC_BY_SA_30: - spaces["ccm:commonlicense_key"] = "CC_BY_SA" - spaces["ccm:commonlicense_cc_version"] = "3.0" - if license["url"] == Constants.LICENSE_CC_BY_SA_40: - spaces["ccm:commonlicense_key"] = "CC_BY_SA" - spaces["ccm:commonlicense_cc_version"] = "4.0" - if license["url"] == Constants.LICENSE_CC_ZERO_10: - spaces["ccm:commonlicense_key"] = "CC_0" - spaces["ccm:commonlicense_cc_version"] = "1.0" - if license["url"] == Constants.LICENSE_PDM: - spaces["ccm:commonlicense_key"] = "PDM" + match license["url"]: + case Constants.LICENSE_CC_BY_30: + spaces["ccm:commonlicense_key"] = "CC_BY" + spaces["ccm:commonlicense_cc_version"] = "3.0" + case Constants.LICENSE_CC_BY_40: + spaces["ccm:commonlicense_key"] = "CC_BY" + spaces["ccm:commonlicense_cc_version"] = 
"4.0" + case Constants.LICENSE_CC_BY_NC_30: + spaces["ccm:commonlicense_key"] = "CC_BY_NC" + spaces["ccm:commonlicense_cc_version"] = "3.0" + case Constants.LICENSE_CC_BY_NC_40: + spaces["ccm:commonlicense_key"] = "CC_BY_NC" + spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_NC_ND_30: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" + spaces["ccm:commonlicense_cc_version"] = "3.0" + case Constants.LICENSE_CC_BY_NC_ND_40: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" + spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_NC_SA_30: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + spaces["ccm:commonlicense_cc_version"] = "3.0" + case Constants.LICENSE_CC_BY_NC_SA_40: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_ND_30: + spaces["ccm:commonlicense_key"] = "CC_BY_ND" + spaces["ccm:commonlicense_cc_version"] = "3.0" + case Constants.LICENSE_CC_BY_ND_40: + spaces["ccm:commonlicense_key"] = "CC_BY_ND" + spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_SA_30: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "3.0" + case Constants.LICENSE_CC_BY_SA_40: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_ZERO_10: + spaces["ccm:commonlicense_key"] = "CC_0" + spaces["ccm:commonlicense_cc_version"] = "1.0" + case Constants.LICENSE_PDM: + spaces["ccm:commonlicense_key"] = "PDM" + case _: + logging.warning(f"License.url {license['url']} could not be mapped to a license from Constants.\n" + f"If you are sure that you provided a correct URL to a license, " + f"please check if the license-mapping within es_connector.py is up-to-date.") if "internal" in license: - if license["internal"] == Constants.LICENSE_COPYRIGHT_LAW: - spaces["ccm:commonlicense_key"] = "COPYRIGHT_FREE" - if license["internal"] == Constants.LICENSE_CUSTOM: - spaces["ccm:commonlicense_key"] = "CUSTOM" - if "description" in license: - spaces["cclom:rights_description"] = license["description"] + match license["internal"]: + case Constants.LICENSE_COPYRIGHT_LAW: + spaces["ccm:commonlicense_key"] = "COPYRIGHT_FREE" + case Constants.LICENSE_CUSTOM: + spaces["ccm:commonlicense_key"] = "CUSTOM" + if "description" in license: + spaces["cclom:rights_description"] = license["description"] + case _: + logging.warning(f"Received a value for license['internal'] that is not recognized by es_connector." 
+ f"Please double-check if the provided value {license['internal']} is correctly " + f"mapped within Constants AND es_connector.") if "author" in license: spaces["ccm:author_freetext"] = license["author"] From 3a7e73478b32bcaf667ff52fcc365b04e05dad8a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 3 Aug 2022 12:09:42 +0200 Subject: [PATCH 139/590] fix: LomBase outdated string concatenation - replace (outdated, error-prone) "+"-string-concatenation with more rigid f-strings -- logging (info) messages were causing errors due to using (outdated) string-concatenation via "+" between different types --- converter/spiders/base_classes/lom_base.py | 25 ++++++++++------------ 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index fec95a7c..cbc15529 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -37,14 +37,13 @@ def __init__(self, **kwargs): self.remoteId = kwargs["remoteId"] if "cleanrun" in kwargs and kwargs["cleanrun"] == "true": logging.info( - "cleanrun requested, will force update for crawler " + self.name + f"cleanrun requested, will force update for crawler {self.name}" ) # EduSharing().deleteAll(self) self.forceUpdate = True if "resetVersion" in kwargs and kwargs["resetVersion"] == "true": logging.info( - "resetVersion requested, will force update + reset versions for crawler " - + self.name + f"resetVersion requested, will force update + reset versions for crawler {self.name}" ) # EduSharing().deleteAll(self) EduSharing.resetVersion = True @@ -70,21 +69,18 @@ def hasChanged(self, response=None) -> bool: return True if self.uuid: if self.getUUID(response) == self.uuid: - logging.info("matching requested id: " + self.uuid) + logging.info(f"matching requested id: {self.uuid}") return True return False if self.remoteId: if str(self.getId(response)) == self.remoteId: - logging.info("matching requested id: " + self.remoteId) + logging.info(f"matching requested id: {self.remoteId}") return True return False db = EduSharing().findItem(self.getId(response), self) - changed = db == None or db[1] != self.getHash(response) + changed = db is None or db[1] != self.getHash(response) if not changed: - logging.info( - "Item " + self.getId(response) + - "(uuid: " + db[0] + ") has not changed" - ) + logging.info(f"Item {self.getId(response)} (uuid: {db[0]}) has not changed") return changed # you might override this method if you don't want to import specific entries @@ -118,6 +114,7 @@ def html2Text(self, html): # directly use WebTools instead def getUrlData(self, url): return WebTools.getUrlData(url) + def mapResponse(self, response, fetchData=True): r = ResponseItemLoader(response=response) r.add_value("status", response.status) @@ -164,11 +161,11 @@ def getBase(self, response=None) -> BaseItemLoader: def getLOMGeneral(self, response=None) -> LomGeneralItemloader: return LomGeneralItemloader(response=response) - """ - return one or more lifecycle element - If you want to return more than one, use yield and generate multiple LomLifecycleItemloader - """ def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: + """ + return one or more lifecycle element + If you want to return more than one, use yield and generate multiple LomLifecycleItemloader + """ return LomLifecycleItemloader(response=response) def getLOMTechnical(self, response=None) -> LomTechnicalItemLoader: From 
3424f4ac1b24aa5a5633829cafdb9cbdb67fdee1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 3 Aug 2022 12:59:19 +0200 Subject: [PATCH 140/590] chore: GitHub workflow, flake8 version bump - GitHub workflow: bump Python version to 3.10 - requirements.txt: -- bump flake8 to v5.0.3 (2022-08-01) -- bump requests to v2.28.1 (2022-06-29) -- bump playwright to 1.24.1 (2022-08-01) --- .github/workflows/python.yaml | 6 +++--- requirements.txt | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python.yaml b/.github/workflows/python.yaml index 803edfad..fbf78619 100644 --- a/.github/workflows/python.yaml +++ b/.github/workflows/python.yaml @@ -15,12 +15,12 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.9] + python-version: ["3.10"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Cache pip diff --git a/requirements.txt b/requirements.txt index 9ebc65b0..96d13f2c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,12 +8,12 @@ scrapy-splash==0.8.0 python-dateutil==2.8.2 python-dotenv==0.20.0 Scrapy==2.6.1 -requests==2.27.1 +requests==2.28.1 vobject==0.9.6.1 xmltodict~=0.12.0 overrides==3.1.0 jmespath==1.0.0 -flake8==4.0.1 +flake8==5.0.3 pytest==7.1.1 extruct~=0.13.0 lxml~=4.6.3 @@ -24,4 +24,4 @@ itemadapter==0.5.0 six==1.16.0 certifi==2021.10.8 urllib3~=1.26.09 -playwright==1.21.0 \ No newline at end of file +playwright==1.24.1 \ No newline at end of file From 545f94866c2ddf6e3db1323f7d803646fe1c3816 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 3 Aug 2022 16:05:26 +0200 Subject: [PATCH 141/590] grundschulkoenig_spider v0.0.6 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - fix: description -- due to feedback from the "Rohdatenprüfung", we're now using the description from within the first paragraph of the DOM (previously this was our fallback) - fix: new_lrt ("Arbeitsblatt") -- each crawled item is now marked, by default, also as a worksheet --- converter/spiders/grundschulkoenig_spider.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py index c004d6c3..9b6de2dc 100644 --- a/converter/spiders/grundschulkoenig_spider.py +++ b/converter/spiders/grundschulkoenig_spider.py @@ -19,7 +19,7 @@ class GrundSchulKoenigSpider(CrawlSpider, LomBase): start_urls = ['https://www.grundschulkoenig.de/sitemap.xml?sitemap=pages&cHash=b8e1a6633393d69093d0ebe93a3d2616'] name = 'grundschulkoenig_spider' - version = "0.0.5" # last update: 2022-06-27 + version = "0.0.6" # last update: 2022-08-03 custom_settings = { "ROBOTSTXT_OBEY": False, # while there is no robots.txt, there is a 404-forward-page that gets misinterpreted by Scrapy @@ -47,10 +47,11 @@ class GrundSchulKoenigSpider(CrawlSpider, LomBase): "https://www.grundschulkoenig.de/mathe/", "https://www.grundschulkoenig.de/musikkunst/kunst/", "https://www.grundschulkoenig.de/musikkunst/musik/", + "https://www.grundschulkoenig.de/newsletter-abonnieren/", "https://www.grundschulkoenig.de/religion/", - "https://www.grundschulkoenig.de/weitere-faecher/", - "https://www.grundschulkoenig.de/vorschule/", "https://www.grundschulkoenig.de/suchergebnisse/", + 
"https://www.grundschulkoenig.de/vorschule/", + "https://www.grundschulkoenig.de/weitere-faecher/", ] def start_requests(self): @@ -117,10 +118,11 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry lom = LomBaseItemloader() general = LomGeneralItemloader(response=response) general.add_value('title', title) - description: str = response.xpath('//meta[@name="description"]/@content').get() + description = response.xpath('//div[@class="content-item module-headline-paragraph"]/p/text()').get() + # due to the generic descriptions of grundschulkoenig used in the headers, we're using the first paragraph + # as our description instead. Only if this XPath is somehow unavailable, we're falling back to the actual header if description is None: - # this is a workaround for (currently: 4) sub-pages that have no description in the header meta-fields - description = response.xpath('//div[@class="content-item module-headline-paragraph"]/p/text()').get() + description: str = response.xpath('//meta[@name="description"]/@content').get() general.add_value('description', description) # ToDo: check if "keywords" are available at the source when the next crawler update becomes necessary lom.add_value("general", general.load_item()) @@ -182,8 +184,10 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry if "/vorschule/" in response.url: vs.add_value('educationalContext', "Elementarbereich") vs.add_value('new_lrt', "65330f23-2802-4789-86ee-c21f9afe74b1") # "Frühkindliches Bildungsangebot und KITA" - vs.add_value('new_lrt', ["5098cf0b-1c12-4a1b-a6d3-b3f29621e11d", "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9"]) - # "Unterrichtsbaustein", "Webseite und Portal (stabil) + vs.add_value('new_lrt', ["5098cf0b-1c12-4a1b-a6d3-b3f29621e11d", + "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9", + "36e68792-6159-481d-a97b-2c00901f4f78"]) + # "Unterrichtsbaustein", "Webseite und Portal (stabil), "Arbeitsblatt" base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() From 380014cf453903e27817f185401cf68a260cfeb4 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 9 Aug 2022 17:05:03 +0200 Subject: [PATCH 142/590] update: license mappings in Constants.py - add previously missing licenses to LICENSE_MAPPINGS_INTERNAL --- converter/constants.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/constants.py b/converter/constants.py index cfa6164d..aa8ff04d 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -50,8 +50,11 @@ class Constants: LICENSE_MAPPINGS_INTERNAL: dict[str, list[str]] = { "CC_0": [LICENSE_CC_ZERO_10], "CC_BY": [LICENSE_CC_BY_40, LICENSE_CC_BY_30], - "CC_BY_SA": [LICENSE_CC_BY_SA_40, LICENSE_CC_BY_SA_30], + "CC_BY_NC": [LICENSE_CC_BY_NC_40, LICENSE_CC_BY_NC_30], "CC_BY_NC_ND": [LICENSE_CC_BY_NC_ND_40, LICENSE_CC_BY_NC_ND_30], + "CC_BY_NC_SA": [LICENSE_CC_BY_NC_SA_40, LICENSE_CC_BY_NC_SA_30], + "CC_BY_ND": [LICENSE_CC_BY_ND_40, LICENSE_CC_BY_ND_30], + "CC_BY_SA": [LICENSE_CC_BY_SA_40, LICENSE_CC_BY_SA_30], "PDM": [LICENSE_PDM], } From 724dcc43c16a4bc5a8b4822e764ab9061b381acc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 9 Aug 2022 17:22:48 +0200 Subject: [PATCH 143/590] digitallearninglab_spider v0.1.3, fix: lrmi_base (license) - fix: new_lrt -- "type"-field from the API contains the value "tool", but our method was trying to compare it with the string "tools" - fix: license -- Digital Learning Lab serves the "license"-field as a 
string containing a CC-pattern --- since LrmiBase expected the 'license'-field to contain a URL pointing towards a CC-license, this string was previously saved within the 'url' field, but couldn't be mapped -- add warnings if "license"-field within target's JSON_LD doesn't contain a URL - code cleanup lrmi_base.py: - fix: license -- throw warning if "license" within JSON_LD isn't URL --- converter/spiders/base_classes/lrmi_base.py | 22 +++++++-- .../spiders/digitallearninglab_spider.py | 49 +++++++++++++++---- 2 files changed, 59 insertions(+), 12 deletions(-) diff --git a/converter/spiders/base_classes/lrmi_base.py b/converter/spiders/base_classes/lrmi_base.py index 8c3404ae..245e1e22 100644 --- a/converter/spiders/base_classes/lrmi_base.py +++ b/converter/spiders/base_classes/lrmi_base.py @@ -1,3 +1,5 @@ +import re + from .lom_base import LomBase from .json_base import JSONBase import json @@ -7,6 +9,10 @@ # base spider mapping data via LRMI inside the html pages # Please override the lrmi_path if necessary and add your sitemap_urls +from ...constants import Constants +from ...items import LicenseItemLoader + + class LrmiBase(LomBase, JSONBase): friendlyName = "LRMI-Header Based spider" lrmi_path = '//script[@type="application/ld+json"]//text()' @@ -84,9 +90,19 @@ def getValuespaces(self, response): return valuespaces def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value("url", self.getLRMI("license", response=response)) - return license + license_loader: LicenseItemLoader = LomBase.getLicense(self, response) + license_raw = self.getLRMI("license", response=response) + if license_raw: + if license_raw.startswith("http"): + # the "license" field holds a valid URL -> use it directly as is + license_loader.add_value("url", license_raw) + else: + logging.warning(f"Could not map the received 'license'-value {license_raw} within LrmiBase. 
" + f"Please check Constants.py and LrmiBase for missing mappings/values.") + else: + logging.warning("LrmiBase: The 'license'-field returned within the JSON_LD doesn't seem to be a URL.\n" + "Please check if additional license-mapping is necessary within the spider itself.") + return license_loader def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) diff --git a/converter/spiders/digitallearninglab_spider.py b/converter/spiders/digitallearninglab_spider.py index 9da66419..b2591934 100644 --- a/converter/spiders/digitallearninglab_spider.py +++ b/converter/spiders/digitallearninglab_spider.py @@ -1,4 +1,5 @@ import html +import re import time import scrapy @@ -6,14 +7,15 @@ from converter.constants import Constants from converter.valuespace_helper import ValuespaceHelper -from .base_classes import LrmiBase +from .base_classes import LrmiBase, LomBase +from ..items import LicenseItemLoader class DigitallearninglabSpider(CrawlSpider, LrmiBase): name = "digitallearninglab_spider" friendlyName = "digital.learning.lab" url = "https://digitallearninglab.de" - version = "0.1.2" # last update: 2022-05-20 + version = "0.1.3" # last update: 2022-08-09 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -24,6 +26,7 @@ class DigitallearninglabSpider(CrawlSpider, LrmiBase): "AUTOTHROTTLE_START_DELAY": 0.25 } apiUrl = "https://digitallearninglab.de/api/%type?q=&sorting=latest&page=%page" + # Unterrichtsbausteine (API "count" value): 228 # tools: 182 # therefore we expect 410 items after a successful crawl @@ -78,7 +81,7 @@ def handle_entry(self, response): @staticmethod def get_new_lrt(response): - if response.meta["type"] == "tools": + if response.meta["type"] == "tool": return Constants.NEW_LRT_TOOL else: return Constants.NEW_LRT_MATERIAL @@ -111,8 +114,36 @@ def getLOMTechnical(self, response): return technical def getLicense(self, response): - license = LrmiBase.getLicense(self, response) - return license + license_loader: LicenseItemLoader = LomBase.getLicense(self, response) + # Footer: "Inhalte der Seite stehen unter CC BY-SA 4.0 Lizenz, wenn nicht anders angegeben." + license_loader.add_value('url', Constants.LICENSE_CC_BY_SA_40) # default for every item + license_raw = self.getLRMI("license", response=response) + if license_raw: + if license_raw.startswith("http"): + # the "license" field holds a valid URL -> use it directly as is + license_loader.add_value("url", license_raw) + elif license_raw.startswith("CC"): + # this mapping is necessary for digitallearninglab since it serves a CC-pattern within its + # "license"-field (e.g. "CC BY-NC-SA") + cc_pattern = re.compile(r'C{2}\s' + r'\w{2}' + r'(-\w{2})*') + if cc_pattern.search(license_raw) is not None: + license_prepared_for_mapping: str = license_raw.replace(' ', '_') + license_prepared_for_mapping = license_prepared_for_mapping.replace('-', '_') + if license_prepared_for_mapping in Constants.LICENSE_MAPPINGS_INTERNAL: + license_mapped = Constants.LICENSE_MAPPINGS_INTERNAL.get(license_prepared_for_mapping) + license_mapped = license_mapped[0] + # assumption: the most recent CC-Version 4.0 is used for all materials + license_loader.replace_value('url', license_mapped) + else: + self.logger.warning(f"The specified value {license_prepared_for_mapping} can't be mapped to " + f"Constants.LICENSE_MAPPINGS_INTERNAL." + f"Please check Constants.py and LrmiBase for missing mappings/values.") + else: + self.logger.warning(f"Could not map the received 'license'-value {license_raw} . 
" + f"Please check Constants.py and LrmiBase for missing mappings/values.") + return license_loader def getValuespaces(self, response): valuespaces = LrmiBase.getValuespaces(self, response) @@ -122,10 +153,10 @@ def getValuespaces(self, response): response.xpath( '//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-level")]/parent::*//text()' ) - .get() - .replace("Stufe", "") - .strip() - .split(" - ") + .get() + .replace("Stufe", "") + .strip() + .split(" - ") ) if len(range): valuespaces.add_value( From 0d122b56bf8d3fa90e78e518d4d1fcdfb4ad0184 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 9 Aug 2022 18:16:02 +0200 Subject: [PATCH 144/590] fix: Error when trying to run spider without custom_settings - while the get_settings_for_crawler()-method worked for crawlers that carried an (empty) custom_settings dictionary as class attributes -- it threw errors when custom_settings wasn't set for a spider -- (@MRuecklCC came up with this elegant solution during our recent pair-programming session, thank you!) --- converter/pipelines.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index fa9cd2b7..e6e0936e 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -455,7 +455,7 @@ def create_thumbnails_from_image_bytes(self, image, item, settings): def get_settings_for_crawler(spider): all_settings = get_project_settings() - crawler_settings = getattr(spider, "custom_settings", {}) + crawler_settings = settings.BaseSettings(getattr(spider, "custom_settings") or {}, 'spider') if type(crawler_settings) == dict: crawler_settings = settings.BaseSettings(crawler_settings, 'spider') for key in crawler_settings.keys(): From 928805abc71a0fe20a37374103d13fd3617cbcd7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 15 Aug 2022 16:26:38 +0200 Subject: [PATCH 145/590] fobizz_spider v0.0.3 - add: conditionsOfAccess defaults to "login required" for all Fobizz learning materials - add: valuespaces.educationalContext and mapping - fix: valuespaces.discipline - fix: technical.location - add: general.keyword -- Values that appear within "discipline"- or "educationalContext" as unpredictable edge-cases, e.g. 
a string beginning with "Other: " are now handled as additional keywords --- converter/spiders/fobizz_spider.py | 69 +++++++++++++++++++++++++----- 1 file changed, 58 insertions(+), 11 deletions(-) diff --git a/converter/spiders/fobizz_spider.py b/converter/spiders/fobizz_spider.py index 14fde645..82d752a9 100644 --- a/converter/spiders/fobizz_spider.py +++ b/converter/spiders/fobizz_spider.py @@ -14,11 +14,6 @@ jslde = JsonLdExtractor() -about_maps = { - "Lernfeld Gesundheit (LF16)": "Gesundheit", - "Handlungsfeld Gesellschaft": "Gesellschaftskunde" -} - class FobizzSpider(scrapy.Spider, LomBase): """ @@ -28,7 +23,7 @@ class FobizzSpider(scrapy.Spider, LomBase): start_urls = ['https://plattform.fobizz.com/sitemap'] name = 'fobizz_spider' - version = '0.0.2' # last update: 2022-05-23 + version = '0.0.3' # last update: 2022-08-15 overview_pages_without_a_json_ld = [ "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Religion", @@ -64,6 +59,22 @@ class FobizzSpider(scrapy.Spider, LomBase): "https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Vocational%20School" ] + MAPPING_ABOUT_TO_DISCIPLINE = { + "Handlungsfeld Gesellschaft": "Gesellschaftskunde", + "Lernfeld Gesundheit (LF16)": "Gesundheit", + "Media": "media education", + "Unspecified": "", + } + + MAPPING_EDUCATIONALCONTEXT = { + "Elementary School": "elementary school", # Grundschule + "Lower Grade": "Secondary I", # Unterstufe? Kl. 5-7 (untere Hälfte der Sekundarstufe I) + "Middle Level": "Secondary I", # Mittelstufe? Kl. 7-9 (obere Hälfte der Sekundarstufe I) + "Upper School": "Secondary II", # Oberstufe? + "Vocational School": "vocational school", + "Special School": "special education", + } + def getId(self, response: scrapy.http.Response = None) -> str: return parse.urlparse(response.meta["sitemap_entry"].loc).path @@ -100,6 +111,7 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE data = jslde.extract(response.text)[0] response.meta['sitemap_entry'] = sitemap_entry base = super().getBase(response=response) + base.replace_value('sourceId', response.url) base.add_value("response", super().mapResponse(response).load_item()) # we assume that content is imported. 
Please use replace_value if you import something different base.add_value('thumbnail', data.get("thumbnailUrl", None)) @@ -110,16 +122,16 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE lom = LomBaseItemloader() general = LomGeneralItemloader(response=response) + additional_keywords = set() general.add_value('title', data.get("name", None)) general.add_value('description', data.get("description", None)) general.add_value("identifier", data.get("identifier", None)) for language in data.get("language", []): general.add_value("language", language) - lom.add_value("general", general.load_item()) technical = LomTechnicalItemLoader() technical.add_value('format', 'text/html') - technical.add_value('location', sitemap_entry.loc) + technical.add_value('location', response.url) lom.add_value("technical", technical.load_item()) lifecycle = LomLifecycleItemloader() @@ -128,7 +140,6 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE lom.add_value("educational", edu.load_item()) # classification = LomClassificationItemLoader() # lom.add_value("classification", classification.load_item()) - base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) @@ -136,12 +147,43 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE vs.add_value("intendedEndUserRole", audience) for discipline in (d.strip() for d in data.get("about", []).split(",")): - if discipline in about_maps.keys(): - discipline = about_maps[discipline] + if "Other: " in discipline: + # edge-case handling for https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Other + # the discipline field may also hold a (freetext) String beginning with "Other: " + # since these values can't be mapped to disciplines, their information is a suitable keyword candidate + discipline_other: str = discipline.replace("Other: ", "") + if discipline_other: + # making sure that we're not adding empty '' strings + vs.add_value('discipline', discipline_other) # this will work for values like "Other: Spanisch", + # but Strings like "Digitales Gestalten/Gestaltungstechnik" would be lost, since they can't be + # mapped within the pipeline, therefore saving this value as an additional_keyword + additional_keywords.add(discipline_other) + if discipline in self.MAPPING_ABOUT_TO_DISCIPLINE.keys(): + discipline = self.MAPPING_ABOUT_TO_DISCIPLINE[discipline] vs.add_value('discipline', discipline) for lrt in data.get("type", []): vs.add_value('new_lrt', lrt) + vs.add_value('conditionsOfAccess', 'login required') # a login is always required to download the learning + # materials as .pdf files + + for educational_context in (edu_context_candidate.strip() for + edu_context_candidate in data.get("oeh:educationalContext", []).split(",")): + if "Other: " in educational_context: + # edge-case handling for https://plattform.fobizz.com/unterrichtsmaterialien/klassenstufen/Other + # a typical edge-case: "oeh:educationalContext": "Lower Grade, Middle Level, Other: Schach " + educational_context_other: str = educational_context.replace("Other: ", "") + if educational_context_other: + additional_keywords.add(educational_context_other) + elif educational_context in self.MAPPING_EDUCATIONALCONTEXT.keys(): + educational_context = self.MAPPING_EDUCATIONALCONTEXT[educational_context] + vs.add_value('educationalContext', educational_context) + elif educational_context not in self.MAPPING_EDUCATIONALCONTEXT.keys(): + # some 
educational_context values can't be mapped, but are suitable for keywords, e.g.: + # "Technology", "Personal Education", "Foreign Languages" + additional_keywords.add(educational_context) + vs.add_value('educationalContext', educational_context) + base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() @@ -153,6 +195,11 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE permissions = super().getPermissions(response) + if additional_keywords: + general.add_value("keyword", list(additional_keywords)) + lom.add_value("general", general.load_item()) + base.add_value("lom", lom.load_item()) + base.add_value("permissions", permissions.load_item()) response_loader = ResponseItemLoader() response_loader.add_value('url', response.url) From 1b536d7af61acbfd81d1540b79ec50ee467a365e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 15 Aug 2022 20:39:26 +0200 Subject: [PATCH 146/590] update sample_spider_alternative - by calling the .load_item() methods later we are able to .replace_value() on specific fields like "keyword" up until the last moment -- this became necessary after dealing with several sources that returned messy metadata (e.g. if you want to save "invalid" valuespaces.discipline values to general.keyword (since those values are still holding useful metadata / information, but simply don't belong into the "discipline"-field) --- .../spiders/sample_spider_alternative.py | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 33198f9f..c3ea5b3b 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -103,9 +103,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # e.g.: the unique identifier might be the URL to a material general.add_value('identifier', response.url) # TODO: don't forget to add key-value-pairs for 'title', 'keyword' and 'description'! - # once we've added all available values to the necessary keys in our LomGeneralItemLoader, - # we call the load_item()-method to return a (now filled) LomGeneralItem to the LomBaseItemLoader - lom.add_value('general', general.load_item()) technical = LomTechnicalItemLoader() # TODO: fill "technical"-keys with values for @@ -121,9 +118,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # or replaced with: # technical.replace_value('key', 'value') technical.add_value('format', 'text/html') # e.g. 
if the learning object is a web-page - technical.add_value('location', response.url) # if the the learning object has a unique URL that's being + technical.add_value('location', response.url) # if the learning object has a unique URL that's being # navigated by the crawler - lom.add_value('technical', technical.load_item()) lifecycle = LomLifecycleItemloader() # TODO: fill "lifecycle"-keys with values for @@ -137,7 +133,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - uuid optional lifecycle.add_value('role', 'author') # supported roles: "author" / "editor" / "publisher" # for available roles mapping, please take a look at converter/es_connector.py - lom.add_value('lifecycle', lifecycle.load_item()) educational = LomEducationalItemLoader() # TODO: fill "educational"-keys with values for @@ -149,7 +144,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - typicalAgeRange optional # - difficulty optional # - typicalLearningTime optional - lom.add_value('educational', educational.load_item()) classification = LomClassificationItemLoader() # TODO: fill "classification"-keys with values for @@ -158,11 +152,9 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - taxonPath optional # - description optional # - keyword optional - lom.add_value('classification', classification.load_item()) # once you've filled "general", "technical", "lifecycle" and "educational" with values, # the LomBaseItem is loaded into the "base"-BaseItemLoader - base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() # for possible values, either consult https://vocabs.openeduhub.de @@ -197,7 +189,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - oer optional # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/oer.ttl) vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) - base.add_value('valuespaces', vs.load_item()) lic = LicenseItemLoader() # TODO: fill "license"-keys with values for @@ -209,7 +200,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - internal optional # - description optional # - expirationDate optional (for content that expires, e.g. ÖR-Mediatheken) - base.add_value('license', lic.load_item()) # Either fill the PermissionItemLoader manually (not necessary most of the times) permissions = PermissionItemLoader() @@ -222,7 +212,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - mediacenters optional # - autoCreateGroups optional # - autoCreateMediacenters optional - base.add_value('permissions', permissions.load_item()) # Either fill the ResponseItemLoader manually (not necessary most of the time) response_loader = ResponseItemLoader() @@ -237,7 +226,20 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - headers optional # - cookies optional # - har optional - base.add_value('response', response_loader.load_item()) - # once all scrapy.Item are loaded into our "base", we yield the BaseItem by calling the .load_item() method + # once we've added all available values to the necessary keys in our LomGeneralItemLoader, + # we call the load_item()-method to return a (now filled) LomGeneralItem to the LomBaseItemLoader. 
+ # We do the same for every other nested Item within LomBaseItem as well: + lom.add_value('general', general.load_item()) + lom.add_value('technical', technical.load_item()) + lom.add_value('lifecycle', lifecycle.load_item()) + lom.add_value('educational', educational.load_item()) + lom.add_value('classification', classification.load_item()) + # after LomBaseItem is filled with metadata, we build and return it to our BaseItem + base.add_value('lom', lom.load_item()) + base.add_value('license', lic.load_item()) + base.add_value('valuespaces', vs.load_item()) + base.add_value('permissions', permissions.load_item()) + base.add_value('response', response_loader.load_item()) + # once all scrapy.Items are loaded into our "base", we yield the BaseItem by calling the .load_item() method yield base.load_item() From cf4a78b5b76434b72ef7354e54d7cab1b3e040e9 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 16 Aug 2022 13:00:24 +0200 Subject: [PATCH 147/590] materialnetzwerk_spider v0.0.7 - hotfix: discipline "Erdkunde" did not get properly mapped in the pipelines, even though the "Erdkunde"-string is correctly saved to the item -- since values like "Geografie", "Geography" and "Geographie" result in a properly mapped discipline fields, this problem seems to be an edge-case from the SKOS vocab for this specific entry --- the next crawler version should get rid of the (hopefully then unnecessary) discipline_mapping entry --- converter/spiders/materialnetzwerk_spider.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/materialnetzwerk_spider.py b/converter/spiders/materialnetzwerk_spider.py index a7ed1956..d978daf1 100644 --- a/converter/spiders/materialnetzwerk_spider.py +++ b/converter/spiders/materialnetzwerk_spider.py @@ -15,7 +15,7 @@ class MaterialNetzwerkSpider(CrawlSpider, LomBase): name = "materialnetzwerk_spider" friendlyName = "Materialnetzwerk.org" - version = "0.0.6" # last update: 2022-04-24 + version = "0.0.7" # last update: 2022-08-16 start_urls = [ # 'https://editor.mnweg.org/?p=1&materialType=bundle', # this doesn't list any materials since they're loaded dynamically @@ -41,6 +41,9 @@ class MaterialNetzwerkSpider(CrawlSpider, LomBase): 'AES': "Ernährung und Hauswirtschaft", # Ernährung und Hauswirtschaft 'Erdkunde, Gemeinschaftskunde, Geschichte': ['Erdkunde', 'Gesellschaftskunde', 'Sozialkunde', 'Geschichte'], # Gemeinschaftskunde can be either "Gesellschaftskunde" or "Sozialkunde" (depending on the county) + 'Erdkunde': "Geography", # mapping "Erdkunde" shouldn't be necessary, but the Vocab's altLabel for this entry + # needs a bugfix. 
+ # ToDo: remove this workaround/hotfix in v0.0.8 and see if "Erdkunde" gets properly mapped by the pipeline } # debug_disciplines = set() From 2883665341baba6c1a67842c46ab880c58ea0658 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 23 Sep 2022 10:35:56 +0200 Subject: [PATCH 148/590] fix:es_connector obey multiple cookies of first response and re-send them on later responses (loosing sessions in Cluster-Envs otherwise) --- converter/es_connector.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index e5fe4394..2f5524b2 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -563,7 +563,10 @@ def initCookie(): ) isAdmin = json.loads(auth.text)["isAdmin"] if isAdmin: - EduSharing.cookie = auth.headers["SET-COOKIE"].split(";")[0] + cookies = [] + for cookie in auth.headers["SET-COOKIE"].split(","): + cookies.append(cookie.split(";")[0]) + EduSharing.cookie = ";".join(cookies) return auth def initApiClient(self): From 8a040e37eaedff192da16de8515f194067553a72 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 13 Oct 2022 19:03:08 +0200 Subject: [PATCH 149/590] feat:es connector support metadata contributor --- converter/es_connector.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 2f5524b2..295a6abf 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -37,6 +37,8 @@ class EduSharingConstants: "publisher": "ccm:lifecyclecontributer_publisher", "author": "ccm:lifecyclecontributer_author", "editor": "ccm:lifecyclecontributer_editor", + "metadata_creator": "ccm:metadatacontributer_creator", + "metadata_provider": "ccm:metadatacontributer_provider", } From 9b8cbf97b277defbc54d43606dd84e17f3e8807a Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 13 Oct 2022 19:03:29 +0200 Subject: [PATCH 150/590] rpi virtuell: map provider, fix mapping licenses, fix mapping costs --- converter/spiders/rpi_virtuell_spider.py | 65 +++++++++++++----------- 1 file changed, 35 insertions(+), 30 deletions(-) diff --git a/converter/spiders/rpi_virtuell_spider.py b/converter/spiders/rpi_virtuell_spider.py index ff4788a6..d0c20908 100644 --- a/converter/spiders/rpi_virtuell_spider.py +++ b/converter/spiders/rpi_virtuell_spider.py @@ -22,7 +22,7 @@ class RpiVirtuellSpider(CrawlSpider, LomBase): friendlyName = "rpi-virtuell" start_urls = ['https://material.rpi-virtuell.de/wp-json/mymaterial/v1/material/'] - version = "0.0.5" + version = "0.0.6" custom_settings = { 'ROBOTSTXT_OBEY': False, @@ -74,6 +74,15 @@ class RpiVirtuellSpider(CrawlSpider, LomBase): 'Zur nicht kommerziellen Wiederverwendung und Veränderung gekennzeichnet': Constants.LICENSE_CC_BY_NC_SA_30, } + mapping_copyright_url = { + '?fwp_lizenz=non-commercial-remixable': Constants.LICENSE_CC_BY_NC_SA_30, + '?fwp_lizenz=non-commercial-copyable': Constants.LICENSE_CC_BY_NC_ND_40, + '?fwp_lizenz=remixable': Constants.LICENSE_CC_BY_SA_40, + '?fwp_verfuegbarkeit=kostenpflichtig': Constants.LICENSE_COPYRIGHT_LAW + # unclear to map to anything + # '?fwp_lizenz=copyable': Constants. 
+ } + mapping_media_types = {'Anforderungssituation': "", 'Arbeitsblatt': "worksheet", 'Audio': "audio", @@ -265,6 +274,7 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) # logging.debug("DEBUG inside get_metadata_from_review_url: response type = ", type(response), # "url =", response.url) + base = BaseItemLoader() base.add_value("sourceId", response.url) date_modified: str = response.xpath('//meta[@property="og:article:modified_time"]/@content').get() @@ -372,36 +382,23 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) license_regex_free_after_signup = re.compile(r'kostenfrei nach Anmeldung') license_regex_with_costs = re.compile(r'kostenpflichtig') - license_description = response.xpath('//div[@class="material-detail-meta-access material-meta"]' - '/div[@class="material-meta-content-entry"]/text()').get() - - if license_description is not None: - license_description = html.unescape(license_description.strip()) - lic.add_value("description", license_description) - - cc_by_nc_nd = license_regex_nc_reuse.search(license_description) - cc_by_nc_sa = license_regex_nc_reuse_and_change.search(license_description) - # if the RegEx search finds something, it returns a match-object. otherwise, by default it returns None - if cc_by_nc_nd is not None: - lic.add_value("url", Constants.LICENSE_CC_BY_NC_ND_40) - if cc_by_nc_sa is not None: - lic.add_value("url", Constants.LICENSE_CC_BY_NC_SA_30) - # if a material is "frei zugänglich", set price to none, but don't override a previously set CC-license - if license_regex_free_access.search(license_description) is not None: - vs.add_value("price", "no") - # only if "frei zugänglich" is the only license-description this will trigger: - # see https://rpi-virtuell.de/nutzungsbedingungen/ (5.) 
- if license_regex_free_access.match(license_description) is not None: - lic.add_value("url", Constants.LICENSE_CC_BY_SA_40) - if license_regex_with_costs.search(license_description): - lic.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) - vs.add_value("price", "yes") - if license_regex_free_after_signup.search(license_description): - vs.add_value("price", "yes") - vs.add_value("conditionsOfAccess", "login") + for key in self.mapping_copyright_url: + if response.xpath('//a[contains(@href,"' + key + '")]').get(): + lic.add_value("url", self.mapping_copyright_url[key]) + break + + # by default, all materials should be CC_BY_SA - according to the rpi-virtuell ToS + # changed/decided on 2022-10-13: We can't assume that this license is correct and will not set any license + #lic.replace_value("url", Constants.LICENSE_CC_BY_SA_40) + + + if response.xpath('//a[contains(@href,"' + "?fwp_verfuegbarkeit=kostenpflichtig" + '")]').get(): + vs.add_value("price", "yes") else: - # by default, all materials should be CC_BY_SA - according to the rpi-virtuell ToS - lic.replace_value("url", Constants.LICENSE_CC_BY_SA_40) + vs.add_value("price", "no") + if response.xpath('//a[contains(@href,"' + "?fwp_verfuegbarkeit=kostenfrei-nach-anmeldung" + '")]').get(): + vs.add_value("conditionsOfAccess", "login") + authors = list() # the author should end up in LOM lifecycle, but the returned metadata are too messily formatted to parse them # by easy patterns like (first name) + (last name) @@ -420,6 +417,14 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) base.add_value("license", lic.load_item()) + contributor = LomLifecycleItemloader() + publisher = response.xpath('//div[@class="detail-herkunft-organisation"]//a[2]/text()').get() + if publisher: + contributor.add_value("role", "metadata_provider") + contributor.add_value("organization", publisher) + contributor.add_value("url", response.xpath('//div[@class="detail-herkunft-organisation"]/a/@href').get()) + lom.add_value("lifecycle", contributor.load_item()) + permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) From 76de856e16de2d7b2e3e4a43f597db2688ff4702 Mon Sep 17 00:00:00 2001 From: tsimon Date: Thu, 13 Oct 2022 20:05:55 +0200 Subject: [PATCH 151/590] add pyOpenSSL requirement --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 96d13f2c..fa905c25 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,4 +24,5 @@ itemadapter==0.5.0 six==1.16.0 certifi==2021.10.8 urllib3~=1.26.09 -playwright==1.24.1 \ No newline at end of file +playwright==1.24.1 +pyOpenSSL==22.0.0 \ No newline at end of file From c8a5ba38129fe784a902c0649f984c0c595fc865 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 13 Oct 2022 20:21:02 +0200 Subject: [PATCH 152/590] fix:keep dev changes after merge --- converter/spiders/base_classes/lom_base.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 08b12110..cbc15529 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -80,10 +80,7 @@ def hasChanged(self, response=None) -> bool: db = EduSharing().findItem(self.getId(response), self) changed = db is None or db[1] != self.getHash(response) if not changed: - logging.info( - "Item " + str(self.getId(response)) + - "(uuid: " + db[0] + ") has not changed" - ) + 
logging.info(f"Item {self.getId(response)} (uuid: {db[0]}) has not changed") return changed # you might override this method if you don't want to import specific entries From d79cd5f2458c03f066917e2cf472d464df7d020a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Aug 2022 17:53:27 +0200 Subject: [PATCH 153/590] sodix_spider v0.1.9 - add: oer_filter -- controllable via global variable OER_FILTER (default: False) --- either use spider arguments or the .env file to set the desired behavior - by using the OER-Filter only OER-materials get crawled (all other (copyright/ambiguous) licenses are skipped while crawling) --- converter/spiders/sodix_spider.py | 54 ++++++++++++++++++++++++++++--- 1 file changed, 50 insertions(+), 4 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index a0330ff0..ae7ea7ef 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -1,5 +1,4 @@ import json -from typing import Any import requests import scrapy @@ -8,20 +7,34 @@ from converter.items import * from .base_classes import JSONBase from .base_classes import LomBase -# Spider to fetch RSS from planet schule from .. import env class SodixSpider(scrapy.Spider, LomBase, JSONBase): + """ + Crawler for learning materials from SODIX GraphQL API. + This crawler cannot run without login-data. Please make sure that you have the necessary settings saved + to your .env file: + SODIX_SPIDER_USERNAME="your_username" + SODIX_SPIDER_PASSWORD="your_password" + SODIX_SPIDER_OER_FILTER=True/False + """ name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.1.8" # last update: 2022-07-11 + version = "0.1.9" # last update: 2022-08-25 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { "ROBOTSTXT_OBEY": False # returns an 401-error anyway, we might as well skip this scrapy.Request } + OER_FILTER = False # flag used for controlling the crawling process between two modes + # - by default (OER_FILTER=False), ALL entries from the GraphQL API are crawled. + # - If OER_FILTER=TRUE, only materials with OER-compatible licenses are crawled (everything else gets skipped) + # control the modes either + # - via spider arguments: "scrapy crawl sodix_spider -a oer_filter=true" + # - or by setting SODIX_SPIDER_OER_FILTER=True in your .env file + NOT_OER_THROWAWAY_COUNTER = 0 # counts the amount of skipped items, in case that the OER-Filter is enabled MAPPING_LRT = { "APP": "application", @@ -89,7 +102,10 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): # DEBUG_SUBJECTS = set() - def __init__(self, **kwargs): + def __init__(self, oer_filter=False, **kwargs): + if oer_filter == "True" or oer_filter == "true": + # scrapy arguments are handled as strings + self.OER_FILTER = True LomBase.__init__(self, **kwargs) def mapResponse(self, response): @@ -224,6 +240,17 @@ def parse_request(self, response): for item in metadata_items: response_copy = response.copy() response_copy.meta["item"] = item + # ToDo: don't handle an entry if the license is not OER-compatible? 
+ # (DropItem exceptions can only be raised from the pipeline) + if self.OER_FILTER is True or env.get_bool('SODIX_SPIDER_OER_FILTER', default=False): + # controlling the OER-Filter via spider arguments is useful for debugging, but we also need + # an easy way to control the spider via the .env file (while running as a Docker container) + if self.license_is_oer(response_copy) is False: + self.NOT_OER_THROWAWAY_COUNTER += 1 + self.logger.info(f"Item dropped due to OER-incompatibility. \n" + f"Total amount of items dropped so far: " + f"{self.NOT_OER_THROWAWAY_COUNTER}") + continue if self.hasChanged(response_copy): yield self.handleEntry(response_copy) # ToDo: links to binary files (.jpeg) cause errors while building the BaseItem, we might have to filter @@ -243,6 +270,7 @@ def getBase(self, response) -> BaseItemLoader: base.add_value( "publisher", publisher['title'] ) + # ToDo: use 'source'-field from the GraphQL item for 'origin'? return base def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: @@ -296,6 +324,24 @@ def getLOMTechnical(self, response) -> LomTechnicalItemLoader: ) return technical + def license_is_oer(self, response) -> bool: + """ + Checks if the Item is licensed under an OER-compatible license. + Returns True if license is OER-compatible. (CC-BY/CC-BY-SA/CC0/PublicDomain) + Otherwise returns False. + """ + license_name: str = self.get("license.name", json=response.meta["item"]) + if license_name: + if license_name in self.MAPPING_LICENSE_NAMES: + license_internal_mapped = self.MAPPING_LICENSE_NAMES.get(license_name) + return license_internal_mapped in [ + Constants.LICENSE_CC_BY_30, + Constants.LICENSE_CC_BY_40, + Constants.LICENSE_CC_BY_SA_30, + Constants.LICENSE_CC_BY_SA_40, + Constants.LICENSE_CC_ZERO_10, + Constants.LICENSE_PDM] + def getLicense(self, response) -> LicenseItemLoader: license_loader = LomBase.getLicense(self, response) From c73a6ac133bd6436dac9fa8ced4d8121780dff03 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 4 Oct 2022 17:22:36 +0200 Subject: [PATCH 154/590] cleanup:env --- .env.example | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 .env.example diff --git a/.env.example b/.env.example deleted file mode 100644 index 21080049..00000000 --- a/.env.example +++ /dev/null @@ -1,5 +0,0 @@ -CRAWLER=wirlernenonline_spider -LOG_LEVEL=INFO -EDU_SHARING_BASE_URL=http://host.docker.internal/edu-sharing/ -EDU_SHARING_USERNAME=admin -EDU_SHARING_PASSWORD=Joint#17# \ No newline at end of file From 8da8895d4f4f7fa5cbb5a1d04df6d2e6314cd9a3 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 4 Oct 2022 17:22:54 +0200 Subject: [PATCH 155/590] feat:add config for other mds id's via env file --- converter/.env.example | 3 +++ converter/es_connector.py | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/converter/.env.example b/converter/.env.example index ab02649b..4bc949e7 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -26,6 +26,9 @@ EDU_SHARING_BASE_URL = "http://localhost:8080/edu-sharing/" EDU_SHARING_USERNAME = "admin" EDU_SHARING_PASSWORD = "admin" +# Metadataset to be used for generated nodes. 
You may use "default" to use the default mds of the repository +# EDU_SHARING_METADATASET=mds_oeh + # If set to true, don't upload to (above mentioned) Edu-Sharing instance DRY_RUN = True diff --git a/converter/es_connector.py b/converter/es_connector.py index 295a6abf..02ac59fd 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -402,8 +402,10 @@ def transformItem(self, uuid, spider, item): # educationalContext = Field(output_processor=JoinMultivalues()) # learningResourceType = Field(output_processor=JoinMultivalues()) # sourceContentType = Field(output_processor=JoinMultivalues()) - spaces["cm:edu_metadataset"] = "mds_oeh" - spaces["cm:edu_forcemetadataset"] = "true" + mdsId = env.get("EDU_SHARING_METADATASET", allow_null=True, default="mds_oeh") + if mdsId != "default": + spaces["cm:edu_metadataset"] = mdsId + spaces["cm:edu_forcemetadataset"] = "true" for key in spaces: if type(spaces[key]) is tuple: spaces[key] = list([x for y in spaces[key] for x in y]) From f574f2785a3213c47985db09823a643bb20ed110 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 4 Oct 2022 18:00:19 +0200 Subject: [PATCH 156/590] feat:add custom pipelines for mapping --- converter/.env.example | 5 +++++ converter/pipelines.py | 8 ++++++++ converter/settings.py | 6 ++++++ 3 files changed, 19 insertions(+) diff --git a/converter/.env.example b/converter/.env.example index 4bc949e7..4418fb95 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -32,6 +32,11 @@ EDU_SHARING_PASSWORD = "admin" # If set to true, don't upload to (above mentioned) Edu-Sharing instance DRY_RUN = True +# you can add one or more custom pipelines here to trigger +# the syntax is: pipeline.package.id:PRIORITY[,pipeline.package.id:PRIORITY,...] +# Use this if you e.g. 
want to do custom property mapping for any crawler before storing the data +# CUSTOM_PIPELINES = "converter.pipelines.ExampleLoggingPipeline:100" + # your youtube api key (required for youtube crawler) YOUTUBE_API_KEY = "" diff --git a/converter/pipelines.py b/converter/pipelines.py index e6e0936e..2f109cae 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -662,3 +662,11 @@ def process_item(self, item, spider): log.info("DRY RUN scraped {}".format(item["response"]["url"])) # self.exporter.export_item(item) return item + + +# example pipeline which simply outputs the item in the log +class ExampleLoggingPipeline(BasicPipeline): + def process_item(self, item, spider): + log.info(item) + # self.exporter.export_item(item) + return item \ No newline at end of file diff --git a/converter/settings.py b/converter/settings.py index 59b4c15c..141cf018 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -120,6 +120,12 @@ ): 1000, } +# add custom pipelines from the .env file, if any +ADDITIONAL_PIPELINES = env.get("CUSTOM_PIPELINES", True) +if ADDITIONAL_PIPELINES: + for pipe in map(lambda p: p.split(":"), ADDITIONAL_PIPELINES.split(",")): + ITEM_PIPELINES[pipe[0]] = int(pipe[1]) + # Enable and configure the AutoThrottle extension (disabled by default) # See https://docs.scrapy.org/en/latest/topics/autothrottle.html AUTOTHROTTLE_ENABLED = False From 70bc4d0fd824b1ce666510d6491ad94205f14386 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 6 Oct 2022 10:42:21 +0200 Subject: [PATCH 157/590] sodix_spider v0.2.0 (WIP) - add: thumbnails are grabbed by priority from different fields with two fallbacks - add: educationalContext mapping and 'schoolType'-fallback - improve: OER-Filter -- if set to True, the 'recordStatus'-parameter is sent during the GraphQL request --- converter/spiders/sodix_spider.py | 59 +++++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 11 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index ae7ea7ef..6df976a1 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -22,7 +22,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.1.9" # last update: 2022-08-25 + version = "0.2.0" # last update: 2022-10-06 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -81,6 +81,18 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): "Fort- und Weiterbildung": "Fortbildung" } + MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT = { + "Berufsschule": "Berufliche Bildung", + "Fachoberschule": "Sekundarstufe II", + # "Förderschule": "Förderschule", + "Gesamtschule": "Sekundarstufe I", + "Grundschule": "Primarstufe", + "Gymnasium": "Sekundarstufe II", + "Kindergarten": "Elementarbereich", + "Mittel- / Hauptschule": "Sekundarstufe I", + "Realschule": "Sekundarstufe I" + } + MAPPING_INTENDED_END_USER_ROLE = { "pupils": "learner", } @@ -100,8 +112,6 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): 'keine Angaben (gesetzliche Regelung)': Constants.LICENSE_CUSTOM, } - # DEBUG_SUBJECTS = set() - def __init__(self, oer_filter=False, **kwargs): if oer_filter == "True" or oer_filter == "true": # scrapy arguments are handled as strings @@ -134,12 +144,19 @@ def startRequest(self, page=0): "password": env.get("SODIX_SPIDER_PASSWORD"), } ).json()['access_token'] + if self.OER_FILTER is True: + recordstatus_parameter = ", recordStatus: 
ACTIVATED" + # by using the recordStatus parameter during the GraphQL query, only a subset of available items is returned + # by the Sodix API: OER-only items carry the recordStatus: ACTIVATED + else: + recordstatus_parameter = "" + # if OER-Filter is off (default), the GraphQL query will return all items (including non-OER materials) return scrapy.Request( url=self.apiUrl, callback=self.parse_request, body=json.dumps({ "query": f"""{{ - findAllMetadata(page: {page}, pageSize: {self.page_size}) {{ + findAllMetadata(page: {page}, pageSize: {self.page_size}{recordstatus_parameter}) {{ id identifier title @@ -263,9 +280,17 @@ def handleEntry(self, response): # thumbnail is always the same, do not use the one from rss def getBase(self, response) -> BaseItemLoader: base = LomBase.getBase(self, response) - base.replace_value( - "thumbnail", self.get("media.thumbPreview", json=response.meta["item"]) - ) + # thumbnail-priority from different fields: + # 1) media.thumbDetails (480x360) 2) media.thumbPreview (256x256) 3) source.imageUrl (480x360) + media_thumb_details = self.get("media.thumbDetails", json=response.meta["item"]) + media_thumb_preview = self.get("media.thumbPreview", json=response.meta["item"]) + source_image_url = self.get("source.imageUrl", json=response.meta["item"]) + if media_thumb_details: + base.replace_value("thumbnail", media_thumb_details) + elif media_thumb_preview: + base.replace_value("thumbnail", media_thumb_preview) + elif source_image_url: + base.replace_value("thumbnail", source_image_url) for publisher in self.get("publishers", json=response.meta["item"]): base.add_value( "publisher", publisher['title'] @@ -440,8 +465,6 @@ def get_subjects(self, response) -> list[Any] | None: # the "subject"-key might exist in the API, but still be of 'None'-value for subject in subjects: subject_name = subject['name'] - # self.DEBUG_SUBJECTS.add(subject_name) - # print(f"Amount of Subjects: {len(self.DEBUG_SUBJECTS)} // SUBJECT SET: \n {self.DEBUG_SUBJECTS}") subject_set.add(subject_name) return list(subject_set) else: @@ -453,13 +476,27 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: if subjects: for subject in subjects: valuespaces.add_value('discipline', subject) - educational_context_list = self.get('educationalLevels', json=response.meta['item']) + # ToDo: use 'schoolTypes'-field as a fallback for educationalLevels -> educationalContext + school_types_list = self.get('schoolTypes', json=response.meta['item']) + educational_context_set = set() if educational_context_list: + # the Sodix field 'educationalLevels' is directly mappable to our 'educationalContext' for potential_edu_context in educational_context_list: if potential_edu_context in self.MAPPING_EDUCONTEXT: potential_edu_context = self.MAPPING_EDUCONTEXT.get(potential_edu_context) - valuespaces.add_value('educationalContext', potential_edu_context) + educational_context_set.add(potential_edu_context) + elif school_types_list: + # if 'educationalLevels' doesn't exist, the fallback is to map the 'schoolTypes'-field + for school_type in school_types_list: + if school_type in self.MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT: + school_type = self.MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT.get(school_type) + educational_context_set.add(school_type) + educational_context_list = list(educational_context_set) + educational_context_list.sort() + if educational_context_list: + valuespaces.add_value("educationalContext", educational_context_list) + target_audience_list = self.get('targetAudience', json=response.meta['item']) if 
target_audience_list: for target_audience_item in target_audience_list: From f8a2fd2ed7c1964de020e199455fc2bd3fd6c3ea Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 6 Oct 2022 10:45:01 +0200 Subject: [PATCH 158/590] add: LisumPipeline LRT-mapping (WIP, squashed) - add: 'educationalContext'-mapping (WIP) -- mapping from OEH educationalContext to Lisum valuespace keys - add: valuespaces.discipline mapping from eafCode (e.g. "80") to B-B abbreviations ("C-BIO" etc.) - problematic: 10 values from Sodix can't (at least not yet) be properly mapped from valuespaces.learningResourceType to Lisum keys - code cleanup -- improved readability of 'valuespaces.discipline' and 'valuespaces.educationalContext'-mapping by refactoring it --- converter/pipelines.py | 192 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 191 insertions(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 2f109cae..93a96a11 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -30,6 +30,7 @@ from converter import env from converter.constants import * from converter.es_connector import EduSharing +from converter.items import BaseItem from converter.web_tools import WebTools, WebEngine from valuespace_converter.app.valuespaces import Valuespaces @@ -669,4 +670,193 @@ class ExampleLoggingPipeline(BasicPipeline): def process_item(self, item, spider): log.info(item) # self.exporter.export_item(item) - return item \ No newline at end of file + return item + + +class LisumPipeline(BasicPipeline): + DISCIPLINE_TO_LISUM = { + "060": "C-KU", # Bildende Kunst + "080": "C-BIO", # Biologie + "100": "C-CH", # Chemie + "120": "C-DE", # Deutsch + "160": "C-Eth", # Ethik + "200": "C-FS", # Fremdsprachen + "220": "C-GEO", # Geographie, + "240": "C-GE", # Geschichte + "380": "C-MA", # Mathematik + "420": "C-MU", # Musik + "450": "C-Phil", # Philosophie + "460": "C-Ph", # Physik + "480": "C-PB", # Politische Bildung + "510": "C-Psy", # Psychologie + "520": "C-LER", # Religion -> Lebensgestaltung-Ethik-Religionskunde + "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" + "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater + "20001": "C-EN", # Englisch + "20002": "C-FR", # Französisch + "20003": "C-AGR", # Griechisch -> Altgriechisch + "20004": "C-IT", # Italienisch + "20005": "C-La", # Latein + "20006": "C-RU", # Russisch + "28010": "C-SU", # Sachkunde -> Sachunterricht + "32002": "C-Inf", # Informatik + "46014": "C-AS", # Astronomie + "48005": "C-GEWIWI", # Gesellschaftspolitische Gegenwartsfragen -> Gesellschaftswissenschaften + } + + EDUCATIONALCONTEXT_TO_LISUM = { + "elementarbereich": "pre-school", + "grundschule": "primary school", + "sekundarstufe_1": "lower secondary school", + "sekundarstufe_2": "upper secondary school", + "berufliche_bildung": "vocational education", + # "fortbildung": "", # does not exist in Lisum valuespace + "erwachsenenbildung": "continuing education", + "foerderschule": "special education", + # "fernunterricht": "" # does not exist in Lisum valuespace + } + + LRT_OEH_TO_LISUM = { + # ToDo: LRT-values that aren't listed here, can be mapped 1:1 + "audiovisual_medium": ["audio", "video"], + # ToDo: BROSCHUERE? 
+ "data": "", # ToDo + "exploration": "", # ToDo + "case_study": "", # ToDo + "glossary": "reference_book", + "guide": "reference_book", + # ToDo: INTERAKTION + "model": "", # ToDo + "open_activity": "", # ToDo + "broadcast": "audio", + "enquiry_oriented_activity": "", # ToDo + "other": "", # ToDo + "text": "teaching_aids", # teaching_aids = "Arbeitsmaterial" in Lisum mds + "teaching_module": "", # ToDo + "demonstration": "image", # "Veranschaulichung" + "web_page": "portal", + } + + def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy.Item]: + """ + Takes a BaseItem and transforms its metadata-values to Lisum-metadataset-compatible values. + Touches the following fields within the BaseItem: + - valuespaces.discipline + - valuespaces.educationalContext + - valuespaces.intendedEndUserRole + - valuespaces.learningResourceType + """ + base_item_adapter = ItemAdapter(item) + # ToDo: + # - map ValueSpaceItem.discipline from SKOS to ccm:taxonid keys + # - e.g. "Astronomie" (eafCode: 46014) to "C-AS" + # - after the "valuespaces"-mapping, + # a discipline looks like 'http://w3id.org/openeduhub/vocabs/discipline/380' -> eafCode at the end + # from 380 ("Mathematik") map to "C-MA" + # - make sure that discipline.ttl has all possible values, otherwise information loss occurs + # - keep raw list for debugging purposes? + if base_item_adapter.get("valuespaces"): + valuespaces = base_item_adapter.get("valuespaces") + if valuespaces.get("discipline"): + discipline_list = valuespaces.get("discipline") + # a singular entry will look like 'http://w3id.org/openeduhub/vocabs/discipline/380' + # the last part of the URL string equals to a corresponding eafCode + # (see: http://agmud.de/wp-content/uploads/2021/09/eafsys.txt) + discipline_lisum_keys = set() + if discipline_list: + for discipline_w3id in discipline_list: + discipline_eaf_code: str = discipline_w3id.split(sep='/')[-1] + match discipline_eaf_code in self.DISCIPLINE_TO_LISUM: + case True: + discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM.get(discipline_eaf_code)) + case False: + # ToDo: missing Sodix values for mapping to + # - Chinesisch (C-ZH) + # - Deutsche Gebärdensprache (C-DGS) + # - Hebräisch (C-HE) + # - Japanisch (C-JP) + # - Naturwissenschaften (5/6) (= C-NW56) + # - Naturwissenschaften (C-NW) + # - Neu Griechisch (C-EL) + # - Polnisch (C-PL) + # - Portugiesisch (C-PT) + # - Sorbisch/Wendisch (C-SW) + # - Türkisch (C-TR) + # - Wirtschaft-Arbeit-Technik (C-WAT) + pass + case _: + # ToDo: fallback -> if eafCode can't be mapped, save to keywords? + logging.warning(f"Lisum Pipeline failed to map from eafCode {discipline_eaf_code} " + f"to its corresponding ccm:taxonid short-handle") + discipline_lisum_keys = list(discipline_lisum_keys) + discipline_lisum_keys.sort() + logging.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " + f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") + valuespaces["discipline"] = discipline_lisum_keys + + if valuespaces.get("educationalContext"): + # mapping educationalContext values from OEH SKOS to lisum keys + educational_context_list = valuespaces.get("educationalContext") + educational_context_lisum_keys = set() + if educational_context_list: + # making sure that we filter out empty lists [] + # up until this point, every educationalContext entry will be a w3id link, e.g. 
+ # 'http://w3id.org/openeduhub/vocabs/educationalContext/grundschule' + for educational_context_w3id in educational_context_list: + educational_context_w3id_key = educational_context_w3id.split(sep='/')[-1] + match educational_context_w3id_key in self.EDUCATIONALCONTEXT_TO_LISUM: + case True: + educational_context_w3id_key = self.EDUCATIONALCONTEXT_TO_LISUM.get( + educational_context_w3id_key) + educational_context_lisum_keys.add(educational_context_w3id_key) + case _: + logging.debug(f"LisumPipeline: educationalContext {educational_context_w3id_key}" + f"not found in mapping table.") + educational_context_list = list(educational_context_lisum_keys) + educational_context_list.sort() + valuespaces["educationalContext"] = educational_context_list + + if valuespaces.get("intendedEndUserRole"): + intended_end_user_role_list = valuespaces.get("intendedEndUserRole") + intended_end_user_roles = set() + if intended_end_user_role_list: + for item_w3id in intended_end_user_role_list: + item_w3id: str = item_w3id.split(sep='/')[-1] + if item_w3id: + intended_end_user_roles.add(item_w3id) + intended_end_user_role_list = list(intended_end_user_roles) + intended_end_user_role_list.sort() + valuespaces["intendedEndUserRole"] = intended_end_user_role_list + + if valuespaces.get("learningResourceType"): + lrt_list: list = valuespaces.get("learningResourceType") + lrt_temporary_list = list() + if lrt_list: + for lrt_item in lrt_list: + if type(lrt_item) is list: + # some values like "audiovisual" were already mapped to ["audio", "visual"] multivalues + # during transformation from Sodix to OEH + lrt_multivalue = list() + for lrt_string in lrt_item: + lrt_string = lrt_string.split(sep='/')[-1] + if lrt_string in self.LRT_OEH_TO_LISUM: + lrt_string = self.LRT_OEH_TO_LISUM.get(lrt_string) + if lrt_string: + # making sure to exclude ''-strings + lrt_multivalue.append(lrt_string) + lrt_temporary_list.append(lrt_multivalue) + if type(lrt_item) is str: + lrt_w3id: str = lrt_item.split(sep='/')[-1] + if lrt_w3id in self.LRT_OEH_TO_LISUM: + lrt_w3id = self.LRT_OEH_TO_LISUM.get(lrt_w3id) + if lrt_w3id: + # ToDo: workaround + # making sure to exclude '' strings from populating the list + lrt_temporary_list.append(lrt_w3id) + lrt_list = lrt_temporary_list + lrt_list.sort() + valuespaces["learningResourceType"] = lrt_list + pass + # ToDo: learningResourceType + + return item From 1a1c3c594bdf753847e8176cd0eae1943af134f5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 10 Oct 2022 12:41:34 +0200 Subject: [PATCH 159/590] sodix_spider v0.2.0 (WIP, squashed) - add: lifecycle differentiation between publishers and authors -- fix: lifecycle items -- the sodix field 'author' is only used in 'license' (since Sodix doesn't differentiate between persons, projects, agencies or usernames in this "free text"-field) - add: parse()-method to allow program flow control in regard to multiple lifecycle items - Sodix metadata fields are not always returning valid values, therefore additional checks and fallbacks were needed for: -- general.description -- license.description - add: missing metadata fields -- general.language -- lifecycle.author, lifecycle.url --- converter/spiders/sodix_spider.py | 118 +++++++++++++++++++++++++++--- 1 file changed, 108 insertions(+), 10 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 6df976a1..a07a1ba2 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -1,4 +1,5 @@ import 
json +from typing import Iterator import requests import scrapy @@ -42,13 +43,14 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): "AUDIO": "audio", "AUDIOVISUELLES": "audiovisual medium", "BILD": "image", + "BROSCHUERE": "text", "DATEN": "data", "ENTDECKENDES": "exploration", "EXPERIMENT": "experiment", "FALLSTUDIE": "case_study", "GLOSSAR": "glossary", "HANDBUCH": "guide", - # "INTERAKTION": "", + # "INTERAKTION": "", # ToDo: find a fitting value or leave empty? "KARTE": "map", "KURS": "course", "LERNKONTROLLE": "assessment", @@ -275,7 +277,7 @@ def parse_request(self, response): yield self.startRequest(response.meta["page"] + 1) def handleEntry(self, response): - return LomBase.parse(self, response) + return self.parse(response=response) # thumbnail is always the same, do not use the one from rss def getBase(self, response) -> BaseItemLoader: @@ -298,11 +300,53 @@ def getBase(self, response) -> BaseItemLoader: # ToDo: use 'source'-field from the GraphQL item for 'origin'? return base - def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: + def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader: lifecycle = LomBase.getLOMLifecycle(response) - + # the Sodix 'author'-field returns a wild mix of agencies, persons, usernames and project-names + # which would inevitably lead to bad metadata in this field. It is therefore only used in license.author + author_website = self.get("authorWebsite", json=response.meta["item"]) + if author_website: + lifecycle.add_value('role', 'author') + lifecycle.add_value('url', author_website) return lifecycle + def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleItemloader]: + lifecycle = LomBase.getLOMLifecycle(response) + publishers: list[dict] = self.get("publishers", json=response.meta["item"]) + # Sodix 'publishers'-field is a list of Publishers, therefore we need to iterate through them + if publishers: + for publisher in publishers: + lifecycle.add_value('role', 'publisher') + if "title" in publisher: + publisher_name = publisher.get("title") + if publisher_name: + lifecycle.add_value('organization', publisher_name) + if "id" in publisher: + publisher_sodix_uuid: str = publisher.get("id") + if publisher_sodix_uuid: + lifecycle.add_value('uuid', publisher_sodix_uuid) + if "officialWebsite" in publishers: + publisher_url: str = publisher.get("officialWebsite") + if publisher_url: + lifecycle.add_value('url', publisher_url) + published_time = self.get("publishedTime", json=response.meta["item"]) + creation_date = self.get("creationDate", json=response.meta["item"]) + source: dict = self.get("source", json=response.meta["item"]) + if published_time: + # the 'publishedTime'-field is 95% null or empty, which is why several fallbacks are needed + lifecycle.add_value('date', published_time) + elif creation_date: + lifecycle.add_value('date', creation_date) + elif source: + if "created" in source: + # Sodix field 'source.created' is of type LocalDateTime and available most of the time. 
Its usage + # and meaning is undocumented, though, which is why we use this field only as the last fallback + # in case the other fields aren't available + created_date = source.get("created") + if created_date: + lifecycle.add_value('date', created_date) + yield lifecycle + def getLOMGeneral(self, response) -> LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) general.replace_value( @@ -323,10 +367,17 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: if subjects: keywords_cleaned_up.extend(subjects) general.replace_value('keyword', keywords_cleaned_up) - general.add_value( - "description", - self.get("description", json=response.meta["item"]) - ) + if "language" in response.meta["item"]: + languages: list = self.get("language", json=response.meta["item"]) + if languages and isinstance(languages, list): + # Sodix returns empty lists and 'null' occasionally + for language in languages: + general.add_value('language', language) + if "description" in response.meta["item"]: + description: str = self.get("description", json=response.meta["item"]) + if description: + # Sodix sometimes returns the 'description'-field as null + general.add_value("description", description) return general def getLOMTechnical(self, response) -> LomTechnicalItemLoader: @@ -365,6 +416,7 @@ def license_is_oer(self, response) -> bool: Constants.LICENSE_CC_BY_SA_30, Constants.LICENSE_CC_BY_SA_40, Constants.LICENSE_CC_ZERO_10, + # ToDo: confirm if 'public domain' should be included in the OER-filter or not Constants.LICENSE_PDM] def getLicense(self, response) -> LicenseItemLoader: @@ -374,8 +426,13 @@ def getLicense(self, response) -> LicenseItemLoader: if author: license_loader.add_value('author', author) license_description: str = self.get("license.text", json=response.meta["item"]) + additional_license_information: str = self.get("additionalLicenseInformation") + # the Sodix field 'additionalLicenseInformation' is empty 95% of the time, but sometimes it might serve as a + # fallback for the license description if license_description: license_loader.add_value('description', license_description) + elif additional_license_information: + license_loader.add_value('description', additional_license_information) license_name: str = self.get("license.name", json=response.meta["item"]) if license_name: if license_name in self.MAPPING_LICENSE_NAMES: @@ -473,11 +530,12 @@ def get_subjects(self, response) -> list[Any] | None: def getValuespaces(self, response) -> ValuespaceItemLoader: valuespaces = LomBase.getValuespaces(self, response) subjects = self.get_subjects(response) + # ToDo: if subjects can't be mapped to SKOS, save them to the keywords field + # - this needs to happen during ValuespacePipeline mapping if subjects: for subject in subjects: valuespaces.add_value('discipline', subject) educational_context_list = self.get('educationalLevels', json=response.meta['item']) - # ToDo: use 'schoolTypes'-field as a fallback for educationalLevels -> educationalContext school_types_list = self.get('schoolTypes', json=response.meta['item']) educational_context_set = set() if educational_context_list: @@ -487,7 +545,7 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: potential_edu_context = self.MAPPING_EDUCONTEXT.get(potential_edu_context) educational_context_set.add(potential_edu_context) elif school_types_list: - # if 'educationalLevels' doesn't exist, the fallback is to map the 'schoolTypes'-field + # if 'educationalLevels' isn't available, fallback to: map 'schoolTypes'-field to 
'educationalContext' for school_type in school_types_list: if school_type in self.MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT: school_type = self.MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT.get(school_type) @@ -515,4 +573,44 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: valuespaces.add_value('learningResourceType', potential_lrt) else: pass + # ToDo: Lisum special use-case: use 'ccm:taxonentry' to store eafCodes return valuespaces + + def parse(self, response, **kwargs): + if LomBase.shouldImport(response) is False: + self.logger.debug( + f"Skipping entry {str(self.getId(response))} because shouldImport() returned false" + ) + return None + if self.getId(response) is not None and self.getHash(response) is not None: + if not self.hasChanged(response): + return None + + base = self.getBase(response) + + lom = LomBaseItemloader() + general = self.getLOMGeneral(response) + technical = self.getLOMTechnical(response) + if self.get("author", json=response.meta["item"]): + lifecycle_author = self.get_lom_lifecycle_author(response) + lom.add_value('lifecycle', lifecycle_author.load_item()) + if self.get("publishers", json=response.meta["item"]): + # theoretically, there can be multiple publisher fields per item, but in reality this doesn't occur (yet). + lifecycle_iterator: Iterator[LomLifecycleItemloader] = self.get_lom_lifecycle_publisher(response) + for lifecycle_publisher in lifecycle_iterator: + lom.add_value('lifecycle', lifecycle_publisher.load_item()) + educational = self.getLOMEducational(response) + classification = self.getLOMClassification(response) + + lom.add_value('general', general.load_item()) + lom.add_value('technical', technical.load_item()) + lom.add_value('educational', educational.load_item()) + lom.add_value('classification', classification.load_item()) + base.add_value("lom", lom.load_item()) + + base.add_value("valuespaces", self.getValuespaces(response).load_item()) + base.add_value("license", self.getLicense(response).load_item()) + base.add_value("permissions", self.getPermissions(response).load_item()) + base.add_value("response", self.mapResponse(response).load_item()) + + return base.load_item() From 063090b4d5381fe82c8ca0393167baf288c1d9ae Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 10 Oct 2022 12:45:31 +0200 Subject: [PATCH 160/590] LisumPipeline (WIP) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - add: missing discipline mapping values for "Arbeitslehre", "Türkisch", "Polnisch", "Portugiesisch", "Chinesisch" - ToDos and minor code cleanups --- converter/pipelines.py | 29 +++++++++++------------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 93a96a11..e0539f5d 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -675,6 +675,7 @@ def process_item(self, item, spider): class LisumPipeline(BasicPipeline): DISCIPLINE_TO_LISUM = { + "020": "C-WAT", # Arbeitslehre -> Wirtschaft, Arbeit, Technik "060": "C-KU", # Bildende Kunst "080": "C-BIO", # Biologie "100": "C-CH", # Chemie @@ -698,10 +699,15 @@ class LisumPipeline(BasicPipeline): "20004": "C-IT", # Italienisch "20005": "C-La", # Latein "20006": "C-RU", # Russisch + "20008": "C-TR", # Türkisch + "20011": "C-PL", # Polnisch + "20014": "C-PT", # Portugiesisch + "20041": "C-ZH", # Chinesisch "28010": "C-SU", # Sachkunde -> Sachunterricht "32002": "C-Inf", # Informatik "46014": "C-AS", # Astronomie "48005": "C-GEWIWI", # Gesellschaftspolitische 
Gegenwartsfragen -> Gesellschaftswissenschaften + "2800506": "C-PL", # Polnisch } EDUCATIONALCONTEXT_TO_LISUM = { @@ -747,14 +753,8 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy - valuespaces.learningResourceType """ base_item_adapter = ItemAdapter(item) - # ToDo: - # - map ValueSpaceItem.discipline from SKOS to ccm:taxonid keys - # - e.g. "Astronomie" (eafCode: 46014) to "C-AS" - # - after the "valuespaces"-mapping, - # a discipline looks like 'http://w3id.org/openeduhub/vocabs/discipline/380' -> eafCode at the end - # from 380 ("Mathematik") map to "C-MA" - # - make sure that discipline.ttl has all possible values, otherwise information loss occurs - # - keep raw list for debugging purposes? + # ToDo: - make sure that discipline.ttl has all possible values, otherwise information loss occurs + # - keep raw list for debugging purposes? if base_item_adapter.get("valuespaces"): valuespaces = base_item_adapter.get("valuespaces") if valuespaces.get("discipline"): @@ -762,6 +762,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # a singular entry will look like 'http://w3id.org/openeduhub/vocabs/discipline/380' # the last part of the URL string equals to a corresponding eafCode # (see: http://agmud.de/wp-content/uploads/2021/09/eafsys.txt) + # this eafCode (key) gets mapped to Lisum specific B-B shorthands like "C-MA" discipline_lisum_keys = set() if discipline_list: for discipline_w3id in discipline_list: @@ -769,21 +770,14 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy match discipline_eaf_code in self.DISCIPLINE_TO_LISUM: case True: discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM.get(discipline_eaf_code)) - case False: - # ToDo: missing Sodix values for mapping to - # - Chinesisch (C-ZH) + # ToDo: there are no Sodix eafCode-values for these Lisum keys: # - Deutsche Gebärdensprache (C-DGS) # - Hebräisch (C-HE) # - Japanisch (C-JP) # - Naturwissenschaften (5/6) (= C-NW56) # - Naturwissenschaften (C-NW) # - Neu Griechisch (C-EL) - # - Polnisch (C-PL) - # - Portugiesisch (C-PT) # - Sorbisch/Wendisch (C-SW) - # - Türkisch (C-TR) - # - Wirtschaft-Arbeit-Technik (C-WAT) - pass case _: # ToDo: fallback -> if eafCode can't be mapped, save to keywords? logging.warning(f"Lisum Pipeline failed to map from eafCode {discipline_eaf_code} " @@ -856,7 +850,6 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy lrt_list = lrt_temporary_list lrt_list.sort() valuespaces["learningResourceType"] = lrt_list - pass - # ToDo: learningResourceType + # ToDo: which fields am I missing? what's next? 
return item From d3e6e1195bda5bee2b00f8e3e7c5189f57f44d1d Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 10 Oct 2022 18:05:51 +0200 Subject: [PATCH 161/590] fix:es connector map general.aggregationLevel --- converter/es_connector.py | 1 + 1 file changed, 1 insertion(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 02ac59fd..a328948e 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -293,6 +293,7 @@ def transformItem(self, uuid, spider, item): "cclom:location": item["lom"]["technical"]["location"] if "location" in item["lom"]["technical"] else None, "cclom:format": item["lom"]["technical"]["format"] if "format" in item["lom"]["technical"] else None, + "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] if "aggregationLevel" in item["lom"]["general"] else None, "cclom:title": item["lom"]["general"]["title"], } if "notes" in item: From a9e21a99409cc3fcbb031a11e81f1b78c329d0e2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 10 Oct 2022 19:09:28 +0200 Subject: [PATCH 162/590] Remove no longer needed Lisum-Mappings for LRT - since Lisum's learningResourceType will be extended by additional keys, some previously used mappings are no longer needed (in cases where 1:1 mapping from OEH to Lisum is possible, these entries were deleted from the LisumPipeline LRT_OEH_TO_LISUM mapping) --- converter/pipelines.py | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index e0539f5d..b7cc92c5 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -723,24 +723,12 @@ class LisumPipeline(BasicPipeline): } LRT_OEH_TO_LISUM = { - # ToDo: LRT-values that aren't listed here, can be mapped 1:1 + # LRT-values that aren't listed here, can be mapped 1:1 "audiovisual_medium": ["audio", "video"], - # ToDo: BROSCHUERE? 
- "data": "", # ToDo - "exploration": "", # ToDo - "case_study": "", # ToDo - "glossary": "reference_book", - "guide": "reference_book", # ToDo: INTERAKTION - "model": "", # ToDo - "open_activity": "", # ToDo + "open_activity": "", # exists in 2 out of 60.000 items "broadcast": "audio", - "enquiry_oriented_activity": "", # ToDo - "other": "", # ToDo - "text": "teaching_aids", # teaching_aids = "Arbeitsmaterial" in Lisum mds - "teaching_module": "", # ToDo "demonstration": "image", # "Veranschaulichung" - "web_page": "portal", } def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy.Item]: From 54df70ea637c909fff00f3c6a0cdec20eeba3f42 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 10 Oct 2022 19:16:39 +0200 Subject: [PATCH 163/590] add: aggregationLevel handling and fix lifecycle.url for 'publishers'-field - fix: lifecycle.url (publisher) - add: aggregationLevel handling for "UNTERRICHTSBAUSTEIN"-LRTs -- if an item is a "UNTERRICHTSBAUSTEIN" (Sodix), it will get mapped to 'teaching_module' by our pipeline and, additionally, the crawler sets its 'general.aggregationLevel' to 2 in accordance to LOM-DE --- converter/spiders/sodix_spider.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index a07a1ba2..9153cd0d 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -324,8 +324,9 @@ def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleIte if "id" in publisher: publisher_sodix_uuid: str = publisher.get("id") if publisher_sodix_uuid: + # this uuid is used by Sodix to differentiate publishers lifecycle.add_value('uuid', publisher_sodix_uuid) - if "officialWebsite" in publishers: + if "officialWebsite" in publisher: publisher_url: str = publisher.get("officialWebsite") if publisher_url: lifecycle.add_value('url', publisher_url) @@ -590,6 +591,13 @@ def parse(self, response, **kwargs): lom = LomBaseItemloader() general = self.getLOMGeneral(response) + + # "UNTERRICHTSBAUSTEIN"-Materials need to handled as aggregationLevel = 2 (according to LOM-DE) + potential_lrts = self.get('learnResourceType', json=response.meta['item']) + if potential_lrts: + if "UNTERRICHTSBAUSTEIN" in potential_lrts: + general.add_value('aggregationLevel', 2) + technical = self.getLOMTechnical(response) if self.get("author", json=response.meta["item"]): lifecycle_author = self.get_lom_lifecycle_author(response) From 89b4e8cbf4bc679bff615fdaf4af605b1ea17677 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 11 Oct 2022 09:54:57 +0200 Subject: [PATCH 164/590] feat:custom mappings for arbitrary fields of ccm/cclom --- converter/es_connector.py | 7 +++++++ converter/items.py | 2 ++ 2 files changed, 9 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index a328948e..0e8e3ae5 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -1,4 +1,5 @@ import base64 +import collections import json import logging import time @@ -398,6 +399,12 @@ def transformItem(self, uuid, spider, item): spaces["ccm:educationaltypicalagerange_from"] = tar["fromRange"] if "toRange" in tar: spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] + + # map custom fields directly into the edu-sharing properties + if "custom" in item: + for key in item["custom"]: + spaces[key] = item["custom"][key] + # intendedEndUserRole = Field(output_processor=JoinMultivalues()) # discipline = 
Field(output_processor=JoinMultivalues()) # educationalContext = Field(output_processor=JoinMultivalues()) diff --git a/converter/items.py b/converter/items.py index 959c16c2..8e5e6362 100644 --- a/converter/items.py +++ b/converter/items.py @@ -194,6 +194,8 @@ class BaseItem(Item): "editorial notes" binary = Field() "binary data which should be uploaded (raw data)" + custom = Field() + "custom data, it can be used by the target transformer to store data in the native format (i.e. ccm/cclom properties in edu-sharing)" screenshot_bytes = Field() # this is a (temporary) field that gets deleted after the thumbnail pipeline processed its byte-data From ec8550308bd10b823e53246769e7e6992c574c63 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 11 Oct 2022 14:27:43 +0200 Subject: [PATCH 165/590] sodix_spider v0.2.1 - add: collecting eafCodes from Sodix to 'base.custom'-field -- first gathers all eafCodes if available -- afterwards saves eafCodes (of subjects and competencies) for to the edu-sharing repository's 'ccm:taxonentry'-field --- converter/spiders/sodix_spider.py | 66 ++++++++++++++++++++++++++++--- 1 file changed, 61 insertions(+), 5 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 9153cd0d..f5113d51 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -11,6 +11,18 @@ from .. import env +def extract_eaf_codes_to_set(eaf_code_list: list[str]) -> set: + """ + This helper method extracts (only valid) entries from a list of strings and returns a set. + """ + temporary_set = set() + for eaf_code in eaf_code_list: + if eaf_code: + # while this might be (theoretically) unnecessary, we're make sure to never grab empty strings + temporary_set.add(eaf_code) + return temporary_set + + class SodixSpider(scrapy.Spider, LomBase, JSONBase): """ Crawler for learning materials from SODIX GraphQL API. @@ -23,7 +35,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.0" # last update: 2022-10-06 + version = "0.2.1" # last update: 2022-10-11 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -297,9 +309,56 @@ def getBase(self, response) -> BaseItemLoader: base.add_value( "publisher", publisher['title'] ) - # ToDo: use 'source'-field from the GraphQL item for 'origin'? + # ToDo: (optional feature) use 'source'-field from the GraphQL item for 'origin'? + self.extract_and_save_eaf_codes_to_custom_field(base, response) return base + def extract_and_save_eaf_codes_to_custom_field(self, base: BaseItemLoader, response): + """ + Extracts eafCodes as a String from two Sodix API fields ('eafCode', 'competencies.id') and saves them to + 'base.custom' as a dictionary. + (The dictionary-key 'ccm:taxonentry' is (later on) used by es_connector.py to transmit the collected values + into edu-sharing.) 
+ """ + eaf_code_subjects = set() + eaf_code_competencies = set() + eaf_code_subjects_list = self.get("eafCode", json=response.meta["item"]) + # Extracting eafCodes from 'subject.id': + if eaf_code_subjects_list: + eaf_code_subjects: set = extract_eaf_codes_to_set(eaf_code_subjects_list) + # attention: eafCodes from Sodix field 'eafCode' and 'subject.id' carry the same information + eaf_code_competencies_list: list[dict] = self.get("competencies", json=response.meta["item"]) + # eafCodes from Sodix field 'competencies.id' are not listed within the 'eafCode' field, therefore we're + # gathering them separately and merge them with the other collected eafCodes if necessary + if eaf_code_competencies_list: + for competency_item in eaf_code_competencies_list: + if "id" in competency_item: + competency_eaf_code: str = competency_item.get("id") + eaf_code_competencies.add(competency_eaf_code) + # after collecting eafCodes from both Sodix fields, we're merging the sets (if possible) and saving them: + if eaf_code_subjects and eaf_code_competencies: + # subjects and competencies can be independently available from each other. If both fields are available + # in Sodix, we merge the sets and save them to a list + eaf_code_subjects.update(eaf_code_competencies) + eaf_code_combined = list(eaf_code_subjects) + eaf_code_combined.sort() + base.add_value('custom', { + 'ccm:taxonentry': eaf_code_combined + }) + elif eaf_code_subjects or eaf_code_competencies: + if eaf_code_subjects: + eaf_code_subjects_list: list = list(eaf_code_subjects) + eaf_code_subjects_list.sort() + base.add_value('custom', { + 'ccm:taxonentry': eaf_code_subjects_list + }) + if eaf_code_competencies: + eaf_code_competencies_list: list = list(eaf_code_competencies) + eaf_code_competencies_list.sort() + base.add_value('custom', { + 'ccm:taxonentry': eaf_code_competencies_list + }) + def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader: lifecycle = LomBase.getLOMLifecycle(response) # the Sodix 'author'-field returns a wild mix of agencies, persons, usernames and project-names @@ -572,9 +631,6 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: if potential_lrt in self.MAPPING_LRT: potential_lrt = self.MAPPING_LRT.get(potential_lrt) valuespaces.add_value('learningResourceType', potential_lrt) - else: - pass - # ToDo: Lisum special use-case: use 'ccm:taxonentry' to store eafCodes return valuespaces def parse(self, response, **kwargs): From 1fe637b4a4912d8299958d32685de988a6e0fcc4 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 11 Oct 2022 15:01:25 +0200 Subject: [PATCH 166/590] docs: stub for future "custom"-field handling - in case that future work on LisumPipeline needs to be done in regard to the "base.custom"-field, documented where it would need to happen --- converter/pipelines.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index b7cc92c5..26e97947 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -743,6 +743,12 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy base_item_adapter = ItemAdapter(item) # ToDo: - make sure that discipline.ttl has all possible values, otherwise information loss occurs # - keep raw list for debugging purposes? 
+ if base_item_adapter.get("custom"): + custom_field = base_item_adapter.get("custom") + # ToDo: handling or extending this field might or might not be necessary in the future + if "ccm:taxonentry" in custom_field: + pass + pass if base_item_adapter.get("valuespaces"): valuespaces = base_item_adapter.get("valuespaces") if valuespaces.get("discipline"): From 603c0c3d9a9763cd54f3b1ebe69c747b26b09feb Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 11 Oct 2022 18:23:47 +0200 Subject: [PATCH 167/590] sodix_spider v0.2.2 / LisumPipeline features - sodix_spider: -- add custom handling for Sodix LRT "INTERAKTION" - LisumPipeline: -- harden the mapping from eafCodes to Lisum keys (taxonid) --- by using the 'custom'-field with its raw list of eafCodes (if available), we have an additional, more rigid way of mapping disciplines to Lisum's keys --- (this was necessary due to the observation of missing "C-GEO"-values during crawler debugging) --- converter/pipelines.py | 38 ++++++++++++++++++++----------- converter/spiders/sodix_spider.py | 4 +++- 2 files changed, 28 insertions(+), 14 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 26e97947..629d4f08 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -725,7 +725,6 @@ class LisumPipeline(BasicPipeline): LRT_OEH_TO_LISUM = { # LRT-values that aren't listed here, can be mapped 1:1 "audiovisual_medium": ["audio", "video"], - # ToDo: INTERAKTION "open_activity": "", # exists in 2 out of 60.000 items "broadcast": "audio", "demonstration": "image", # "Veranschaulichung" @@ -741,14 +740,23 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy - valuespaces.learningResourceType """ base_item_adapter = ItemAdapter(item) - # ToDo: - make sure that discipline.ttl has all possible values, otherwise information loss occurs - # - keep raw list for debugging purposes? 
+ discipline_lisum_keys = set() + sodix_lisum_custom_lrts = set() if base_item_adapter.get("custom"): custom_field = base_item_adapter.get("custom") - # ToDo: handling or extending this field might or might not be necessary in the future if "ccm:taxonentry" in custom_field: - pass - pass + taxon_entries: list = custom_field.get("ccm:taxonentry") + # first round of mapping from (all) Sodix eafCodes to 'ccm:taxonid' + if taxon_entries: + for taxon_entry in taxon_entries: + if taxon_entry in self.DISCIPLINE_TO_LISUM: + discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM.get(taxon_entry)) + if "sodix_lisum_lrt" in custom_field: + # this is necessary for special edge-case values like "INTERAKTION" which have no equivalent in OEH LRT + sodix_lisum_lrt: list = custom_field.get("sodix_lisum_lrt") + for custom_lrt in sodix_lisum_lrt: + sodix_lisum_custom_lrts.add(custom_lrt) + del item["custom"]["sodix_lisum_lrt"] if base_item_adapter.get("valuespaces"): valuespaces = base_item_adapter.get("valuespaces") if valuespaces.get("discipline"): @@ -757,7 +765,6 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # the last part of the URL string equals to a corresponding eafCode # (see: http://agmud.de/wp-content/uploads/2021/09/eafsys.txt) # this eafCode (key) gets mapped to Lisum specific B-B shorthands like "C-MA" - discipline_lisum_keys = set() if discipline_list: for discipline_w3id in discipline_list: discipline_eaf_code: str = discipline_w3id.split(sep='/')[-1] @@ -773,14 +780,12 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # - Neu Griechisch (C-EL) # - Sorbisch/Wendisch (C-SW) case _: - # ToDo: fallback -> if eafCode can't be mapped, save to keywords? + # due to having the 'custom'-field as a (raw) list of all eafCodes, this mainly serves + # the purpose of reminding us if a 'discipline'-value couldn't be mapped to Lisum logging.warning(f"Lisum Pipeline failed to map from eafCode {discipline_eaf_code} " f"to its corresponding ccm:taxonid short-handle") - discipline_lisum_keys = list(discipline_lisum_keys) - discipline_lisum_keys.sort() logging.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") - valuespaces["discipline"] = discipline_lisum_keys if valuespaces.get("educationalContext"): # mapping educationalContext values from OEH SKOS to lisum keys @@ -843,7 +848,14 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy lrt_temporary_list.append(lrt_w3id) lrt_list = lrt_temporary_list lrt_list.sort() - valuespaces["learningResourceType"] = lrt_list - # ToDo: which fields am I missing? what's next? + if sodix_lisum_custom_lrts: + # if there's any Sodix custom LRT values present (e.g. 
"INTERAKTION"), extend the lrt list + lrt_temporary_list.extend(lrt_list) + # after everything is mapped and sorted, save the list: + valuespaces["learningResourceType"] = lrt_list + if discipline_lisum_keys: + discipline_lisum_keys = list(discipline_lisum_keys) + discipline_lisum_keys.sort() + valuespaces["discipline"] = discipline_lisum_keys return item diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index f5113d51..e2b8abd8 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -35,7 +35,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.1" # last update: 2022-10-11 + version = "0.2.2" # last update: 2022-10-11 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -653,6 +653,8 @@ def parse(self, response, **kwargs): if potential_lrts: if "UNTERRICHTSBAUSTEIN" in potential_lrts: general.add_value('aggregationLevel', 2) + if "INTERAKTION" in potential_lrts: + base.add_value('custom', {'sodix_lisum_lrt': ['interactive_material']}) technical = self.getLOMTechnical(response) if self.get("author", json=response.meta["item"]): From a542b152faa846ec361c56ed78893da2e89d564e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 13 Oct 2022 15:37:36 +0200 Subject: [PATCH 168/590] sodix_spider v0.2.3 - add: base.identifier - code cleanup and docs --- converter/spiders/sodix_spider.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index e2b8abd8..9367a09e 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -35,7 +35,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.2" # last update: 2022-10-11 + version = "0.2.3" # last update: 2022-10-13 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -62,7 +62,6 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): "FALLSTUDIE": "case_study", "GLOSSAR": "glossary", "HANDBUCH": "guide", - # "INTERAKTION": "", # ToDo: find a fitting value or leave empty? "KARTE": "map", "KURS": "course", "LERNKONTROLLE": "assessment", @@ -271,11 +270,11 @@ def parse_request(self, response): for item in metadata_items: response_copy = response.copy() response_copy.meta["item"] = item - # ToDo: don't handle an entry if the license is not OER-compatible? - # (DropItem exceptions can only be raised from the pipeline) if self.OER_FILTER is True or env.get_bool('SODIX_SPIDER_OER_FILTER', default=False): - # controlling the OER-Filter via spider arguments is useful for debugging, but we also need - # an easy way to control the spider via the .env file (while running as a Docker container) + # Since DropItem exceptions can only be raised from within the pipeline, the filtering of items + # that aren't strictly OER-licenses needs to happen here. + # - controlling the OER-Filter via spider arguments is useful for debugging, but we also need + # an easy way to control the spider via the .env file (while running it as a Docker container) if self.license_is_oer(response_copy) is False: self.NOT_OER_THROWAWAY_COUNTER += 1 self.logger.info(f"Item dropped due to OER-incompatibility. 
\n" @@ -291,7 +290,6 @@ def parse_request(self, response): def handleEntry(self, response): return self.parse(response=response) - # thumbnail is always the same, do not use the one from rss def getBase(self, response) -> BaseItemLoader: base = LomBase.getBase(self, response) # thumbnail-priority from different fields: @@ -438,6 +436,11 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: if description: # Sodix sometimes returns the 'description'-field as null general.add_value("description", description) + sodix_identifier = self.get("identifier", json=response.meta["item"]) + # ToDo: save Sodix 'id'-field to an additional field? + # - also: find out where 'base.identifier' ends up in (which metadata-field is it saved to? -> documentation) + if sodix_identifier: + general.add_value('identifier', sodix_identifier) return general def getLOMTechnical(self, response) -> LomTechnicalItemLoader: @@ -476,7 +479,6 @@ def license_is_oer(self, response) -> bool: Constants.LICENSE_CC_BY_SA_30, Constants.LICENSE_CC_BY_SA_40, Constants.LICENSE_CC_ZERO_10, - # ToDo: confirm if 'public domain' should be included in the OER-filter or not Constants.LICENSE_PDM] def getLicense(self, response) -> LicenseItemLoader: @@ -590,8 +592,6 @@ def get_subjects(self, response) -> list[Any] | None: def getValuespaces(self, response) -> ValuespaceItemLoader: valuespaces = LomBase.getValuespaces(self, response) subjects = self.get_subjects(response) - # ToDo: if subjects can't be mapped to SKOS, save them to the keywords field - # - this needs to happen during ValuespacePipeline mapping if subjects: for subject in subjects: valuespaces.add_value('discipline', subject) @@ -625,7 +625,7 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: if self.get('cost', json=response.meta['item']) == "FREE": valuespaces.add_value("price", "no") potential_lrts = self.get('learnResourceType', json=response.meta['item']) - # attention: sodix calls their LRT "learnResourceType" + # attention: sodix calls their LRT "learnResourceType", not "learningResourceType" if potential_lrts: for potential_lrt in potential_lrts: if potential_lrt in self.MAPPING_LRT: From 27b3be3f2944fa4cd92f2b47a90e859a0db9befe Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 14 Oct 2022 11:22:55 +0200 Subject: [PATCH 169/590] add: "general.identifier" mapping to "cclom:general_identifier" --- converter/es_connector.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 0e8e3ae5..92f7ace8 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -296,6 +296,7 @@ def transformItem(self, uuid, spider, item): "cclom:format": item["lom"]["technical"]["format"] if "format" in item["lom"]["technical"] else None, "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] if "aggregationLevel" in item["lom"]["general"] else None, "cclom:title": item["lom"]["general"]["title"], + "cclom:general_identifier": item["lom"]["general"]["identifier"] } if "notes" in item: spaces["ccm:notes"] = item["notes"] @@ -308,6 +309,9 @@ def transformItem(self, uuid, spider, item): if "description" in item["lom"]["general"]: spaces["cclom:general_description"] = item["lom"]["general"]["description"] + if "identifier" in item["lom"]["general"]: + spaces["cclom:general_identifier"] = item["lom"]["general"]["identifier"] + if "language" in item["lom"]["general"]: spaces["cclom:general_language"] = item["lom"]["general"]["language"] 
@@ -319,7 +323,7 @@ def transformItem(self, uuid, spider, item): if "duration" in item["lom"]["technical"]: duration = item["lom"]["technical"]["duration"] try: - # edusharing requries milliseconds + # edusharing requires milliseconds duration = int(float(duration) * 1000) except: pass From 5f67af26ae29d40435bd5db242f9d6ce29e0fa97 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 14 Oct 2022 13:17:09 +0200 Subject: [PATCH 170/590] sodix_spider v0.2.4 fix for multiple 'general.identifier' values - sodix_spider v0.2.4 (fix for multiple 'identifier'-values) -- the Sodix API carries TWO distinct identifiers for their objects -- both are saved, if available, as a list of strings to 'general.identifier' --- these identifier values might be necessary to recognize duplicates later on in the edu-sharing repository --- converter/items.py | 2 +- converter/spiders/sodix_spider.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/converter/items.py b/converter/items.py index 8e5e6362..3a873a67 100644 --- a/converter/items.py +++ b/converter/items.py @@ -32,7 +32,7 @@ class MutlilangItem(Item): class LomGeneralItem(Item): - identifier = Field() + identifier = Field(output_processor=JoinMultivalues()) title = Field() language = Field() keyword = Field(output_processor=JoinMultivalues()) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 9367a09e..4bbc1ccf 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -35,7 +35,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.3" # last update: 2022-10-13 + version = "0.2.4" # last update: 2022-10-14 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -436,11 +436,18 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: if description: # Sodix sometimes returns the 'description'-field as null general.add_value("description", description) - sodix_identifier = self.get("identifier", json=response.meta["item"]) - # ToDo: save Sodix 'id'-field to an additional field? - # - also: find out where 'base.identifier' ends up in (which metadata-field is it saved to? -> documentation) + + # Sodix has TWO distinct identifiers (uuids) for their objects: + # the Sodix field 'identifier' carries a prefix, e.g. "SODIX-", "BY-" etc. 
+ # the Sodix field 'id' is an uuid without further explanation + # If both are available, they're saved as a [String] to 'cclom:general_identifier' (this might be necessary to + # identify duplicates later in edu-sharing) + sodix_identifier: str = self.get("identifier", json=response.meta["item"]) if sodix_identifier: general.add_value('identifier', sodix_identifier) + sodix_id: str = self.get("id", json=response.meta["item"]) + if sodix_id: + general.add_value('identifier', sodix_id) return general def getLOMTechnical(self, response) -> LomTechnicalItemLoader: From 3e42c3d7cdc005104c478b9413fa0b5175dc243d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 14 Oct 2022 18:45:47 +0200 Subject: [PATCH 171/590] sodix_spider v0.2.5 - feat: get_lom_lifecycle_metadata_provider()-method: -- implements a LomLifeCycleItemLoader for metadata_provider values from Sodix 'source'-field (to edu-sharing 'ccm:metadatacontributer_provider') - add: last_modified date (LocalDateTime from Sodix "updated"-field) - fix: after mapping license names to their URLs, the public domain license (PDM 1.0) was accidentally written to 'license.internal' instead of 'license.url' --- converter/spiders/sodix_spider.py | 42 ++++++++++++++++++++++++++----- 1 file changed, 36 insertions(+), 6 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 4bbc1ccf..811ccb69 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -35,7 +35,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.4" # last update: 2022-10-14 + version = "0.2.5" # last update: 2022-10-20 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -307,6 +307,9 @@ def getBase(self, response) -> BaseItemLoader: base.add_value( "publisher", publisher['title'] ) + last_modified = self.get("updated", json=response.meta["item"]) + if last_modified: + base.add_value('lastModified', last_modified) # ToDo: (optional feature) use 'source'-field from the GraphQL item for 'origin'? self.extract_and_save_eaf_codes_to_custom_field(base, response) return base @@ -405,6 +408,29 @@ def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleIte lifecycle.add_value('date', created_date) yield lifecycle + def get_lom_lifecycle_metadata_provider(self, response=None) -> LomLifecycleItemloader: + """ + Collects metadata from Sodix 'source'-field with the purpose of saving it to edu-sharing's + 'ccm:metadatacontributer_provider'-field. + """ + lifecycle = LomBase.getLOMLifecycle(response) + source: dict = self.get('source', json=response.meta["item"]) + if source: + lifecycle.add_value('role', 'metadata_provider') + # all 'source'-subfields are of Type: String + if source.get('id'): + lifecycle.add_value('uuid', source.get('id')) + if source.get('name'): + lifecycle.add_value('organization', source.get('name')) + if source.get('created'): + # LocalDateTime within the String, e.g.: "2022-10-17T11:42:49.198" + lifecycle.add_value('date', source.get('created')) + # ToDo: Sodix 'source.edited'-field also carries a LocalDateTime, but we currently can't make a distinction + # between lifecycle metadata_provider dates (e.g. 
between a creationDate <-> lastModified) + if source.get('website'): + lifecycle.add_value('url', source.get('website')) + return lifecycle + def getLOMGeneral(self, response) -> LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) general.replace_value( @@ -505,14 +531,15 @@ def getLicense(self, response) -> LicenseItemLoader: license_name: str = self.get("license.name", json=response.meta["item"]) if license_name: if license_name in self.MAPPING_LICENSE_NAMES: - license_internal_mapped = self.MAPPING_LICENSE_NAMES.get(license_name) + license_mapped_url = self.MAPPING_LICENSE_NAMES.get(license_name) + # if mapping was successful, license_mapped_url contains a license URL if license_name.startswith("CC"): - # ToDo: for CC-licenses the actual URL is more precise than our 'internal' license mapping - # (you will see differences between the 'internal' value and the actual URL from the API, + # for CC-licenses the actual URL is more precise than our 'internal' license mapping + # (you would see differences between the 'internal' value and the actual URL from the API, # e.g. a license pointing to v3.0 and v4.0 at the same time) pass else: - license_loader.add_value('internal', license_internal_mapped) + license_loader.add_value('url', license_mapped_url) if not license_description: # "name"-fields with the "Copyright, freier Zugang"-value don't have "text"-fields, therefore # we're carrying over the custom description, just in case @@ -632,7 +659,7 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: if self.get('cost', json=response.meta['item']) == "FREE": valuespaces.add_value("price", "no") potential_lrts = self.get('learnResourceType', json=response.meta['item']) - # attention: sodix calls their LRT "learnResourceType", not "learningResourceType" + # attention: Sodix calls their LRT "learnResourceType", not "learningResourceType" if potential_lrts: for potential_lrt in potential_lrts: if potential_lrt in self.MAPPING_LRT: @@ -672,6 +699,9 @@ def parse(self, response, **kwargs): lifecycle_iterator: Iterator[LomLifecycleItemloader] = self.get_lom_lifecycle_publisher(response) for lifecycle_publisher in lifecycle_iterator: lom.add_value('lifecycle', lifecycle_publisher.load_item()) + if self.get("source", json=response.meta["item"]): + lifecycle_metadata_provider = self.get_lom_lifecycle_metadata_provider(response) + lom.add_value('lifecycle', lifecycle_metadata_provider.load_item()) educational = self.getLOMEducational(response) classification = self.getLOMClassification(response) From 9d0ed5e5a0a9b89390a93c9856ac133fea0c08ed Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 17 Oct 2022 13:43:28 +0200 Subject: [PATCH 172/590] add: LisumPipeline Mapping for discipline 'Spanisch' --- converter/pipelines.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 629d4f08..2d7d1ffc 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -691,6 +691,7 @@ class LisumPipeline(BasicPipeline): "480": "C-PB", # Politische Bildung "510": "C-Psy", # Psychologie "520": "C-LER", # Religion -> Lebensgestaltung-Ethik-Religionskunde + # ToDo: 560 -> "C-NW56-3-8" ? 
(Sexualerziehung) "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater "20001": "C-EN", # Englisch @@ -699,6 +700,7 @@ class LisumPipeline(BasicPipeline): "20004": "C-IT", # Italienisch "20005": "C-La", # Latein "20006": "C-RU", # Russisch + "20007": "C-ES", # Spanisch "20008": "C-TR", # Türkisch "20011": "C-PL", # Polnisch "20014": "C-PT", # Portugiesisch From 3ed8e16dab243abdc68a1be99da22c777609166a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 17 Oct 2022 14:28:25 +0200 Subject: [PATCH 173/590] add: License Mappings for v2.0 and v2.5 license URLs from Sodix - while working on sodix_spider and the sodix GraphQL API it was observed that Sodix serves some of its learning content with v2.0 and v2.5 URLs of CreativeCommons licenses (which were previously not available in the constants.py) --- converter/constants.py | 42 +++++++++++++++++++++++++------ converter/spiders/sodix_spider.py | 2 +- 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index aa8ff04d..836c54ac 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -2,50 +2,78 @@ class Constants: + LICENSE_CC_BY_20: Final[str] = "https://creativecommons.org/licenses/by/2.0/" + LICENSE_CC_BY_25: Final[str] = "https://creativecommons.org/licenses/by/2.5/" LICENSE_CC_BY_30: Final[str] = "https://creativecommons.org/licenses/by/3.0/" LICENSE_CC_BY_40: Final[str] = "https://creativecommons.org/licenses/by/4.0/" LICENSE_CC_BY_NC_30: Final[str] = "https://creativecommons.org/licenses/by-nc/3.0/" LICENSE_CC_BY_NC_40: Final[str] = "https://creativecommons.org/licenses/by-nc/4.0/" + LICENSE_CC_BY_NC_ND_20: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/2.0/" LICENSE_CC_BY_NC_ND_30: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/3.0/" LICENSE_CC_BY_NC_ND_40: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/4.0/" + LICENSE_CC_BY_NC_SA_20: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/2.0/" + LICENSE_CC_BY_NC_SA_25: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/2.5/" LICENSE_CC_BY_NC_SA_30: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/3.0/" LICENSE_CC_BY_NC_SA_40: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/4.0/" + LICENSE_CC_BY_ND_20: Final[str] = "https://creativecommons.org/licenses/by-nd/2.0/" LICENSE_CC_BY_ND_30: Final[str] = "https://creativecommons.org/licenses/by-nd/3.0/" LICENSE_CC_BY_ND_40: Final[str] = "https://creativecommons.org/licenses/by-nd/4.0/" + LICENSE_CC_BY_SA_20: Final[str] = "https://creativecommons.org/licenses/by-sa/2.0/" + LICENSE_CC_BY_SA_25: Final[str] = "https://creativecommons.org/licenses/by-sa/2.5/" LICENSE_CC_BY_SA_30: Final[str] = "https://creativecommons.org/licenses/by-sa/3.0/" LICENSE_CC_BY_SA_40: Final[str] = "https://creativecommons.org/licenses/by-sa/4.0/" LICENSE_CC_ZERO_10: Final[str] = "https://creativecommons.org/publicdomain/zero/1.0/" LICENSE_PDM: Final[str] = "https://creativecommons.org/publicdomain/mark/1.0/" VALID_LICENSE_URLS: list[str | Any] = [ + LICENSE_CC_BY_20, + LICENSE_CC_BY_25, LICENSE_CC_BY_30, LICENSE_CC_BY_40, LICENSE_CC_BY_NC_30, LICENSE_CC_BY_NC_40, + LICENSE_CC_BY_NC_ND_20, LICENSE_CC_BY_NC_ND_30, LICENSE_CC_BY_NC_ND_40, + LICENSE_CC_BY_NC_SA_20, + LICENSE_CC_BY_NC_SA_25, LICENSE_CC_BY_NC_SA_30, LICENSE_CC_BY_NC_SA_40, + LICENSE_CC_BY_ND_20, LICENSE_CC_BY_ND_30, 
LICENSE_CC_BY_ND_40, + LICENSE_CC_BY_SA_20, + LICENSE_CC_BY_SA_25, LICENSE_CC_BY_SA_30, LICENSE_CC_BY_SA_40, LICENSE_CC_ZERO_10, LICENSE_PDM, ] LICENSE_MAPPINGS: dict[str, str] = { - "https://creativecommons.org/licenses/by/": LICENSE_CC_BY_40, # ToDo: outdated approximation? - # ToDo: - CC_BY_NC (3.0 + 4.0) + "https://creativecommons.org/licenses/by/2.0/": LICENSE_CC_BY_20, + "https://creativecommons.org/licenses/by/2.5/": LICENSE_CC_BY_25, + "https://creativecommons.org/licenses/by/3.0/": LICENSE_CC_BY_30, + "https://creativecommons.org/licenses/by/4.0/": LICENSE_CC_BY_40, + "https://creativecommons.org/licenses/by-nc/3.0/": LICENSE_CC_BY_NC_30, + "https://creativecommons.org/licenses/by-nc/4.0/": LICENSE_CC_BY_NC_40, + "https://creativecommons.org/licenses/by-nc-nd/2.0/": LICENSE_CC_BY_NC_ND_20, "https://creativecommons.org/licenses/by-nc-nd/3.0/": LICENSE_CC_BY_NC_ND_30, "https://creativecommons.org/licenses/by-nc-nd/4.0/": LICENSE_CC_BY_NC_ND_40, - # ToDo: - # - CC_BY_NC_SA (3.0 + 4.0) - # - CC_BY_ND (3.0 + 4.0) - # - CC_BY_SA (3.0) - "https://creativecommons.org/licenses/by-sa/": LICENSE_CC_BY_SA_40, # Todo: outdated approximation? + "https://creativecommons.org/licenses/by-nc-sa/2.0/": LICENSE_CC_BY_NC_SA_20, + "https://creativecommons.org/licenses/by-nc-sa/2.5/": LICENSE_CC_BY_NC_SA_25, + "https://creativecommons.org/licenses/by-nc-sa/3.0/": LICENSE_CC_BY_NC_SA_30, + "https://creativecommons.org/licenses/by-nc-sa/4.0/": LICENSE_CC_BY_NC_SA_40, + "https://creativecommons.org/licenses/by-nd/2.0/": LICENSE_CC_BY_ND_20, + "https://creativecommons.org/licenses/by-nd/3.0/": LICENSE_CC_BY_ND_30, + "https://creativecommons.org/licenses/by-nd/4.0/": LICENSE_CC_BY_ND_40, + "https://creativecommons.org/licenses/by-sa/2.0/": LICENSE_CC_BY_SA_20, + "https://creativecommons.org/licenses/by-sa/2.5/": LICENSE_CC_BY_SA_25, + "https://creativecommons.org/licenses/by-sa/3.0/": LICENSE_CC_BY_SA_30, + "https://creativecommons.org/licenses/by-sa/4.0/": LICENSE_CC_BY_SA_40, # wrong mapping (currently from edu-sharing) "https://creativecommons.org/publicdomain/zero/": LICENSE_CC_ZERO_10, "https://creativecommons.org/licenses/pdm/": LICENSE_PDM, + "https://creativecommons.org/publicdomain/mark/1.0/": LICENSE_PDM, } LICENSE_MAPPINGS_INTERNAL: dict[str, list[str]] = { "CC_0": [LICENSE_CC_ZERO_10], diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 811ccb69..7c784646 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -546,6 +546,7 @@ def getLicense(self, response) -> LicenseItemLoader: license_loader.replace_value('description', license_name) license_url: str = self.get("license.url", json=response.meta["item"]) + # possible license URL values returned by the Sodix API: # license_urls_sorted = ['https://creativecommons.org/licenses/by-nc-nd/2.0/de/', # 'https://creativecommons.org/licenses/by-nc-nd/3.0/de/', # 'https://creativecommons.org/licenses/by-nc-nd/3.0/deed.de', @@ -576,7 +577,6 @@ def getLicense(self, response) -> LicenseItemLoader: # 'https://creativecommons.org/licenses/by/4.0/', # 'https://creativecommons.org/publicdomain/mark/1.0/deed.de', # 'https://creativecommons.org/publicdomain/zero/1.0/deed.de'] - # ToDo: our constants.py doesn't have entries for v2.0 or 2.5 values of CC licenses if license_url: # making sure to only handle valid license urls, since the API result can be NoneType or empty string ('') if license_url.endswith("deed.de"): From 2245dad8d0369621632b88472fe3faa6da75c7f3 Mon Sep 17 00:00:00 2001 From: 
Torsten Simon Date: Thu, 20 Oct 2022 08:28:08 +0200 Subject: [PATCH 174/590] feat:option to control if permissions are transferred to edu-sharing --- converter/.env.example | 3 +++ converter/es_connector.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/converter/.env.example b/converter/.env.example index 4418fb95..3753cf93 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -26,6 +26,9 @@ EDU_SHARING_BASE_URL = "http://localhost:8080/edu-sharing/" EDU_SHARING_USERNAME = "admin" EDU_SHARING_PASSWORD = "admin" +# Configure if permissions of edu-sharing nodes are handled by the crawler (default true) +# You may want to set this to false if you don't want to apply permissions from crawlers or have a custom implementation in the repository +# EDU_SHARING_PERMISSION_CONTROL=true # Metadataset to be used for generated nodes. You may use "default" to use the default mds of the repository # EDU_SHARING_METADATASET=mds_oeh diff --git a/converter/es_connector.py b/converter/es_connector.py index 92f7ace8..e2193dfd 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -471,6 +471,9 @@ def createGroupsIfNotExists(self, groups, type: CreateGroupType): EduSharing.groupCache.append(result["authorityName"]) def setNodePermissions(self, uuid, item): + if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) == False: + logging.debug("Skipping permissions, EDU_SHARING_PERMISSION_CONTROL is set to false") + return if "permissions" in item: permissions = { "inherited": True, # let inherited = true to add additional permissions via edu-sharing From c5426d2a85c1dd05812763e70ff5fc2ba8d311a8 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 20 Oct 2022 16:39:49 +0200 Subject: [PATCH 175/590] fix:es_connector fetch bulk api version --- converter/es_connector.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index e2193dfd..0bb51daf 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -15,6 +15,7 @@ from converter import env from converter.constants import Constants +from edu_sharing_client import ABOUTApi from edu_sharing_client.api.bulk_v1_api import BULKV1Api from edu_sharing_client.api.iam_v1_api import IAMV1Api from edu_sharing_client.api.mediacenter_v1_api import MEDIACENTERV1Api @@ -95,7 +96,9 @@ class CreateGroupType(Enum): cookie: str = None resetVersion: bool = False + version: any apiClient: ESApiClient + aboutApi: ABOUTApi bulkApi: BULKV1Api iamApi: IAMV1Api mediacenterApi: MEDIACENTERV1Api @@ -610,10 +613,20 @@ def initApiClient(self): header_name="Accept", header_value="application/json", ) + EduSharing.aboutApi = ABOUTApi(EduSharing.apiClient) EduSharing.bulkApi = BULKV1Api(EduSharing.apiClient) EduSharing.iamApi = IAMV1Api(EduSharing.apiClient) EduSharing.mediacenterApi = MEDIACENTERV1Api(EduSharing.apiClient) EduSharing.nodeApi = NODEV1Api(EduSharing.apiClient) + about = EduSharing.aboutApi.about() + EduSharing.version = list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0]["version"] + version_str = str(EduSharing.version["major"]) + "." 
+ str(EduSharing.version["minor"]) + if EduSharing.version["major"] != 1 or EduSharing.version["minor"] < 0 or EduSharing.version["minor"] > 1: + raise Exception( + f"Given repository api version is unsupported: " + version_str + ) + else: + logging.info("Detected edu-sharing bulk api with version " + version_str) EduSharing.groupCache = list( map( lambda x: x["authorityName"], From ae851203f08c156506e9fd8efc5dca93816fb65f Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 20 Oct 2022 18:49:54 +0200 Subject: [PATCH 176/590] feat:keep raw valuespace + send it to edu-sharing (if api supports it) --- converter/es_connector.py | 6 ++++++ converter/items.py | 3 +++ converter/pipelines.py | 1 + 3 files changed, 10 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 0bb51daf..a49872fa 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -400,6 +400,12 @@ def transformItem(self, uuid, spider, item): } for key in item["valuespaces"]: spaces[valuespaceMapping[key]] = item["valuespaces"][key] + # add raw values if the api supports it + if EduSharing.version["major"] >= 1 and EduSharing.version["minor"] >= 1: + for key in item["valuespaces_raw"]: + splitted = valuespaceMapping[key].split(":") + splitted[0] = "virtual" + spaces[":".join(splitted)] = item["valuespaces_raw"][key] if "typicalAgeRange" in item["lom"]["educational"]: tar = item["lom"]["educational"]["typicalAgeRange"] if "fromRange" in tar: diff --git a/converter/items.py b/converter/items.py index 3a873a67..14455f84 100644 --- a/converter/items.py +++ b/converter/items.py @@ -186,6 +186,9 @@ class BaseItem(Item): lastModified = Field() lom = Field(serializer=LomBaseItem) valuespaces = Field(serializer=ValuespaceItem) + "all items which are based on (skos) based valuespaces. The ProcessValuespacePipeline will automatically convert items inside here" + valuespaces_raw = Field(serializer=ValuespaceItem) + "this item is only used by the ProcessValuespacePipeline and holds the ""raw"" data which were given to the valuespaces. 
Please do not use it inside crawlers" permissions = Field(serializer=PermissionItem) "permissions (access rights) for this entry" license = Field(serializer=LicenseItem) diff --git a/converter/pipelines.py b/converter/pipelines.py index 2d7d1ffc..d7b9eee5 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -269,6 +269,7 @@ def __init__(self): def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) json = item["valuespaces"] + item["valuespaces_raw"] = dict(json) delete = [] for key in json: # remap to new i18n layout From fc129c6b24826471d8f4eb4bf7feed95a33fd925 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Oct 2022 20:36:10 +0200 Subject: [PATCH 177/590] sodix_spider v0.2.6 - refactor: 'valuespaces.discipline' is now using eafCodes to set individual disciplines instead of the previous 'subject.name'-string - fix: 'sodix_lisum_lrt'-key (within 'base.custom') is only ever saved if the LisumPipeline is activated -- since LisumPipeline automatically deletes the key after handling the 'sodix_lisum_lrt' edge-case, this workaround should prevent the crawler from unintentionally creating new DB fields --- converter/spiders/sodix_spider.py | 42 ++++++++++++++++++------------- 1 file changed, 25 insertions(+), 17 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 7c784646..645ff276 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -35,7 +35,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.5" # last update: 2022-10-20 + version = "0.2.6" # last update: 2022-10-20 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -447,9 +447,11 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: # we're only adding valid keywords, none of the empty (whitespace) strings keywords_cleaned_up.append(individual_keyword) general.add_value('keyword', individual_keyword) - subjects = self.get_subjects(response) + subjects = self.get_subject_dictionary(response) if subjects: - keywords_cleaned_up.extend(subjects) + subject_names = list(subjects.values()) + subject_names.sort() + keywords_cleaned_up.extend(subject_names) general.replace_value('keyword', keywords_cleaned_up) if "language" in response.meta["item"]: languages: list = self.get("language", json=response.meta["item"]) @@ -607,28 +609,34 @@ def getLOMEducational(self, response=None) -> LomEducationalItemLoader: educational.add_value("typicalAgeRange", tar.load_item()) return educational - def get_subjects(self, response) -> list[Any] | None: - # there are (currently) 837 unique subjects across all 50.697 Items, which are suitable to be used as additional - # keyword values. - subject_set = set() + def get_subject_dictionary(self, response) -> dict[str, str] | None: + """ + Parses the Sodix API field 'subject' and returns a dictionary consisting of: + Sodix 'subject.id' (= the eafCode of a "Schulfach") and its human-readable counterpart + Sodix 'subject.name' as its value. 
+ """ +        subject_dictionary = dict()          if "subject" in response.meta['item'] is not None:              # the "subject"-field does not exist in every item returned by the sodix API -            subjects = self.get('subject', json=response.meta['item']) -            if subjects: +            subjects_list: list = self.get('subject', json=response.meta['item']) +            if subjects_list:                  # the "subject"-key might exist in the API, but still be of 'None'-value -                for subject in subjects: -                    subject_name = subject['name'] -                    subject_set.add(subject_name) -            return list(subject_set) +                for subject in subjects_list: +                    subject_name: str = subject['name'] +                    subject_id: str = subject['id'] +                    subject_dictionary.update({subject_id: subject_name}) +            return subject_dictionary          else:              return None      def getValuespaces(self, response) -> ValuespaceItemLoader:          valuespaces = LomBase.getValuespaces(self, response) -        subjects = self.get_subjects(response) +        subjects = self.get_subject_dictionary(response)          if subjects: -            for subject in subjects: -                valuespaces.add_value('discipline', subject) +            subject_ids = list(subjects.keys()) +            if subject_ids: +                subject_ids.sort() +                valuespaces.add_value('discipline', subject_ids)          educational_context_list = self.get('educationalLevels', json=response.meta['item'])          school_types_list = self.get('schoolTypes', json=response.meta['item'])          educational_context_set = set() From 3151ab0d1e09b253a1a29bc12f3ed162d98b83d1 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 21 Oct 2022 12:45:24 +0200 Subject: [PATCH 178/590] fix:es connector skip group cache if permission handling is disabled --- converter/es_connector.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index a49872fa..69729712 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -633,16 +633,17 @@ def initApiClient(self): ) else: logging.info("Detected edu-sharing bulk api with version " + version_str) - EduSharing.groupCache = list( - map( - lambda x: x["authorityName"], - EduSharing.iamApi.search_groups( - EduSharingConstants.HOME, "", max_items=1000000 - )["groups"], + if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) == False: + EduSharing.groupCache = list( + map( + lambda x: x["authorityName"], + EduSharing.iamApi.search_groups( + EduSharingConstants.HOME, "", max_items=1000000 + )["groups"], + ) ) - ) - logging.debug("Built up edu-sharing group cache: {}".format(EduSharing.groupCache)) - return + logging.debug("Built up edu-sharing group cache: {}".format(EduSharing.groupCache)) + return logging.warning(auth.text) raise Exception( "Could not authentify as admin at edu-sharing. 
Please check your settings for repository " From b5d07bd16c306b8da731fecb205d58db2809032c Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 21 Oct 2022 13:01:20 +0200 Subject: [PATCH 179/590] fix:es connector skip group cache if permission handling is disabled --- converter/es_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 69729712..ee12edd6 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -633,7 +633,7 @@ def initApiClient(self): ) else: logging.info("Detected edu-sharing bulk api with version " + version_str) - if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) == False: + if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) == True: EduSharing.groupCache = list( map( lambda x: x["authorityName"], From c07c884fd0d74bdb04680e8af34c38087a7bd493 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 21 Oct 2022 13:13:25 +0200 Subject: [PATCH 180/590] fix:es connector skip group cache if permission handling is disabled --- converter/es_connector.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index ee12edd6..b81a3e27 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -644,6 +644,8 @@ def initApiClient(self): ) logging.debug("Built up edu-sharing group cache: {}".format(EduSharing.groupCache)) return + else: + return logging.warning(auth.text) raise Exception( "Could not authentify as admin at edu-sharing. Please check your settings for repository " From 205d07c172eb0c519a85dd189bc69e3762fbdf96 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 21 Oct 2022 14:10:48 +0200 Subject: [PATCH 181/590] sodix_spider v0.2.7 - feat: 'origin'-subfolders depending on which Sodix 'source' an object is originally coming from -- this makes navigating the edu-sharing "workspace"-view more pleasant because a full Sodix crawl would yield >60.000 items in a single spider directory otherwise - fix: controlling the OER-Filter via .env-File -- if the OER-Filter was activated via the .env-File only, the spider's GraphQL Request wouldn't activate the recordStatus parameter -- documented how spider arguments are treated by Scrapy and made sure that the proper GraphQL parameter is sent during the initial request - fix: get_lom_lifecycle_author()-method caused unintended behaviour in the edu-sharing 'author'-field (author-symbols without any descriptions/text) -- due to Sodix "author" and "authorWebsite" not always being available at the same time --- workaround: only save author-information to 'lifecycle'-Item if both fields are available - fix: remove 'base.publisher'-field from crawler (ToDo: needs to be removed from the items.py) --- converter/spiders/sodix_spider.py | 42 +++++++++++++++++++++---------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 645ff276..b7795f82 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -9,6 +9,7 @@ from .base_classes import JSONBase from .base_classes import LomBase from .. 
import env +from ..items import LomLifecycleItemloader def extract_eaf_codes_to_set(eaf_code_list: list[str]) -> set: @@ -35,7 +36,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.6" # last update: 2022-10-20 + version = "0.2.7" # last update: 2022-10-21 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -125,9 +126,10 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): 'keine Angaben (gesetzliche Regelung)': Constants.LICENSE_CUSTOM, } - def __init__(self, oer_filter=False, **kwargs): - if oer_filter == "True" or oer_filter == "true": - # scrapy arguments are handled as strings + def __init__(self, oer_filter: str = "False", **kwargs): + if oer_filter.lower() == "true" or env.get_bool(key='SODIX_SPIDER_OER_FILTER') is True: + # Scrapy arguments are always handled as Strings, even if you try to set a boolean + # see: https://docs.scrapy.org/en/latest/topics/spiders.html#spider-arguments self.OER_FILTER = True LomBase.__init__(self, **kwargs) @@ -303,14 +305,20 @@ def getBase(self, response) -> BaseItemLoader: base.replace_value("thumbnail", media_thumb_preview) elif source_image_url: base.replace_value("thumbnail", source_image_url) - for publisher in self.get("publishers", json=response.meta["item"]): - base.add_value( - "publisher", publisher['title'] - ) + # for publisher in self.get("publishers", json=response.meta["item"]): + # base.add_value( + # "publisher", publisher['title'] + # ) + # ToDo: the 'publisher'-field in BaseItem will be removed in the future last_modified = self.get("updated", json=response.meta["item"]) if last_modified: base.add_value('lastModified', last_modified) # ToDo: (optional feature) use 'source'-field from the GraphQL item for 'origin'? + source_id: str = self.get("source.id", json=response.meta["item"]) + # ToDo: the crawler can't write description text to subfolder names yet + # 'source.name' or 'source.description' could be used here to make the subfolders more human-readable + if source_id: + base.add_value('origin', source_id) self.extract_and_save_eaf_codes_to_custom_field(base, response) return base @@ -360,15 +368,22 @@ def extract_and_save_eaf_codes_to_custom_field(self, base: BaseItemLoader, respo 'ccm:taxonentry': eaf_code_competencies_list }) - def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader: + def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader | None: lifecycle = LomBase.getLOMLifecycle(response) # the Sodix 'author'-field returns a wild mix of agencies, persons, usernames and project-names - # which would inevitably lead to bad metadata in this field. It is therefore only used in license.author + # therfore all author-strings from Sodix are treated as "organization"-values + author = self.get("author", json=response.meta["item"]) author_website = self.get("authorWebsite", json=response.meta["item"]) - if author_website: + if author and author_website: + # edge-case: Some Sodix Items can have a "authorWebsite", but no valid "author"-value (e.g. null). + # saving only the authorWebsite would lead to an empty author-symbol in the edu-sharing workspace view, + # which is why the current workaround is to only save this field if BOTH values are available and valid. 
lifecycle.add_value('role', 'author') + lifecycle.add_value('organization', author) lifecycle.add_value('url', author_website) - return lifecycle + return lifecycle + else: + return None def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleItemloader]: lifecycle = LomBase.getLOMLifecycle(response) @@ -701,7 +716,8 @@ def parse(self, response, **kwargs): technical = self.getLOMTechnical(response) if self.get("author", json=response.meta["item"]): lifecycle_author = self.get_lom_lifecycle_author(response) - lom.add_value('lifecycle', lifecycle_author.load_item()) + if lifecycle_author: + lom.add_value('lifecycle', lifecycle_author.load_item()) if self.get("publishers", json=response.meta["item"]): # theoretically, there can be multiple publisher fields per item, but in reality this doesn't occur (yet). lifecycle_iterator: Iterator[LomLifecycleItemloader] = self.get_lom_lifecycle_publisher(response) From c60573dd1ad19a391881597b06e9f39ed86be3bb Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 21 Oct 2022 14:35:00 +0200 Subject: [PATCH 182/590] update: license mappings for Sodix values --- converter/es_connector.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index b81a3e27..aec11b5a 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -221,6 +221,13 @@ def setNodePreview(self, uuid, item) -> bool: def mapLicense(self, spaces, license): if "url" in license: match license["url"]: + # ToDo: refactor this ungodly method asap + case Constants.LICENSE_CC_BY_20: + spaces["ccm:commonlicense_key"] = "CC_BY" + spaces["ccm:commonlicense_cc_version"] = "2.0" + case Constants.LICENSE_CC_BY_25: + spaces["ccm:commonlicense_key"] = "CC_BY" + spaces["ccm:commonlicense_cc_version"] = "2.5" case Constants.LICENSE_CC_BY_30: spaces["ccm:commonlicense_key"] = "CC_BY" spaces["ccm:commonlicense_cc_version"] = "3.0" @@ -233,24 +240,42 @@ def mapLicense(self, spaces, license): case Constants.LICENSE_CC_BY_NC_40: spaces["ccm:commonlicense_key"] = "CC_BY_NC" spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_NC_ND_20: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" + spaces["ccm:commonlicense_cc_version"] = "20" case Constants.LICENSE_CC_BY_NC_ND_30: spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" spaces["ccm:commonlicense_cc_version"] = "3.0" case Constants.LICENSE_CC_BY_NC_ND_40: spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_NC_SA_20: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + spaces["ccm:commonlicense_cc_version"] = "2.0" + case Constants.LICENSE_CC_BY_NC_SA_25: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + spaces["ccm:commonlicense_cc_version"] = "2.5" case Constants.LICENSE_CC_BY_NC_SA_30: spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" spaces["ccm:commonlicense_cc_version"] = "3.0" case Constants.LICENSE_CC_BY_NC_SA_40: spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_ND_20: + spaces["ccm:commonlicense_key"] = "CC_BY_ND" + spaces["ccm:commonlicense_cc_version"] = "2.0" case Constants.LICENSE_CC_BY_ND_30: spaces["ccm:commonlicense_key"] = "CC_BY_ND" spaces["ccm:commonlicense_cc_version"] = "3.0" case Constants.LICENSE_CC_BY_ND_40: spaces["ccm:commonlicense_key"] = "CC_BY_ND" spaces["ccm:commonlicense_cc_version"] = "4.0" + case 
Constants.LICENSE_CC_BY_SA_20: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "2.0" + case Constants.LICENSE_CC_BY_SA_25: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "2.5" case Constants.LICENSE_CC_BY_SA_30: spaces["ccm:commonlicense_key"] = "CC_BY_SA" spaces["ccm:commonlicense_cc_version"] = "3.0" From e3bf092664eee1da765496739e984f98fead2776 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 21 Oct 2022 16:26:25 +0200 Subject: [PATCH 183/590] fix: error when checking for activated LisumPipeline - this edge-case happened when encountering Sodix LRT "INTERAKTION" --- converter/spiders/sodix_spider.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index b7795f82..456c951a 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -710,8 +710,9 @@ def parse(self, response, **kwargs): if potential_lrts: if "UNTERRICHTSBAUSTEIN" in potential_lrts: general.add_value('aggregationLevel', 2) - if "INTERAKTION" in potential_lrts and "LisumPipeline" in env.get(key='CUSTOM_PIPELINES'): - base.add_value('custom', {'sodix_lisum_lrt': ['interactive_material']}) + if "INTERAKTION" in potential_lrts and env.get(key='CUSTOM_PIPELINES', allow_null=True) is not None: + if "LisumPipeline" in env.get(key='CUSTOM_PIPELINES', allow_null=True): + base.add_value('custom', {'sodix_lisum_lrt': 'interactive_material'}) technical = self.getLOMTechnical(response) if self.get("author", json=response.meta["item"]): From d31600d048e2d9ab9e648f8186b274006f5b78b6 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 21 Oct 2022 16:49:45 +0200 Subject: [PATCH 184/590] feat:keep "status" info of data entries and transfer it to edu-sharing --- converter/es_connector.py | 2 ++ converter/items.py | 2 ++ converter/spiders/sodix_spider.py | 1 + 3 files changed, 5 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index aec11b5a..51c33edc 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -328,6 +328,8 @@ def transformItem(self, uuid, spider, item): } if "notes" in item: spaces["ccm:notes"] = item["notes"] + if "status" in item: + spaces["ccm:editorial_state"] = item["status"] if "origin" in item: spaces["ccm:replicationsourceorigin"] = item[ "origin" diff --git a/converter/items.py b/converter/items.py index 14455f84..dea1330c 100644 --- a/converter/items.py +++ b/converter/items.py @@ -195,6 +195,8 @@ class BaseItem(Item): publisher = Field() notes = Field() "editorial notes" + status = Field() + "status information of a given node, i.e. 
activated or deactivated" binary = Field() "binary data which should be uploaded (raw data)" custom = Field() diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 456c951a..afcf1528 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -310,6 +310,7 @@ def getBase(self, response) -> BaseItemLoader: # "publisher", publisher['title'] # ) # ToDo: the 'publisher'-field in BaseItem will be removed in the future + base.add_value("status", self.get("recordStatus", json=response.meta["item"])) last_modified = self.get("updated", json=response.meta["item"]) if last_modified: base.add_value('lastModified', last_modified) From 2bdce3e7fae2588184d66d301d85a1cfa634c2a0 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 24 Oct 2022 09:26:34 +0200 Subject: [PATCH 185/590] fix:default pipeline lisum --- converter/spiders/sodix_spider.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index afcf1528..13dc5caf 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -11,6 +11,18 @@ from .. import env from ..items import LomLifecycleItemloader +import csv +import json + +# Opening JSON file +f = open('results.json') +# returns JSON object as +# a dictionary +data = json.load(f) +with open('mycsvfile.csv', 'w') as f: # You will need 'wb' mode in Python 2.x + w = csv.DictWriter(f, data.keys()) + w.writeheader() + w.writerow(data) def extract_eaf_codes_to_set(eaf_code_list: list[str]) -> set: """ @@ -313,6 +325,7 @@ def getBase(self, response) -> BaseItemLoader: base.add_value("status", self.get("recordStatus", json=response.meta["item"])) last_modified = self.get("updated", json=response.meta["item"]) if last_modified: + base.add_value('lastModified', last_modified) # ToDo: (optional feature) use 'source'-field from the GraphQL item for 'origin'? source_id: str = self.get("source.id", json=response.meta["item"]) @@ -712,7 +725,8 @@ def parse(self, response, **kwargs): if "UNTERRICHTSBAUSTEIN" in potential_lrts: general.add_value('aggregationLevel', 2) if "INTERAKTION" in potential_lrts and env.get(key='CUSTOM_PIPELINES', allow_null=True) is not None: - if "LisumPipeline" in env.get(key='CUSTOM_PIPELINES', allow_null=True): + # TODO: Do such logic in a pipeline, not in the crawler! 
+ if "LisumPipeline" in env.get(key='CUSTOM_PIPELINES', allow_null=True, default=None): base.add_value('custom', {'sodix_lisum_lrt': 'interactive_material'}) technical = self.getLOMTechnical(response) From 2b4c927d541399a7d8a9bb61760935c9f3ee1161 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 24 Oct 2022 12:31:23 +0200 Subject: [PATCH 186/590] fix: sodix-lisum custom LRT "INTERAKTION" - moved handling of this LRT edge-case to LisumPipeline by accessing 'valuespaces_raw' - code cleanup --- converter/pipelines.py | 32 ++++++++++++++++++++++--------- converter/spiders/sodix_spider.py | 22 ++------------------- 2 files changed, 25 insertions(+), 29 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index d7b9eee5..0b164758 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -754,12 +754,6 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy for taxon_entry in taxon_entries: if taxon_entry in self.DISCIPLINE_TO_LISUM: discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM.get(taxon_entry)) - if "sodix_lisum_lrt" in custom_field: - # this is necessary for special edge-case values like "INTERAKTION" which have no equivalent in OEH LRT - sodix_lisum_lrt: list = custom_field.get("sodix_lisum_lrt") - for custom_lrt in sodix_lisum_lrt: - sodix_lisum_custom_lrts.add(custom_lrt) - del item["custom"]["sodix_lisum_lrt"] if base_item_adapter.get("valuespaces"): valuespaces = base_item_adapter.get("valuespaces") if valuespaces.get("discipline"): @@ -851,12 +845,32 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy lrt_temporary_list.append(lrt_w3id) lrt_list = lrt_temporary_list lrt_list.sort() - if sodix_lisum_custom_lrts: - # if there's any Sodix custom LRT values present (e.g. "INTERAKTION"), extend the lrt list - lrt_temporary_list.extend(lrt_list) # after everything is mapped and sorted, save the list: valuespaces["learningResourceType"] = lrt_list + # Mapping from valuespaces_raw["learningResourceType"]: "INTERAKTION" -> "interactive_material" + # (edge-cases like "INTERAKTION" don't exist in the OEH 'learningResourceType'-vocab, therfore wouldn't be + # available in valuespaces) + if base_item_adapter.get("valuespaces_raw"): + vs_raw: dict = base_item_adapter.get("valuespaces_raw") + if "learningResourceType" in vs_raw: + raw_lrt: list = vs_raw.get("learningResourceType") + for raw_lrt_item in raw_lrt: + if raw_lrt_item == "INTERAKTION": + sodix_lisum_custom_lrts.add("interactive_material") + if sodix_lisum_custom_lrts: + # if there's any Sodix custom LRT values present (e.g. "INTERAKTION"): + if valuespaces.get("learningResourceType"): + # extending the LRT-list if it was already available + lrt_list: list = valuespaces.get("learningResourceType") + lrt_list.extend(sodix_lisum_custom_lrts) + valuespaces["learningResourceType"] = lrt_list + else: + # since most of the time there will be no LRT field available (if "INTERAKTION" is the only + # LRT value, it needs to be created) + lrt_list = list(sodix_lisum_custom_lrts) + valuespaces["learningResourceType"] = lrt_list + if discipline_lisum_keys: discipline_lisum_keys = list(discipline_lisum_keys) discipline_lisum_keys.sort() diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 13dc5caf..c13a10a5 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -11,18 +11,6 @@ from .. 
import env from ..items import LomLifecycleItemloader -import csv -import json - -# Opening JSON file -f = open('results.json') -# returns JSON object as -# a dictionary -data = json.load(f) -with open('mycsvfile.csv', 'w') as f: # You will need 'wb' mode in Python 2.x - w = csv.DictWriter(f, data.keys()) - w.writeheader() - w.writerow(data) def extract_eaf_codes_to_set(eaf_code_list: list[str]) -> set: """ @@ -48,7 +36,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.7" # last update: 2022-10-21 + version = "0.2.7" # last update: 2022-10-24 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -325,9 +313,7 @@ def getBase(self, response) -> BaseItemLoader: base.add_value("status", self.get("recordStatus", json=response.meta["item"])) last_modified = self.get("updated", json=response.meta["item"]) if last_modified: - base.add_value('lastModified', last_modified) - # ToDo: (optional feature) use 'source'-field from the GraphQL item for 'origin'? source_id: str = self.get("source.id", json=response.meta["item"]) # ToDo: the crawler can't write description text to subfolder names yet # 'source.name' or 'source.description' could be used here to make the subfolders more human-readable @@ -701,7 +687,7 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: for potential_lrt in potential_lrts: if potential_lrt in self.MAPPING_LRT: potential_lrt = self.MAPPING_LRT.get(potential_lrt) - valuespaces.add_value('learningResourceType', potential_lrt) + valuespaces.add_value('learningResourceType', potential_lrt) return valuespaces def parse(self, response, **kwargs): @@ -724,10 +710,6 @@ def parse(self, response, **kwargs): if potential_lrts: if "UNTERRICHTSBAUSTEIN" in potential_lrts: general.add_value('aggregationLevel', 2) - if "INTERAKTION" in potential_lrts and env.get(key='CUSTOM_PIPELINES', allow_null=True) is not None: - # TODO: Do such logic in a pipeline, not in the crawler! 
- if "LisumPipeline" in env.get(key='CUSTOM_PIPELINES', allow_null=True, default=None): - base.add_value('custom', {'sodix_lisum_lrt': 'interactive_material'}) technical = self.getLOMTechnical(response) if self.get("author", json=response.meta["item"]): From c51ff620a7de41c42d9ec0d15c4f28869a067e51 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 25 Oct 2022 18:09:16 +0200 Subject: [PATCH 187/590] fix:sodix spider prevent duplicate, identical locations --- converter/spiders/sodix_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index c13a10a5..374babec 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -500,7 +500,7 @@ def getLOMTechnical(self, response) -> LomTechnicalItemLoader: "location", self.getUri(response) ) original = self.get("media.originalUrl", json=response.meta["item"]) - if original: + if original and self.getUri(response) != original: technical.add_value( "location", original ) From 8c174824c1ce2810ee57ab64f9f8be8a2ca0bb96 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Tue, 25 Oct 2022 18:17:09 +0200 Subject: [PATCH 188/590] fix:sodix spider map copyright licenses into proper internal field --- converter/spiders/sodix_spider.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 374babec..96dd6aa1 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -556,7 +556,10 @@ def getLicense(self, response) -> LicenseItemLoader: # e.g. a license pointing to v3.0 and v4.0 at the same time) pass else: - license_loader.add_value('url', license_mapped_url) + if license_mapped_url in [Constants.LICENSE_COPYRIGHT_LAW]: + license_loader.add_value('internal', license_mapped_url) + else: + license_loader.add_value('url', license_mapped_url) if not license_description: # "name"-fields with the "Copyright, freier Zugang"-value don't have "text"-fields, therefore # we're carrying over the custom description, just in case From b9c9c0db51708b5fff3d4c57c53284b05e09bec1 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 26 Oct 2022 09:49:12 +0200 Subject: [PATCH 189/590] fix:docker support kw args + use entrypoint for startup --- Dockerfile | 5 +++-- docker-compose.yml | 2 ++ entrypoint.sh | 9 +++++++++ 3 files changed, 14 insertions(+), 2 deletions(-) create mode 100755 entrypoint.sh diff --git a/Dockerfile b/Dockerfile index 579af2a5..c553d8a7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,10 @@ FROM python:3.10.0-slim-buster -ENV CRAWLER wirlernenonline_spider +# ENV CRAWLER wirlernenonline_spider WORKDIR / +COPY entrypoint.sh entrypoint.sh COPY requirements.txt requirements.txt COPY scrapy.cfg scrapy.cfg COPY setup.cfg setup.cfg @@ -14,4 +15,4 @@ COPY valuespace_converter/ valuespace_converter/ RUN pip3 install -r requirements.txt -CMD scrapy crawl "$CRAWLER" +ENTRYPOINT ["/entrypoint.sh"] diff --git a/docker-compose.yml b/docker-compose.yml index 6d2c4792..8827408a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -43,6 +43,8 @@ services: - "PLAYWRIGHT_WS_ENDPOINT=ws://headless_chrome:3000" - "SPLASH_URL=http://splash:8050" - "CRAWLER=${CRAWLER}" + # optional keyword args, e.g. 
cleanrun=true + - "ARGS=${ARGS}" - "DRY_RUN=False" - "LOG_LEVEL=${LOG_LEVEL:-INFO}" - "EDU_SHARING_BASE_URL=${EDU_SHARING_BASE_URL}" diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100755 index 00000000..9be1348f --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,9 @@ +#!/bin/sh + +if [ -z "$ARGS" ] +then + scrapy crawl "$CRAWLER" +else + scrapy crawl -a "$ARGS" "$CRAWLER" +fi + From 8585407d8072feb5a68329a2b43a0cdc7faaa9b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Wed, 26 Oct 2022 11:49:07 +0200 Subject: [PATCH 190/590] fix: sodix_spider init no longer requires .env key - default is now set to False for 'SODIX_SPIDER_OER_FILTER' --- converter/spiders/sodix_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 96dd6aa1..6fbca3a7 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -127,7 +127,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): } def __init__(self, oer_filter: str = "False", **kwargs): - if oer_filter.lower() == "true" or env.get_bool(key='SODIX_SPIDER_OER_FILTER') is True: + if oer_filter.lower() == "true" or env.get_bool(key='SODIX_SPIDER_OER_FILTER', default=False) is True: # Scrapy arguments are always handled as Strings, even if you try to set a boolean # see: https://docs.scrapy.org/en/latest/topics/spiders.html#spider-arguments self.OER_FILTER = True From c72d5b66fe51bea63a9d3c79c8d714cc94414764 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 7 Nov 2022 09:42:45 +0100 Subject: [PATCH 191/590] fix:es connector nlp if identifier not set --- converter/es_connector.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 51c33edc..a45d0973 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -323,9 +323,10 @@ def transformItem(self, uuid, spider, item): if "location" in item["lom"]["technical"] else None, "cclom:format": item["lom"]["technical"]["format"] if "format" in item["lom"]["technical"] else None, "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] if "aggregationLevel" in item["lom"]["general"] else None, - "cclom:title": item["lom"]["general"]["title"], - "cclom:general_identifier": item["lom"]["general"]["identifier"] + "cclom:title": item["lom"]["general"]["title"] } + if "identifier" in item["lom"]["general"]: + spaces["cclom:general_identifier"] = item["lom"]["general"]["identifier"] if "notes" in item: spaces["ccm:notes"] = item["notes"] if "status" in item: From 162d34bd3743466fc77499beb7dc81aa8d561491 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 7 Nov 2022 09:42:58 +0100 Subject: [PATCH 192/590] fix:oeh spider not working with new edu api --- converter/spiders/base_classes/edu_sharing_base.py | 11 +++++++++-- converter/spiders/oeh_spider.py | 2 +- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 56ed6796..1e1259ca 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -15,6 +15,8 @@ class EduSharingBase(Spider, LomBase): friendlyName = "Edu-Sharing repository spider" # the location of the edu-sharing rest api apiUrl = "http://localhost/edu-sharing/rest/" + searchUrl = "search/v1/queriesV2/-home-/" + searchToken = "*" # 
the mds to use for the search request mdsId = "-default-" @@ -24,7 +26,7 @@ def __init__(self, **kwargs): def buildUrl(self, offset=0): return ( self.apiUrl - + "search/v1/queriesV2/-home-/" + + self.searchUrl + self.mdsId + "/ngsearch?contentType=FILES&maxItems=100&skipCount=" + str(offset) @@ -32,9 +34,14 @@ def buildUrl(self, offset=0): ) def search(self, offset=0): + criteria = [] + if "queriesV2" in self.searchUrl: + criteria = [({"property": "ngsearchword", "values": [self.searchToken]} )] return JsonRequest( url=self.buildUrl(offset), - data={"criterias": [{"property": "ngsearchword", "values": ["*"]}]}, + data={ + ("criterias" if "queriesV2" in self.searchUrl else "criteria"): criteria + }, callback=self.parse, ) diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 9479046a..159d7e2b 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -9,6 +9,7 @@ class OEHSpider(EduSharingBase): friendlyName = "Open Edu Hub" url = "https://redaktion.openeduhub.net/edu-sharing/" apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" + searchUrl = "search/v1/queries/-home-/" version = "0.1.1" mdsId = "mds_oeh" importWhitelist: [str] = None @@ -21,7 +22,6 @@ def __init__(self, **kwargs): def getBase(self, response): base = EduSharingBase.getBase(self, response) - base.replace_value("type", self.getProperty("ccm:objecttype", response)) return base From ec4a7613e8caeab63e8a88c209e12053eb689b24 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 14 Sep 2022 16:43:32 +0200 Subject: [PATCH 193/590] =?UTF-8?q?remove:=20"Sch=C3=BClerhilfe"=20from=20?= =?UTF-8?q?YouTube=20crawling?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - due to a decision made on 2021-09-23 (for context: check the "Fachredaktionsleitungskonferenz"-protocol) the YT-channel "Schülerhilfe" will no longer be crawled by the youtube_spider -- this decision was brought up and confirmed on 2022-09-14 ("Inhaltekonferenz") --- csv/youtube.csv | 1 - 1 file changed, 1 deletion(-) diff --git a/csv/youtube.csv b/csv/youtube.csv index 5cd28b13..38f4ad5c 100755 --- a/csv/youtube.csv +++ b/csv/youtube.csv @@ -35,7 +35,6 @@ https://www.youtube.com/channel/UCNOsl2b57wNgN7l13TOzNJQ/featured,Jule Sommersbe https://www.youtube.com/channel/UCV9AcuxBK-W3ejgsDm35W1w/feed,seguGeschichte,video,240,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,18,de,, https://www.youtube.com/channel/UCJWn5X9X50U0kcNCgBou7EA/featured,YoungBusinessSchool,video,380; 04003; 700,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,18,de,, https://www.youtube.com/channel/UCHCip3cKoCIkpfeYDV7-aLg/featured,Lernkiste - Mathematik,video,380,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,18,de,, -https://www.youtube.com/channel/UCpdCp55K5WwVYiRRKyxKoWQ/featured,Schülerhilfe,video,380; 120; 20001,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,18,de,, https://www.youtube.com/channel/UCy0FxMgGUlRnkxCoNZUNRQQ/featured,Lehrerschmidt ,video,380,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,18,de,, https://www.youtube.com/channel/UCPVQdTsc9O0VM41vAhdlVLw/featured,ObachtMathe,video,380,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,18,de,, https://www.youtube.com/channel/UCO7ksLK-11tVxalfPKf7BTQ/featured,Primartorium,video,380,Primarstufe; Sekundarstufe 1,learner; teacher,6,13,de,, From 9b00eb3b223cc47f38baeeff8dc2b79b8dfdc1d1 Mon Sep 17 00:00:00 2001 From: Criamos <981166+Criamos@users.noreply.github.com> 
Date: Wed, 9 Nov 2022 12:13:05 +0100 Subject: [PATCH 194/590] Lehrer-Online (v0.0.5) Themenportale + fix 'sourceContentType' (#59) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * lehreronline_spider v0.0.5 - add: distinction between "Lehrer-Online Themenportale" -- Lehrer-Online implemented a feature in their API that returns the "Themenportal"-URL within the "quelle_homepage_url"-field --- by using this value, we can prefix the 'origin'-folder-name to make it easier to check only this specific subset of Lehrer-Online materials in later steps of the "Rohdatenprüfung" - fix: "valuespaces.sourceContentType" is no longer set by the crawler * remove: 'sourceContentType' from crawlers - since 'sourceContentType' is meant for sources, not individually crawled materials, crawlers should not set this field -- by setting 'sourceContentType' the individual items would be recognized as a "source" in edu-sharing, this was never intentional - version-bump all crawlers that (accidentally) set this field in past versions --- converter/items.py | 2 ++ converter/spiders/grundschulkoenig_spider.py | 3 +-- converter/spiders/lehreronline_spider.py | 20 ++++++++++++++++--- .../spiders/sample_spider_alternative.py | 2 -- converter/spiders/science_in_school_spider.py | 3 +-- converter/spiders/serlo_spider.py | 4 +--- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/converter/items.py b/converter/items.py index dea1330c..d92164c1 100644 --- a/converter/items.py +++ b/converter/items.py @@ -130,6 +130,8 @@ class ValuespaceItem(Item): learningResourceType = Field(output_processor=JoinMultivalues()) new_lrt = Field(output_processor=JoinMultivalues()) sourceContentType = Field(output_processor=JoinMultivalues()) + # ToDo: sourceContentType is no longer used in edu-sharing + # DO NOT SET this field in crawlers for individual materials! 
toolCategory = Field(output_processor=JoinMultivalues()) conditionsOfAccess = Field(output_processor=JoinMultivalues()) diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py index 9b6de2dc..d7d9e1c5 100644 --- a/converter/spiders/grundschulkoenig_spider.py +++ b/converter/spiders/grundschulkoenig_spider.py @@ -19,7 +19,7 @@ class GrundSchulKoenigSpider(CrawlSpider, LomBase): start_urls = ['https://www.grundschulkoenig.de/sitemap.xml?sitemap=pages&cHash=b8e1a6633393d69093d0ebe93a3d2616'] name = 'grundschulkoenig_spider' - version = "0.0.6" # last update: 2022-08-03 + version = "0.0.7" # last update: 2022-08-26 custom_settings = { "ROBOTSTXT_OBEY": False, # while there is no robots.txt, there is a 404-forward-page that gets misinterpreted by Scrapy @@ -178,7 +178,6 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry vs.add_value('discipline', "Religionsunterricht") vs.add_value('discipline', 'Allgemein') vs.add_value('educationalContext', 'Primarstufe') - vs.add_value('sourceContentType', "Unterrichtsmaterial- und Aufgaben-Sammlung") # vs.add_value('learningResourceType', 'other_asset_type') # ToDo: new_lrt if "/vorschule/" in response.url: diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index 5a48ecc9..7a332147 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -19,7 +19,7 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): # the limit parameter controls the amount of results PER CATEGORY (NOT the total amount of results) # API response with a "limit"-value set to 10.000 might take more than 90s (17.7 MB, 5912 URLs to crawl) ] - version = "0.0.4" # last update: 2022-08-02 + version = "0.0.5" # last update: 2022-08-26 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -411,7 +411,22 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # quelle_id currently holds just the abbreviation "LO" for all elements, check again later # quelle_logo_url is different from bild_url, always holds (the same) URL to the Lehrer-Online logo - # quelle_homepage_url always holds a link to "https://www.lehrer-online.de" + + source_homepage_url: str = selector.xpath('quelle_homepage_url/text()').get() + # Lehrer-Online offers several sub-portals to topic-specific materials. Distinction is possible by using the + # quelle_homepage_url field in the API. 
Possible values: + # "https://www.lehrer-online.de" (main website) + # "https://lo-recht.lehrer-online.de" (Schulrecht) + # "https://www.handwerk-macht-schule.de" + # "https://pubertaet.lehrer-online.de" (is a "cooperation" with "Always" (Procter & Gamble) for sex education, + # needs to be individually checked for advertorials or other product placement) + match source_homepage_url: + case "https://www.handwerk-macht-schule.de": + origin_prefixed = f"Themenportal_Handwerk_-_{metadata_dict.get('origin_folder_name')}" + metadata_dict.update({'origin_folder_name': origin_prefixed}) + case "https://pubertaet.lehrer-online.de": + origin_prefixed = f"Themenportal_Pubertaet_-_{metadata_dict.get('origin_folder_name')}" + metadata_dict.update({'origin_folder_name': origin_prefixed}) # self.logger.info(f"metadata_dict = {metadata_dict}") if material_url: @@ -518,7 +533,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) if "price" in metadata_dict.keys(): vs.add_value('price', metadata_dict.get("price")) - vs.add_value('sourceContentType', '004') # "Unterrichtsmaterial- und Aufgaben-Sammlung" base.add_value('valuespaces', vs.load_item()) license_loader = LicenseItemLoader() diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index c3ea5b3b..2c0f1f3a 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -176,8 +176,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/price.ttl) # - educationalContext optional # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/educationalContext.ttl) - # - sourceContentType optional - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/sourceContentType.ttl) # - toolCategory optional # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/toolCategory.ttl) # - accessibilitySummary optional diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index 6ead6ec2..7dbaa1b9 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -17,7 +17,7 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): start_urls = [ "https://www.scienceinschool.org/issue/" ] - version = "0.0.3" # last update: 2022-07-11 + version = "0.0.4" # last update: 2022-08-26 custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True @@ -311,7 +311,6 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: vs = ValuespaceItemLoader() vs.add_value('discipline', disciplines) vs.add_value('intendedEndUserRole', 'teacher') - vs.add_value('sourceContentType', 'Lehrkräftefortbildung') vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') # see: https://www.embl.de/aboutus/privacy_policy/ vs.add_value('new_lrt', [Constants.NEW_LRT_MATERIAL, diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index e365a5b9..cf0980cc 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -17,7 +17,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.2" # last update: 2022-07-29 + 
version = "0.2.3" # last update: 2022-08-26 custom_settings = settings.BaseSettings({ # playwright cause of issues with thumbnails+text for serlo "WEB_TOOLS": WebEngine.Playwright @@ -316,8 +316,6 @@ def parse(self, response, **kwargs): if graphql_json["learningResourceType"] is not None: # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) vs.add_value('learningResourceType', graphql_json["learningResourceType"]) - vs.add_value('sourceContentType', "Lernportal") - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/sourceContentType.ttl) base.add_value('valuespaces', vs.load_item()) From dbc4f9af2c47e90603ee78c31579030fc4d251d2 Mon Sep 17 00:00:00 2001 From: Criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 9 Nov 2022 12:34:16 +0100 Subject: [PATCH 195/590] oersi_spider - Open Music Academy only! (#64) * oersi_spider v0.0.1 (squashed) - the first version of oersi_spider is specifically hardcoded for Open Music Academy -- before extending the crawl to other metadata providers from OERSI, the topic of "Hochschulfaechersystematik" needs to be handled within edu-sharing and our crawler back-end --- add: hard-coded 'music'-value for 'discipline' (ToDo: to be removed in later versions!) - add: oersi_spider run configuration (pyCharm) - add: lifecycle contributors -- OERSI lists multiple contributors per item for OMA (and other) metadata providers -- since the 'contributor'-field doesn't specify in which capacity the person contributed, it is mapped to edu-sharings lifecycle role 'unknown' - add: mapping for HCRT -> (old) LRT / new_lrt -- only necessary if the HCRT value provided doesn't exist in the old LRT - add: missing providers (which were integrated into OERSI in the past few weeks) -- new providers ("KI Campus" and "TU Delft OpenCourseWare") for future crawler-versions * update "es_connector.py" mapping to support 'lifecycle' role 'unknown' - if a contributor contributed to a material in an unknown capacity, this person can be mapped to the role 'unknown' -- it is mapped to edu-sharing's "ccm:lifecyclecontributer_unknown"-field * add: multiple lifecycle vCards per 'lifecycle.role' - while working on oersi_spider it occurred that multiple contributors were returned by the API, but only the last person in that list was saved as a vCard - optimize imports * chore: update dateparser / playwright * oersi_spider v0.0.2 - using playwright instead of Splash because Splash (seemingly randomly) throws LUA Error 400 sometimes while crawling OMA URLs Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> * oersi_spider v0.0.3 - add: 'valuespaces.conditionsOfAccess' Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .run/oersi_spider.run.xml | 25 + converter/es_connector.py | 11 +- converter/spiders/oersi_spider.py | 821 ++++++++++++++++++++++++++++++ requirements.txt | 4 +- 4 files changed, 855 insertions(+), 6 deletions(-) create mode 100644 .run/oersi_spider.run.xml create mode 100644 converter/spiders/oersi_spider.py diff --git a/.run/oersi_spider.run.xml b/.run/oersi_spider.run.xml new file mode 100644 index 00000000..15051650 --- /dev/null +++ b/.run/oersi_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/converter/es_connector.py b/converter/es_connector.py index a45d0973..ee5cb6a4 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -1,5 +1,4 @@ import base64 -import 
collections import json import logging import time @@ -36,11 +35,12 @@ class EduSharingConstants: MEDIACENTER_PREFIX = "MEDIA_CENTER_" MEDIACENTER_PROXY_PREFIX = "MEDIA_CENTER_PROXY_" LIFECYCLE_ROLES_MAPPING = { - "publisher": "ccm:lifecyclecontributer_publisher", "author": "ccm:lifecyclecontributer_author", "editor": "ccm:lifecyclecontributer_editor", "metadata_creator": "ccm:metadatacontributer_creator", "metadata_provider": "ccm:metadatacontributer_provider", + "publisher": "ccm:lifecyclecontributer_publisher", + "unknown": "ccm:lifecyclecontributer_unknown", # (= contributor in an unknown capacity ("Mitarbeiter")) } @@ -360,7 +360,6 @@ def transformItem(self, uuid, spider, item): pass spaces["cclom:duration"] = duration - # TODO: this does currently not support multiple values per role if "lifecycle" in item["lom"]: for person in item["lom"]["lifecycle"]: if not "role" in person: @@ -408,7 +407,11 @@ def transformItem(self, uuid, spider, item): vcard.add("url").value = url if email: vcard.add("EMAIL;TYPE=PREF,INTERNET").value = email - spaces[mapping] = [vcard.serialize()] + if mapping in spaces: + # checking if a vcard already exists for this role: if so, extend the list + spaces[mapping].append(vcard.serialize()) + else: + spaces[mapping] = [vcard.serialize()] valuespaceMapping = { "discipline": "ccm:taxonid", diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py new file mode 100644 index 00000000..6d454ad2 --- /dev/null +++ b/converter/spiders/oersi_spider.py @@ -0,0 +1,821 @@ +import datetime +import logging +from typing import Optional + +import requests +import scrapy + +from converter.constants import Constants +from converter.es_connector import EduSharing +from converter.items import ( + BaseItemLoader, + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + LomClassificationItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, + ResponseItemLoader, +) +from converter.spiders.base_classes import LomBase +from converter.web_tools import WebEngine, WebTools + + +class OersiSpider(scrapy.Spider, LomBase): + """ + Crawls OERSI.org for metadata from different OER providers. + + You can control which metadata provider should be crawled by commenting/uncommenting their name within the + ELASTIC_PROVIDERS_TO_CRAWL list. + """ + + name = "oersi_spider" + # start_urls = ["https://oersi.org/"] + friendlyName = "OERSI" + version = "0.0.3" # last update: 2022-11-08 + allowed_domains = "oersi.org" + custom_settings = { + "CONCURRENT_REQUESTS": 32, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 3, + "WEB_TOOLS": WebEngine.Playwright, + } + + ELASTIC_PARAMETER_KEEP_ALIVE: str = "1m" + # for reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#time-units + ELASTIC_PARAMETER_REQUEST_SIZE: int = 1000 # maximum: 10.000, but responses for bigger request sizes take significantly longer + + ELASTIC_PIT_ID: dict = dict() + # the provider-filter at https://oersi.org/resources/ shows you which String values can be used as a provider-name + # ToDo: regularly check if new providers need to be added to the list below (and insert/sort them alphabetically!) 
+ ELASTIC_PROVIDERS_TO_CRAWL: list = [ + # "detmoldMusicTools", + # "digiLL", + # "DuEPublico", + # "eaDNURT", + # "eGov-Campus", + # "HessenHub", + # "HHU Mediathek", + # "HOOU", + # "iMoox", + # "KI Campus", + # "oncampus", + # "openHPI", + # "OpenLearnWare", + "Open Music Academy" + # "OpenRub", + # "ORCA.nrw", + # "RWTH Aachen GitLab", + # "twillo", + # "TIB AV-Portal", + # "TU Delft OpenCourseWare", + # "vhb", + # "Virtual Linguistics Campus", + # "ZOERR" + ] + # ToDo: DO NOT activate other providers until 'Hochschulfaechersystematik'-values are possible within edu-sharing! + ELASTIC_ITEMS_ALL = list() + + MAPPING_HCRT_TO_NEW_LRT = { + "diagram": "f7228fb5-105d-4313-afea-66dd59b1b6f8", # "Graph, Diagramm und Charts" + "portal": "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9", # "Webseite und Portal (stabil)" + "questionnaire": "d31a5b68-611f-4015-8be9-56bd5eb44c64", # "Fragebogen und Umfragen" + "reference_work": "c022c920-c236-4234-bae1-e264a3e2bdf6", # "Nachschlagewerk und Glossar" + "script": "6a15628c-0e59-43e3-9fc5-9a7f7fa261c4", # "Skript, Handout und Handreichung" + "sheet_music": "f7e92628-4132-4985-bcf5-93c285e300a8", # "Noten" + "textbook": "a5897142-bf57-4cd0-bcd9-7d0f1932e87a", # "Lehrbuch und Grundlagenwerk (auch E-Book)" + } + + def __init__(self, **kwargs): + super().__init__(**kwargs) + # Fetching a "point in time"-id for the subsequent ElasticSearch queries + self.ELASTIC_PIT_ID = self.elastic_pit_get_id(self.elastic_pit_create()) + # querying the ElasticSearch API for metadata-sets of specific providers, this allows us to control which + # providers we want to include/exclude by using the "ELASTIC_PROVIDERS_TO_CRAWL"-list + self.ELASTIC_ITEMS_ALL = self.elastic_fetch_all_provider_pages() + # after all items have been collected, delete the ElasticSearch PIT + json_response = self.elastic_pit_delete() + if json_response: + logging.info( + f"ElasticSearch API response (upon PIT delete): {json_response}" + ) + + def start_requests(self): + for elastic_item in self.ELASTIC_ITEMS_ALL: + main_entity_of_page: list[dict] = elastic_item.get("_source").get( + "mainEntityOfPage" + ) + if main_entity_of_page: + item_url = main_entity_of_page[0].get("id") + yield scrapy.Request( + url=item_url, cb_kwargs={"elastic_item": elastic_item} + ) + + def elastic_pit_create(self) -> dict: + """ + Creates an ElasticSearch PIT (point-in-time), which is needed for iterating through the API results. + See: https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html + """ + url = ( + f"https://oersi.org/resources/api-internal/search/oer_data/_pit?keep_alive=" + f"{self.ELASTIC_PARAMETER_KEEP_ALIVE}&pretty" + ) + headers = {"accept": "application/json"} + request = requests.post( + url=url, + headers=headers, + ) + return request.json() + + @staticmethod + def elastic_pit_get_id(pit_json_response) -> dict: + response_json: dict = pit_json_response + return response_json + + def elastic_pit_delete(self) -> dict: + """ + Deletes the ElasticSearch PIT once it's no longer needed for page iteration. 
See: + https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html#close-point-in-time-api + """ + url = f"https://oersi.org/resources/api-internal/search/_pit" + delete_request = requests.delete(url=url, json=self.ELASTIC_PIT_ID) + logging.debug(f"Deleting ElasticSearch PIT: {self.ELASTIC_PIT_ID}") + return delete_request.json() + + def elastic_query_provider_metadata(self, provider_name, search_after=None): + """ + Queries OERSI's ElasticSearch API for a metadata from a specific provider. + See: https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#paginate-search-results + """ + url = "https://oersi.org/resources/api-internal/search/_search" + if search_after is None: + payload = { + "size": self.ELASTIC_PARAMETER_REQUEST_SIZE, + "query": { + "match": {"mainEntityOfPage.provider.name": f"{provider_name}"} + }, + "pit": { + "id": self.ELASTIC_PIT_ID.get("id"), + "keep_alive": self.ELASTIC_PARAMETER_KEEP_ALIVE, + }, + "sort": [{"id": "asc"}], + "track_total_hits": f"true", + } + else: + payload = { + "size": self.ELASTIC_PARAMETER_REQUEST_SIZE, + "query": { + "match": {"mainEntityOfPage.provider.name": f"{provider_name}"} + }, + "pit": { + "id": self.ELASTIC_PIT_ID.get("id"), + "keep_alive": self.ELASTIC_PARAMETER_KEEP_ALIVE, + }, + "sort": [{"id": "asc"}], + "track_total_hits": f"true", + "search_after": search_after, + } + headers = {"Content-Type": "application/json", "accept": "application/json"} + response = requests.post(url=url, json=payload, headers=headers) + # logging.debug(response.text) + return response.json() + + def elastic_fetch_all_provider_pages(self): + """ + Iterates through ElasticSearch result pages and collects each item within a list for further parsing. See: + https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#search-after + """ + all_items: list = list() + has_next_page = True + for provider_name in self.ELASTIC_PROVIDERS_TO_CRAWL: + pagination_parameter = None + while has_next_page: + current_page_json_response: dict = self.elastic_query_provider_metadata( + provider_name=provider_name, search_after=pagination_parameter + ) + if "pit_id" in current_page_json_response: + if current_page_json_response.get( + "pit_id" + ) != self.ELASTIC_PIT_ID.get("id"): + self.ELASTIC_PIT_ID = current_page_json_response.get("pit_id") + logging.info( + f"ElasticSearch: pit_id changed between queries, using the new pit_id " + f"{current_page_json_response.get('pit_id')} for subsequent queries." 
+ ) + if "hits" in current_page_json_response: + total_count = ( + current_page_json_response.get("hits").get("total").get("value") + ) + logging.info(f"Expecting {total_count} items for {provider_name}") + if "hits" in current_page_json_response.get("hits"): + provider_items: list = current_page_json_response.get("hits").get( + "hits" + ) + if provider_items: + logging.info( + f"The provider_items list has {len(provider_items)} entries" + ) + all_items.extend(provider_items) + last_entry: dict = provider_items[-1] + # ToDo: pagination documentation + if "sort" in last_entry: + last_sort_result: list = last_entry.get("sort") + if last_sort_result: + logging.info( + f"The last_sort_result is {last_sort_result}" + ) + has_next_page = True + pagination_parameter = last_sort_result + else: + has_next_page = False + break + else: + logging.info( + f"reached the end of the ElasticSearch results for {provider_name} // " + f"Total amount of items collected: {len(all_items)}" + ) + break + return all_items + + def getId(self, response=None, elastic_item: dict = dict) -> str: + """ + Uses OERSI's ElasticSearch "_id"-field to collect an uuid. See: + https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-id-field.html + """ + return elastic_item["_id"] + + def getHash(self, response=None, elastic_item: dict = dict) -> str: + """ + Creates a hash-value by combining a date + the crawler version number within a string. + Since OERSI's date-fields are not always available, this method has several fallbacks: + 1) OERSI "datePublished"-field + 2) OERSI "dateCreated"-field + 3) if neither of the above are available: combine the current datetime + crawler version + """ + date_published: str = elastic_item["_source"]["datePublished"] + date_created: str = elastic_item["_source"]["dateCreated"] + if date_published: + hash_temp: str = f"{date_published}{self.version}" + elif date_created: + hash_temp: str = f"{date_created}{self.version}" + else: + hash_temp: str = f"{datetime.datetime.now().isoformat()}{self.version}" + return hash_temp + + def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: + elastic_item = elastic_item + if self.forceUpdate: + return True + if self.uuid: + if self.getUUID(response) == self.uuid: + logging.info(f"matching requested id: {self.uuid}") + return True + return False + if self.remoteId: + if str(self.getId(response, elastic_item=elastic_item)) == self.remoteId: + logging.info(f"matching requested id: {self.remoteId}") + return True + return False + db = EduSharing().findItem( + self.getId(response, elastic_item=elastic_item), self + ) + changed = db is None or db[1] != self.getHash( + response, elastic_item=elastic_item + ) + if not changed: + logging.info( + f"Item {self.getId(response, elastic_item=elastic_item)} (uuid: {db[0]}) has not changed" + ) + return changed + + def get_lifecycle_author( + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + date_created: Optional[str] = None, + date_published: Optional[str] = None, + ): + """ + If a "creator"-field is available in the OERSI API for a specific '_source'-item, creates an 'author'-specific + LifecycleItemLoader and fills it up with available metadata. 
+ + :param lom_base_item_loader: LomBaseItemLoader where the collected metadata should be saved to + :param elastic_item_source: the '_source'-field of the currently parsed OERSI elastic item + :param date_created: OERSI 'dateCreated' value (if available) + :param date_published: OERSI 'datePublished' value (if available) + :returns: list[str] - list of authors (names) for later usage in the LicenseItemLoader + """ + authors: list[str] = list() + if "creator" in elastic_item_source: + creators: list[dict] = elastic_item_source.get("creator") + # creator.honorificPrefix might appear in a future version of the API within a "creator"-array; + # doesn't seem to be implemented in OERSI (yet) + for creator_item in creators: + lifecycle_author = LomLifecycleItemloader() + if date_published: + lifecycle_author.add_value("date", date_published) + elif date_created: + lifecycle_author.add_value("date", date_created) + if "affiliation" in creator_item: + affiliation_item = creator_item.get("affiliation") + # ToDo: affiliation.type (e.g. Organization) + if "name" in affiliation_item: + affiliation_name = affiliation_item.get("name") + lifecycle_author.add_value("organization", affiliation_name) + if "id" in affiliation_item: + affiliation_url = affiliation_item.get("id") + lifecycle_author.add_value("url", affiliation_url) + if creator_item.get("type") == "Person": + lifecycle_author.add_value( + "role", "author" + ) # supported roles: "author" / "editor" / "publisher" + author_name: str = creator_item.get("name") + authors.append( + author_name + ) # this string is going to be used in the license field "author" + self.split_names_if_possible_and_add_to_lifecycle( + name_string=author_name, lifecycle_item_loader=lifecycle_author + ) + self.lifecycle_save_oersi_identifier_to_url_or_uuid( + person_dictionary=creator_item, + lifecycle_item_loader=lifecycle_author, + ) + lom_base_item_loader.add_value( + "lifecycle", lifecycle_author.load_item() + ) + elif creator_item.get("type") == "Organization": + creator_organization_name = creator_item.get("name") + lifecycle_author.add_value("role", "author") + lifecycle_author.add_value( + "organization", creator_organization_name + ) + lom_base_item_loader.add_value( + "lifecycle", lifecycle_author.load_item() + ) + return authors + + def get_lifecycle_contributor( + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + author_list: Optional[list[str]] = None, + ): + """ + Collects metadata from the OERSI "contributor"-field and stores it within a LomLifecycleItemLoader. 
+ """ + if "contributor" in elastic_item_source: + contributors: list[dict] = elastic_item_source.get("contributor") + # the OERSI field 'contributor' is OPTIONAL: https://dini-ag-kim.github.io/amb/draft/#contributor and might + # contain several Persons or Organizations + for contributor_item in contributors: + lifecycle_contributor = LomLifecycleItemloader() + lifecycle_contributor.add_value("role", "unknown") + contributor_name: str = contributor_item.get("name") + if contributor_name: + if author_list: + if contributor_name in author_list: + # OMA lists one author, but also lists the same person as a "contributor", + # therefore causing the same person to appear both as author and unknown contributor in + continue + # removing trailing whitespaces before further processing of the string + contributor_name = contributor_name.strip() + if "type" in contributor_item: + if contributor_item.get("type") == "Person": + self.split_names_if_possible_and_add_to_lifecycle( + name_string=contributor_name, + lifecycle_item_loader=lifecycle_contributor, + ) + elif contributor_item.get("type") == "Organization": + lifecycle_contributor.add_value( + "organization", contributor_name + ) + if "id" in contributor_item: + # id points to a URI reference of ORCID, GND, WikiData or ROR + # (while this isn't necessary for OMA items yet (as they have no 'id'-field), it will be necessary + # for other metadata providers once we extend the crawler) + self.lifecycle_save_oersi_identifier_to_url_or_uuid( + person_dictionary=contributor_item, + lifecycle_item_loader=lifecycle_contributor, + ) + if "affiliation" in contributor_item: + # ToDo: in future versions of the crawler, this field needs to be handled + # (the 'affiliation'-field currently ONLY appears in items from provider "ORCA.nrw") + # - affiliation + # - id + # - name + # - type + pass + lom_base_item_loader.add_value( + "lifecycle", lifecycle_contributor.load_item() + ) + + @staticmethod + def get_lifecycle_metadata_provider( + lom_base_item_loader: LomBaseItemloader, oersi_main_entity_of_page_item: dict + ): + """ + Collects metadata from OERSI's "provider"-field and stores it within a LomLifecycleItemLoader. + """ + # each provider-item has 3 fields: + # - 'id' (= URL of the Metadata provider, e.g. 'https://openmusic.academy') + # - 'name' (= human readable name, e.g. "Open Music Academy") + # - 'type' (= String 'Service' in 100% of cases) + provider_dict: dict = oersi_main_entity_of_page_item.get("provider") + if "name" in provider_dict: + lifecycle_metadata_provider = LomLifecycleItemloader() + lifecycle_metadata_provider.add_value("role", "metadata_provider") + metadata_provider_name: str = oersi_main_entity_of_page_item.get( + "provider" + ).get("name") + lifecycle_metadata_provider.add_value( + "organization", metadata_provider_name + ) + if "id" in provider_dict: + # unique URL to the landing-page of the metadata, e.g.: "id"-value for a typical + # 'Open Music Academy'-item looks like: "https://openmusic.academy/docs/26vG1SR17Zqf5LXpVLULqb" + metadata_provider_url: str = oersi_main_entity_of_page_item.get( + "provider" + ).get("id") + lifecycle_metadata_provider.add_value("url", metadata_provider_url) + lom_base_item_loader.add_value( + "lifecycle", lifecycle_metadata_provider.load_item() + ) + + def get_lifecycle_publisher( + self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict + ): + """ + Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. 
+ """ + if "publisher" in elastic_item_source: + # see: https://dini-ag-kim.github.io/amb/draft/#publisher + publisher_list: list[dict] = elastic_item_source.get("publisher") + if publisher_list: + for publisher_item in publisher_list: + lifecycle_publisher = LomLifecycleItemloader() + lifecycle_publisher.add_value("role", "publisher") + publisher_type: str = publisher_item.get("type") + publisher_name: str = publisher_item.get("name") + if publisher_type == "Organization": + lifecycle_publisher.add_value("organization", publisher_name) + elif publisher_type == "Person": + self.split_names_if_possible_and_add_to_lifecycle( + name_string=publisher_name, + lifecycle_item_loader=lifecycle_publisher, + ) + if "id" in publisher_item: + publisher_url = publisher_item.get("id") + if publisher_url: + lifecycle_publisher.add_value("url", publisher_url) + lom_base_item_loader.add_value( + "lifecycle", lifecycle_publisher.load_item() + ) + + @staticmethod + def lifecycle_save_oersi_identifier_to_url_or_uuid( + person_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader + ): + """ + OERSI's author 'id'-field delivers both URLs and uuids in the same field. Since edu-sharing expects URLs and + uuids to be saved in separate fields, this method checks if the 'id'-field is available at all, and if it is, + determines if the string should be saved to the 'url' or 'uuid'-field of LomLifecycleItemLoader. + """ + if "id" in person_dictionary: + author_uuid_or_url = person_dictionary.get("id") + # ToDo: If this "lazy" approach yields messy results, RegEx differentiate between uuids and URLs + if ( + "orcid.org" in author_uuid_or_url + or "dnb.de" in author_uuid_or_url + or "wikidata.org" in author_uuid_or_url + or "ror.org" in author_uuid_or_url + ): + lifecycle_item_loader.add_value("url", author_uuid_or_url) + else: + lifecycle_item_loader.add_value("uuid", author_uuid_or_url) + + @staticmethod + def split_names_if_possible_and_add_to_lifecycle( + name_string: str, lifecycle_item_loader: LomLifecycleItemloader + ): + """ + Splits a string containing a person's name - if there's a whitespace within that string - + into two parts: first_name and last_name. + Afterwards saves the split-up values to their respective 'lifecycle'-fields or saves the string as a whole. 
+ """ + if " " in name_string: + name_parts = name_string.split(maxsplit=1) + first_name = name_parts[0] + last_name = name_parts[1] + lifecycle_item_loader.add_value("firstName", first_name) + lifecycle_item_loader.add_value("lastName", last_name) + else: + lifecycle_item_loader.add_value("firstName", name_string) + + def parse(self, response: scrapy.http.Response, **kwargs): + elastic_item: dict = kwargs.get("elastic_item") + elastic_item_source: dict = elastic_item.get("_source") + # _source is the original JSON body passed for the document at index time + # see: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html + if self.shouldImport(response) is False: + logging.debug( + "Skipping entry {} because shouldImport() returned false".format( + str(self.getId(response)) + ) + ) + return None + if ( + self.getId(response=response, elastic_item=elastic_item) is not None + and self.getHash(response=response, elastic_item=elastic_item) is not None + ): + if not self.hasChanged(response, elastic_item=elastic_item): + return None + + # ToDo: look at these (sometimes available) properties later: + # - encoding (see: https://dini-ag-kim.github.io/amb/draft/#encoding - OPTIONAL field) + + # ToDo: The following keys DON'T EXIST (yet?) in the OERSI ElasticSearch API, + # but could appear in the future as possible metadata fields according to the AMB metadata draft: + # - affiliation (OERSI uses their own 'sourceOrganization'-field instead) + # - assesses + # - audience (might be suitable for "valuespaces.intendedEndUserRole") + # - competencyRequired + # - duration (for audio/video: will be suitable for "technical.location") + # - educationalLevel (might be suitable for 'valuespaces.educationalContext') + # - hasPart + # - isBasedOn + # - isPartOf + # - teaches + + # noinspection DuplicatedCode + base = BaseItemLoader() + lom = LomBaseItemloader() + general = LomGeneralItemloader() + + provider_name = str() + if "mainEntityOfPage" in elastic_item_source: + main_entity_of_page: list[dict] = elastic_item_source.get( + "mainEntityOfPage" + ) + if main_entity_of_page: + if "provider" in main_entity_of_page[0]: + provider_name: str = ( + main_entity_of_page[0].get("provider").get("name") + ) + # the first provider_name is used for saving individual items to edu-sharing sub-folders + # via 'base.origin' later + for maeop_item in main_entity_of_page: + # ToDo: according to the AMB spec, there could be a 'dateCreated'-field and 'dateModified'-field + # appearing in the future. Regularly check the API if it was implemented (this could be used for + # 'lifecycle.date') + # a random sample showed that there can be multiple "mainEntityOfPage"-objects + # this only occurred once within 55438 items in the API, but might happen more often in the future + if "provider" in maeop_item: + self.get_lifecycle_metadata_provider( + lom_base_item_loader=lom, + oersi_main_entity_of_page_item=maeop_item, + ) + + # if "about" in elastic_item_source: + # about = elastic_item_source.get("about") + # # about is OPTIONAL + # for about_item in about: + # # ToDo: disciplines are available as a list (according to the 'Hochschulfaechersystematik') + # # - 'de'-field: human-readable German String + # # - 'id'-field: URL of the entry (e.g. "https://w3id.org/kim/hochschulfaechersystematik/n78") + # pass + # # see: https://dini-ag-kim.github.io/amb/draft/#about + # # ToDo: DISCIPLINES! + # # - prefLabel + # # - de: German description (Schulfach / Studienfach) + # # - en: English ... + # # - uk: Ukrainian ... 
+ # # - etc. (depending on the provider, several more languages + descriptions are listed) + # # - id + + date_created = str() + if "dateCreated" in elastic_item_source: + date_created: str = elastic_item_source.get("dateCreated") + date_published = str() + if "datePublished" in elastic_item_source: + date_published: str = elastic_item_source.get("datePublished") + + base.add_value("sourceId", self.getId(response, elastic_item=elastic_item)) + base.add_value("hash", self.getHash(response, elastic_item=elastic_item)) + if "image" in elastic_item_source: + thumbnail_url = elastic_item_source.get("image") # thumbnail + if thumbnail_url: + base.add_value("thumbnail", thumbnail_url) + if provider_name: + # every item gets sorted into a //-subfolder to make QA more feasable + base.add_value("origin", provider_name) + + general.add_value("identifier", response.url) + if "keywords" in elastic_item_source: + keywords: list = elastic_item_source.get("keywords") + if keywords: + general.add_value("keyword", keywords) + if "description" in elastic_item_source: + description: str = elastic_item_source.get("description") + general.add_value("description", description) + title: str = elastic_item_source.get("name") + general.add_value("title", title) + + in_languages = list() + if "inLanguage" in elastic_item_source: + in_languages: list[str] = elastic_item_source.get("inLanguage") + # list of language codes, e.g. ["de", "en"]. (even if it's just a single language) + if in_languages: + for language_value in in_languages: + general.add_value("language", language_value) + + # noinspection DuplicatedCode + lom.add_value("general", general.load_item()) + + technical = LomTechnicalItemLoader() + technical.add_value( + "format", "text/html" + ) # e.g. if the learning object is a web-page + if "id" in elastic_item_source: + identifier_url: str = elastic_item_source.get( + "id" + ) # this URL REQUIRED and should always be available + # see https://dini-ag-kim.github.io/amb/draft/#id + if identifier_url: + technical.add_value("location", identifier_url) + # the identifier_url should be more stable/robust than the current response.url + # navigated by the crawler + else: + technical.add_value("location", response.url) + lom.add_value("technical", technical.load_item()) + + authors = self.get_lifecycle_author( + lom_base_item_loader=lom, + elastic_item_source=elastic_item_source, + date_created=date_created, + date_published=date_published, + ) + + self.get_lifecycle_contributor( + lom_base_item_loader=lom, + elastic_item_source=elastic_item_source, + author_list=authors, + ) + + self.get_lifecycle_publisher( + lom_base_item_loader=lom, elastic_item_source=elastic_item_source + ) + + # ToDo: 'sourceOrganization' doesn't appear in OMA results, but will be available for other providers + # each item can have multiple 'soureOrganization' dictionaries attached to it, which typically look like + # { + # "type": "Organization", + # "name": "Universität Innsbruck" + # } + # if "sourceOrganization" in elastic_item_source: + # # attention: the "sourceOrganization"-field is not part of the AMB draft + # # see: https://github.com/dini-ag-kim/amb/issues/110 + # # it is used by OERSI to express affiliation to an organization (instead of the AMB 'affiliation'-field) + # lifecycle_org = LomLifecycleItemloader() + # source_organizations: list = elastic_item_source.get('sourceOrganization') + # for source_org_item in source_organizations: + # if "id" in source_org_item: + # source_org_url = source_org_item.get('id') + # 
lifecycle_org.add_value('url', source_org_url) + # if "name" in source_org_item: + # source_org_name = source_org_item.get('name') + # lifecycle_org.add_value('organization', source_org_name) + # # source_org_type = source_org_item.get('type') # e.g.: "Organization", "CollegeOrUniversity" etc. + # lom.add_value('lifecycle', lifecycle_org.load_item()) + + educational = LomEducationalItemLoader() + if in_languages: + for language_value in in_languages: + educational.add_value("language", language_value) + # noinspection DuplicatedCode + lom.add_value("educational", educational.load_item()) + + classification = LomClassificationItemLoader() + lom.add_value("classification", classification.load_item()) + + base.add_value("lom", lom.load_item()) + + vs = ValuespaceItemLoader() + vs.add_value("discipline", "420") # Musik + # ToDo: remove this hardcoded value in the future! (oersi_spider v0.0.1 is hardcoded for 'Open Music Academy') + # ToDo: future versions of the crawler need to use 'Hochschulfaechersystematik'-values! + vs.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) + is_accessible_for_free: bool = elastic_item_source.get("isAccessibleForFree") + if is_accessible_for_free: + vs.add_value("price", "no") + else: + vs.add_value("price", "yes") + if "conditionsOfAccess" in elastic_item_source: + conditions_of_access: dict = elastic_item_source.get("conditionsOfAccess") + if "id" in conditions_of_access: + conditions_of_access_id: str = conditions_of_access["id"] + # the "id"-field can hold one of two URLs. Either: + # https://w3id.org/kim/conditionsOfAccess/login or https://w3id.org/kim/conditionsOfAccess/no_login + # which is equal to our OEH vocab: + # https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/conditionsOfAccess.ttl + if "/conditionsOfAccess/" in conditions_of_access_id: + conditions_of_access_value = conditions_of_access_id.split("/")[-1] + if conditions_of_access_value: + vs.add_value('conditionsOfAccess', conditions_of_access_value) + + hcrt_types = dict() + oeh_lrt_types = dict() + learning_resource_types = list() + if "learningResourceType" in elastic_item_source: + learning_resource_types: list[dict] = elastic_item_source.get( + "learningResourceType" + ) + # see: https://dini-ag-kim.github.io/amb/draft/#learningresourcetype - a typical LRT-dict looks like this: + # { + # "prefLabel": { + # "nl": "Webpagina", + # "fr": "Page Web", + # "da": "Hjemmeside", + # "de": "Webseite", + # "en": "Web Page", + # "es": "Página Web", + # "fi": "Verkkosivu", + # "uk": "Веб-сайт" + # }, + # "id": "https://w3id.org/kim/hcrt/web_page" + # } + if learning_resource_types: + # while the AMB specification allows vocabularies from either HCRT or OEH, + # currently the OERSI API only serves HCRT LRTs + for lrt_item in learning_resource_types: + if "id" in lrt_item: + if "/hcrt/" in lrt_item.get("id"): + hcrt_type_url = lrt_item.get("id") + hcrt_type = lrt_item.get("prefLabel").get("en") + hcrt_types.update({hcrt_type: hcrt_type_url}) + elif "/openeduhub/" in lrt_item.get("id"): + oeh_lrt_url = lrt_item.get("id") + oeh_lrt_type = lrt_item.get("prefLabel").get("en") + oeh_lrt_types.update({oeh_lrt_type: oeh_lrt_url}) + if hcrt_types: + for hcrt_url in hcrt_types.values(): + # hcrt_urls will typically look like this: "https://w3id.org/kim/hcrt/drill_and_practice" + hcrt_key: str = hcrt_url.split("/")[-1] + if hcrt_key in self.MAPPING_HCRT_TO_NEW_LRT: + # some values in the HCRT Vocab don't exist in the (old) learningResourceType + # therefore they get mapped directly to a new_lrt + # ToDo: 
we are setting learningResourceType and new_lrt at the same time here! + # - while Open Music Academy only uses a single LRT per item (100% of cases are "web_page") + # - this might not be desired crawler behaviour in later versions of the crawler! + hcrt_key = self.MAPPING_HCRT_TO_NEW_LRT.get(hcrt_key) + vs.add_value("new_lrt", hcrt_key) + else: + vs.add_value("learningResourceType", hcrt_key) + if oeh_lrt_types: + vs.add_value("learningResourceType", list(oeh_lrt_types.keys())) + + base.add_value("valuespaces", vs.load_item()) + + license_loader = LicenseItemLoader() + if "license" in elastic_item_source: + license_url: str = elastic_item_source.get("license").get("id") + if license_url: + # ToDo: from some providers (e.g. twillo) license URLs end with "deed.de", confirm if licenses get + # properly recognized in edu-sharing + license_loader.add_value("url", license_url) + if authors: + license_loader.add_value("author", authors) + # noinspection DuplicatedCode + base.add_value("license", license_loader.load_item()) + + permissions = super().getPermissions(response) + base.add_value("permissions", permissions.load_item()) + + response_loader = ResponseItemLoader(response=response) + # for future maintenance, during debugging the following problems occurred one day, + # but disappeared the next day: + # - OMA URLs cause HTTP Error 400 in Splash + response_loader.add_value("status", response.status) + url_data = WebTools.getUrlData( + url=response.url, engine=WebEngine.Playwright + ) + if "html" in url_data: + response_loader.add_value("html", url_data["html"]) + if "text" in url_data: + response_loader.add_value("text", url_data["text"]) + if "cookies" in url_data: + response_loader.add_value("cookies", url_data["cookies"]) + if "har" in url_data: + response_loader.add_value("har", url_data["har"]) + if "screenshot_bytes" in url_data: + # ToDo: optional thumbnail feature (toggleable via a list?) 
+ # -> OMA serves generic thumbnails, which is why a screenshot of the + # website will always be more interesting to users than the same generic image across ~650 materials + base.add_value("screenshot_bytes", url_data["screenshot_bytes"]) + response_loader.add_value("headers", response.headers) + response_loader.add_value("url", response.url) + base.add_value("response", response_loader.load_item()) + + yield base.load_item() diff --git a/requirements.txt b/requirements.txt index fa905c25..abe6ab52 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ wheel==0.37.1 image -dateparser==1.1.1 +dateparser==1.1.3 isodate==0.6.1 pyppeteer==1.0.2 html2text~=2020.1.16 @@ -24,5 +24,5 @@ itemadapter==0.5.0 six==1.16.0 certifi==2021.10.8 urllib3~=1.26.09 -playwright==1.24.1 +playwright==1.27.1 pyOpenSSL==22.0.0 \ No newline at end of file From 70388cc540fa6188b250c2c0828d251d278df40a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 13 Sep 2022 12:34:20 +0200 Subject: [PATCH 196/590] ZUM version-bumps ("defective/offline"-filter false-positives) - the last "MediaWikiBase"-update was necessary to fix URLs with special characters and whitespaces (URL-encoding for "technical.location" was necessary since ZUM only delivers an article name as a string, but not a fully assembled URL) - during testing of SD_WLO-430 it was discovered that items which were scraped in early 2020 are carrying malformed URLs and cause items in edu-sharing to be detected as "defective/offline materials" (cclom:location_status 900): bumping the versions of these (problematic) crawlers should fix the items in question --- converter/spiders/zum_deutschlernen.py | 2 +- converter/spiders/zum_klexikon.py | 2 +- converter/spiders/zum_spider.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/converter/spiders/zum_deutschlernen.py b/converter/spiders/zum_deutschlernen.py index 4e89fa51..b6e554b9 100644 --- a/converter/spiders/zum_deutschlernen.py +++ b/converter/spiders/zum_deutschlernen.py @@ -11,7 +11,7 @@ class ZUMSpider(MediaWikiBase, scrapy.Spider): name = "zum_deutschlernen_spider" friendlyName = "ZUM-Deutsch-Lernen" url = "https://deutsch-lernen.zum.de/" - version = "0.1.0" + version = "0.1.1" # last update: 2022-09-13 license = Constants.LICENSE_CC_BY_40 def parse_page_query(self, response: scrapy.http.Response): diff --git a/converter/spiders/zum_klexikon.py b/converter/spiders/zum_klexikon.py index 6b0fc30c..699f8b63 100644 --- a/converter/spiders/zum_klexikon.py +++ b/converter/spiders/zum_klexikon.py @@ -14,7 +14,7 @@ class ZUMKlexikonSpider(MediaWikiBase, scrapy.Spider): name = "zum_klexikon_spider" friendlyName = "ZUM-Klexikon" url = "https://klexikon.zum.de/" - version = "0.1.2" # last update: 2022-02-16 + version = "0.1.3" # last update: 2022-09-13 license = Constants.LICENSE_CC_BY_SA_30 def parse_page_query(self, response: scrapy.http.Response): diff --git a/converter/spiders/zum_spider.py b/converter/spiders/zum_spider.py index b603d785..6f449df7 100644 --- a/converter/spiders/zum_spider.py +++ b/converter/spiders/zum_spider.py @@ -11,7 +11,7 @@ class ZUMSpider(MediaWikiBase, scrapy.Spider): name = "zum_spider" friendlyName = "ZUM-Unterrichten" url = "https://unterrichten.zum.de/" - version = "0.1.0" + version = "0.1.1" # last update: 2022-09-13 license = Constants.LICENSE_CC_BY_SA_40 def technical_item(self, response=None) -> LomTechnicalItem: From e097ff97e79db218c1aba15f8cf6874ce1f08c4f Mon Sep 17 00:00:00 2001 From: criamos 
<981166+Criamos@users.noreply.github.com> Date: Tue, 13 Sep 2022 13:24:08 +0200 Subject: [PATCH 197/590] add: zum_spider pyCharm run/debug configuration --- .run/zum_spider.run.xml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .run/zum_spider.run.xml diff --git a/.run/zum_spider.run.xml b/.run/zum_spider.run.xml new file mode 100644 index 00000000..18f634dd --- /dev/null +++ b/.run/zum_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file From ceb75576218eee0f7946981acc930dcf54edbe5b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 13 Sep 2022 13:39:42 +0200 Subject: [PATCH 198/590] fix: ZUM URL encoding - in: 'response.url' and 'technical.location' - ZUM MediaWikis only provide "title"-fields (string) which have to be manually assembled into URLs -- URLs with special characters were malformed in the past. to mitigate this in the future, we're using URL-encoding by default Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/mediawiki_base.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/converter/spiders/base_classes/mediawiki_base.py b/converter/spiders/base_classes/mediawiki_base.py index 2866cbe0..568a831e 100644 --- a/converter/spiders/base_classes/mediawiki_base.py +++ b/converter/spiders/base_classes/mediawiki_base.py @@ -206,7 +206,10 @@ def getHash(self, response=None): def mapResponse(self, response, fetchData=True): mr = super().mapResponse(response, fetchData=False) data = json.loads(response.body) - mr.replace_value('url', f'{self.url}wiki/{jmes_title.search(data)}') + title = jmes_title.search(data) + mr.replace_value('url', f"{self.url}{urllib.parse.quote('wiki/')}{urllib.parse.quote(title)}") + # response.url can't be used for string concatenation here since it would point to "/api.php" + # self.url is overwritten by the children of MediaWikiBase with the URL root return mr def getBase(self, response=None) -> BaseItemLoader: @@ -240,7 +243,7 @@ def getLOMTechnical(self, response=None) -> LomTechnicalItemLoader: loader.replace_value('format', 'text/html') data = response.meta['item'] title = jmes_title.search(data) - loader.replace_value('location', f'{self.url}wiki/{urllib.parse.quote(title)}') + loader.replace_value('location', f"{self.url}{urllib.parse.quote('wiki/')}{urllib.parse.quote(title)}") return loader def getValuespaces(self, response): From fe961ad1fab869a29458c6602d4d029e1bd8f1d8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 14 Dec 2022 17:01:11 +0100 Subject: [PATCH 199/590] fix: memucho_spider "base.sourceId" - rollback: getId()-method uses "TopicId" instead of response.url -- this was accidentally changed back in April (and changing the identifier would cause duplicate entries between old crawls and new crawls) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/memucho_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/memucho_spider.py b/converter/spiders/memucho_spider.py index 47026634..1130a347 100644 --- a/converter/spiders/memucho_spider.py +++ b/converter/spiders/memucho_spider.py @@ -44,7 +44,7 @@ def mapResponse(self, response): return LomBase.mapResponse(self, response) def getId(self, response): - return response.meta["item"].get("ItemUrl") + return response.meta["item"].get("TopicId") def getHash(self, response): date_modified = 
response.meta["item"].get("DateModified") From 3ecc7a153e30cd4d65f96300ae9da6d6d960ce1b Mon Sep 17 00:00:00 2001 From: Criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 15 Dec 2022 17:22:47 +0100 Subject: [PATCH 200/590] change: zum_dwu_spider to dwu_spider (v0.0.6) (#65) * zum_dwu_spider becomes dwu_spider (v0.0.5, squashed) - changes due to SD_WLO-525: -- this crawler update was necessary due to a request from DWU who informed us that DWU learning materials won't be available anymore on ZUM servers in the near future -- therefore changing URLs (from ZUM URLs to DWU's private website offering) - fix: additional check for empty "title"-strings - improve: license.description, lifecycle.organization, lifecycle.url -- hardcoded some values since they are not occurring regularly on every HTML, but are (most probably) meant to be - fix: slightly more generous XPath expressions for sections and subtopics -- the DOM structure seems to have slightly changed since the last time the crawler ran, therefore I changed the XPaths to be slightly more generous during link collection - version bump to v0.0.5 * add run/debug configuration for dwu_spider Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> * dwu_spider v0.0.6 - add: crawling of the overview-pages (instead of materials-only) -- add: description-addendum which points towards the overview-page to which a material belongs to - add: collect URLs to the higher-level overview-pages and add these to the description text -- this is a workaround (due to a request from DWU) for users to find their way back to the overview-lists (for the materials where it isn't possible to do so due to JS-Buttons that point to "history.back()") - change: slightly improve the wording in "license.description" Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- ..._dwu_spider.run.xml => dwu_spider.run.xml} | 6 +- .../{zum_dwu_spider.py => dwu_spider.py} | 88 +++++++++++++------ 2 files changed, 65 insertions(+), 29 deletions(-) rename .run/{zum_dwu_spider.run.xml => dwu_spider.run.xml} (76%) rename converter/spiders/{zum_dwu_spider.py => dwu_spider.py} (72%) diff --git a/.run/zum_dwu_spider.run.xml b/.run/dwu_spider.run.xml similarity index 76% rename from .run/zum_dwu_spider.run.xml rename to .run/dwu_spider.run.xml index 8918164a..34785873 100644 --- a/.run/zum_dwu_spider.run.xml +++ b/.run/dwu_spider.run.xml @@ -1,6 +1,6 @@ - - + + {overview_title}" \ + f"\n" description_raw = response.xpath('//descendant::td[@class="t1fbs"]').getall() description_raw: str = ''.join(description_raw) if description_raw is not None: description_raw = w3lib.html.remove_tags(description_raw) description_raw = w3lib.html.strip_html5_whitespace(description_raw) clean_description = w3lib.html.replace_escape_chars(description_raw) + if description_addendum: + clean_description = f"{description_addendum}{clean_description}" general.add_value('description', clean_description) if len(description_raw) == 0: # Fallback for exercise-pages where there's only 1 title field and 1 short instruction sentence # e.g.: http://www.zum.de/dwu/depothp/hp-phys/hppme24.htm description_fallback = response.xpath('//descendant::div[@id="InstructionsDiv"]/descendant' '::*/text()').get() + if description_addendum: + description_fallback = f"{description_addendum}{description_fallback}" general.replace_value('description', description_fallback) # most of the time the title is stored directly title: str = response.xpath('/html/head/title/text()').get() @@ -130,7 +158,6 @@ def 
parse(self, response: scrapy.http.Response, **kwargs): title = response.xpath('//td[@class="tt1math"]/text()').get() if title is not None: title = title.strip() - # desc_list = response.xpath('/html/body/table[2]/tr/td/table/tr[1]/td[1]/text()').getall() desc_list = response.xpath('//td[@class="t1fbs"]/text()').getall() if desc_list is not None and len(desc_list) == 0: # if the first attempt at grabbing a description fails, we try it at another place @@ -142,15 +169,18 @@ def parse(self, response: scrapy.http.Response, **kwargs): clean_description = w3lib.html.replace_escape_chars(description_raw) general.replace_value('description', clean_description) - if title is not None: + if title: title = w3lib.html.replace_escape_chars(title) - if title is not None: - # this double-check is necessary for broken headings that ONLY consisted of escape-chars + if title: + # checking if the title is still valid, which necessary for broken headings that ONLY consisted of + # escape-chars if title == '': - # there's some pages (Exercises) that only hold escape chars or whitespaces as their title + # there's some pages (exercises) that only hold escape chars or whitespaces as their title # the title is simply bold text hidden within a div container title = response.xpath('//div[@class="Titles"]/h3[@class="ExerciseSubtitle"]/b/text()').get() - title = title.strip() + if title: + # checking once more for valid titles, since we might get an empty string from "ExerciseSubtitle" + title = title.strip() # Since we're grabbing titles from headings, a lot of them have a trailing ":" if len(title) > 0 and title.endswith(":"): # replacing the string with itself right up to the point of the colon @@ -162,8 +192,12 @@ def parse(self, response: scrapy.http.Response, **kwargs): # on the vast majority of .htm pages the keywords sit in the http-equiv content tag keyword_string = response.xpath('/html/head/meta[@http-equiv="keywords"]/@content').get() if keyword_string is None: - # but on some sub-pages, especially the interactive javascript pages, the keywords are in another container - keyword_string = response.xpath('/html/head/meta[@name="keywords"]/@content').get() + # 1st workaround: some overview-pages have their keywords in a capitalized Keywords container: + keyword_string = response.xpath('/html/head/meta[@http-equiv="Keywords"]/@content').get() + if keyword_string is None: + # but on some sub-pages, especially the interactive javascript pages, the keywords can be found in + # another element of the DOM + keyword_string = response.xpath('/html/head/meta[@name="keywords"]/@content').get() if keyword_string is not None: keyword_list = keyword_string.rsplit(", ") # trying to catch the completely broken keyword strings to clean them up manually @@ -196,9 +230,8 @@ def parse(self, response: scrapy.http.Response, **kwargs): lifecycle.add_value('role', 'author') lifecycle.add_value('firstName', 'Dieter') lifecycle.add_value('lastName', 'Welz') - lifecycle.add_value('url', 'dwu@zum.de') - lifecycle.add_value('organization', - response.xpath('/html/head/meta[@http-equiv="organization"]/@content').get()) + lifecycle.add_value('url', 'mail@dwu-unterrichtsmaterialien.de') + lifecycle.add_value('organization', 'dwu-Unterrichtsmaterialien') lom.add_value('lifecycle', lifecycle.load_item()) educational = LomEducationalItemLoader() @@ -227,7 +260,10 @@ def parse(self, response: scrapy.http.Response, **kwargs): vs.add_value('conditionsOfAccess', 'no login') lic = LicenseItemLoader() - lic.add_value('description', 
'http://www.zum.de/dwu/hilfe.htm') + lic.add_value('description', 'Bitte ' + 'Copyright-Hinweise' + ' und Nutzungsbedingungen beachten! (siehe auch: ' + 'Hilfe)') lic.add_value('internal', Constants.LICENSE_CUSTOM) lic.add_value('author', response.xpath('/html/head/meta[@http-equiv="author"]/@content').get()) From 93b550f082ba5e07925a35e2e319a3d444f8d6d5 Mon Sep 17 00:00:00 2001 From: Criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Dec 2022 12:20:26 +0100 Subject: [PATCH 201/590] youtube_spider: (optional) feature - limited crawls to a specific URL (#66) * youtube_spider v0.2.2 - feat: limited crawling mode -- if '.env'-parameter 'YOUTUBE_LIMITED_CRAWL_URL' is set, the crawler will ONLY crawl a singular URL - fix: 'license.internal' is no longer set by the crawler (which would get thrown away anyway by the pipeline) -- YouTube only offers two distinct license values (returned as strings) which are already handled correctly by setting a "license.url" or by setting a custom license and its description Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> * style: add missing whitespace in debug message for "license.internal" Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> * docs: .env.example update for singular YouTube crawls - explains the '.env'-variable "YOUTUBE_LIMITED_CRAWL_URL" -- (optional) variable for controlling whether 'youtube_spider' should only crawl a specified URL (string value) -- if the environment variable is not set (or left empty), it will default to doing a complete run (according to the entries in 'csv/youtube.csv' Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/.env.example | 7 ++- converter/es_connector.py | 2 +- converter/spiders/youtube_spider.py | 70 ++++++++++++++++++++++------- 3 files changed, 60 insertions(+), 19 deletions(-) diff --git a/converter/.env.example b/converter/.env.example index 3753cf93..a54e1d44 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -40,8 +40,11 @@ DRY_RUN = True # Use this if you e.g. want to do custom property mapping for any crawler before storing the data # CUSTOM_PIPELINES = "converter.pipelines.ExampleLoggingPipeline:100" -# your youtube api key (required for youtube crawler) -YOUTUBE_API_KEY = "" +# Your YouTube API key (required for running the youtube crawler 'youtube_spider'): +YOUTUBE_API_KEY="" +# If you only want to crawl a single YouTube channel/playlist, activate the LIMITED crawl mode by setting its URL here: +#YOUTUBE_LIMITED_CRAWL_URL="" +# (Please make sure that your 'csv/youtube.csv' contains the same URL!) # only for oeh spider: select the sources you want to fetch from oeh (comma seperated) # OEH_IMPORT_SOURCES = 'oeh,wirlernenonline_spider,serlo_spider,youtube_spider' diff --git a/converter/es_connector.py b/converter/es_connector.py index ee5cb6a4..7b6cdd40 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -300,7 +300,7 @@ def mapLicense(self, spaces, license): if "description" in license: spaces["cclom:rights_description"] = license["description"] case _: - logging.warning(f"Received a value for license['internal'] that is not recognized by es_connector." + logging.warning(f"Received a value for license['internal'] that is not recognized by es_connector. 
" f"Please double-check if the provided value {license['internal']} is correctly " f"mapped within Constants AND es_connector.") diff --git a/converter/spiders/youtube_spider.py b/converter/spiders/youtube_spider.py index 60ecdc32..a2e5b0f5 100644 --- a/converter/spiders/youtube_spider.py +++ b/converter/spiders/youtube_spider.py @@ -27,7 +27,7 @@ class YoutubeSpider(Spider): """ - Parse a CSV file with Youtube channels and playlists and crawl them. + Parse a CSV file with YouTube channels and playlists and crawl them. The CSV file was manually exported from https://docs.google.com/spreadsheets/d/1VsGyb4mrbzq45qIGVt-j6_ut4_VGJPRA39oBhi5SxGk to @@ -37,7 +37,7 @@ class YoutubeSpider(Spider): name = "youtube_spider" friendlyName = "Youtube" url = "https://www.youtube.com/" - version = "0.2.1" # last update: 2022-04-07 + version = "0.2.2" # last update: 2022-12-15 @staticmethod def get_video_url(item: dict) -> str: @@ -70,12 +70,50 @@ def __init__(self, **kwargs): @overrides # Spider def start_requests(self): if env.get("YOUTUBE_API_KEY", False) == "": - logging.error("YOUTUBE_API_KEY is required for youtube_spider") + logging.error("YOUTUBE_API_KEY is required for youtube_spider. Please check your '.env'-settings!") return - for row in YoutubeSpider.get_csv_rows("youtube.csv"): - request = self.request_row(row) - if request is not None: - yield request + if env.get(key="YOUTUBE_LIMITED_CRAWL_URL", allow_null=True, default=None) == "": + # If no value is set, this serves as a reminder that you can disable the '.env'-variable altogether + logging.debug("The '.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' was detected, but no URL was set. \n" + "If you meant to start a LIMITED crawl, please check your '.env'-file and restart the " + "crawler. The crawler is now commencing with a COMPLETE crawl according to the " + "'csv/youtube.csv'-table.") + if env.get(key="YOUTUBE_LIMITED_CRAWL_URL", allow_null=True, default=None): + # the OPTIONAL .env parameter is used to crawl from a SINGULAR URL ONLY + logging.debug("'.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' recognized. LIMITED crawling mode activated!\n" + "(This mode WILL NOT crawl the complete 'csv/youtube.csv'-file, but only a SINGLE YouTube " + "channel or playlist!)\n" + "If you actually wanted to start a complete/full crawl, please disable the variable in your " + "'.env'-file.") + singular_crawl_target_url: str = env.get(key="YOUTUBE_LIMITED_CRAWL_URL", default=None) + if singular_crawl_target_url: + logging.debug(f"'.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' is set to: {singular_crawl_target_url} \n" + f"Searching for {singular_crawl_target_url} within 'csv/youtube.csv' for metadata values.") + match_found: bool = False + for row in YoutubeSpider.get_csv_rows("youtube.csv"): + if row["url"] == singular_crawl_target_url: + # ToDo (optional): several YouTube URLs (youtu.be, youtube.com / youtube.de) can point to the same + # channel or playlist. Providing some leniency by resolving an URL to the "real" target might + # provide some Quality of Life while using this feature. + match_found = True + logging.debug(f"Match found in 'csv/youtube.csv' for {singular_crawl_target_url}! Commencing" + f"SINGULAR crawl process.") + request = self.request_row(row) + if request: + # we are expecting exactly one result, therefore we can stop looking after the first match + yield request + break + if match_found is False: + logging.error(f"Could not find a match for {singular_crawl_target_url} within 'csv/youtube.csv'. 
" + f"Please confirm that the EXACT specified URL can be found in a row of the CSV and " + f"restart the crawler.") + return + else: + # this is where the COMPLETE crawl happens: requests are yielded row-by-row from 'csv/youtube.csv' + for row in YoutubeSpider.get_csv_rows("youtube.csv"): + request = self.request_row(row) + if request is not None: + yield request def request_row(self, row: dict) -> Request: if row["url"].startswith("https://www.youtube.com"): @@ -87,11 +125,11 @@ def request_row(self, row: dict) -> Request: channel_id = url.path.split("/")[2] return self.request_channel(channel_id, meta={"row": row}) else: - # Youtube offers custom URLs to popular channels of the form + # YouTube offers custom URLs to popular channels of the form # - https://www.youtube.com/c/ # - https://www.youtube.com/ # - https://www.youtube.com/user/ - # - https://www.youtube.com/ + # - https://www.youtube.com/ # # All of these lead to an ordinary channel, but we need to read its ID from the page # body. @@ -330,19 +368,19 @@ def getChannelUrl(self, response: Response) -> str: @overrides # LomBase def getLicense(self, response: Response) -> items.LicenseItemLoader: - license = LomBase.getLicense(self, response) - license.add_value("internal", response.meta["item"]["status"]["license"]) - # possible values: "youtube", "creativeCommon" + license_loader = LomBase.getLicense(self, response) + # there are only two possible values according to https://developers.google.com/youtube/v3/docs/videos: + # "youtube", "creativeCommon" if response.meta["item"]["status"]["license"] == "creativeCommon": - license.add_value( + license_loader.add_value( "url", Constants.LICENSE_CC_BY_30 ) elif response.meta["item"]["status"]["license"] == "youtube": - license.replace_value("internal", Constants.LICENSE_CUSTOM) - license.add_value("description", "Youtube-Standardlizenz") + license_loader.replace_value("internal", Constants.LICENSE_CUSTOM) + license_loader.add_value("description", "Youtube-Standardlizenz") else: logging.warning("Youtube element {} has no license".format(self.getId())) - return license + return license_loader @overrides # LomBase def getValuespaces(self, response: Response) -> items.ValuespaceItemLoader: From 7110be3e5ba4c12bb61c3c1031c2f90ac1e64496 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 5 Jan 2023 16:24:58 +0100 Subject: [PATCH 202/590] change: use playwright for ZUM (MediaWikiBase) crawlers - see: SD_WLO-419 - during testing of the recent ZUM URL-encoding fixes (SD_WLO-419) it was noticed that crawlers waited for Splash containers (which never recovered from hanging) indefinitely in the ranger environment -- therefore added custom_settings to use Playwright instead - version bumped zum_deutschlernen.py, zum_spider.py and zum_klexikon.py to refresh their metadata during the next crawl zum_klexikon: - fix: changed license from CC-BY-SA 3.0 to 4.0 -- Klexikon seems to have switched its license version sometime in the past, according to its DOM (footer) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/zum_deutschlernen.py | 6 +++++- converter/spiders/zum_klexikon.py | 10 +++++++--- converter/spiders/zum_spider.py | 7 +++++-- 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/converter/spiders/zum_deutschlernen.py b/converter/spiders/zum_deutschlernen.py index b6e554b9..af076a33 100644 --- a/converter/spiders/zum_deutschlernen.py +++ b/converter/spiders/zum_deutschlernen.py @@ -5,14 +5,18 @@ import scrapy 
from ..constants import Constants +from ..web_tools import WebEngine class ZUMSpider(MediaWikiBase, scrapy.Spider): name = "zum_deutschlernen_spider" friendlyName = "ZUM-Deutsch-Lernen" url = "https://deutsch-lernen.zum.de/" - version = "0.1.1" # last update: 2022-09-13 + version = "0.1.2" # last update: 2023-01-05 license = Constants.LICENSE_CC_BY_40 + custom_settings = { + "WEB_TOOLS": WebEngine.Playwright + } def parse_page_query(self, response: scrapy.http.Response): """ diff --git a/converter/spiders/zum_klexikon.py b/converter/spiders/zum_klexikon.py index 699f8b63..ff1a712c 100644 --- a/converter/spiders/zum_klexikon.py +++ b/converter/spiders/zum_klexikon.py @@ -5,17 +5,21 @@ import w3lib.html from scrapy import Selector -from converter.items import LomTechnicalItem, LicenseItem, LomGeneralItemloader, ValuespaceItemLoader, ValuespaceItem +from converter.items import LomTechnicalItem, LicenseItem, LomGeneralItemloader, ValuespaceItem from .base_classes.mediawiki_base import MediaWikiBase, jmes_pageids, jmes_title, jmes_links, jmes_continue from ..constants import Constants +from ..web_tools import WebEngine class ZUMKlexikonSpider(MediaWikiBase, scrapy.Spider): name = "zum_klexikon_spider" friendlyName = "ZUM-Klexikon" url = "https://klexikon.zum.de/" - version = "0.1.3" # last update: 2022-09-13 - license = Constants.LICENSE_CC_BY_SA_30 + version = "0.1.4" # last update: 2023-01-05 + license = Constants.LICENSE_CC_BY_SA_40 + custom_settings = { + "WEB_TOOLS": WebEngine.Playwright + } def parse_page_query(self, response: scrapy.http.Response): """ diff --git a/converter/spiders/zum_spider.py b/converter/spiders/zum_spider.py index 6f449df7..70786a7d 100644 --- a/converter/spiders/zum_spider.py +++ b/converter/spiders/zum_spider.py @@ -5,14 +5,18 @@ from converter.constants import Constants from converter.items import LicenseItem, LomTechnicalItem, ValuespaceItem, LomGeneralItem from converter.spiders.base_classes import MediaWikiBase +from converter.web_tools import WebEngine class ZUMSpider(MediaWikiBase, scrapy.Spider): name = "zum_spider" friendlyName = "ZUM-Unterrichten" url = "https://unterrichten.zum.de/" - version = "0.1.1" # last update: 2022-09-13 + version = "0.1.2" # last update: 2023-01-05 license = Constants.LICENSE_CC_BY_SA_40 + custom_settings = { + "WEB_TOOLS": WebEngine.Playwright + } def technical_item(self, response=None) -> LomTechnicalItem: """ @@ -41,4 +45,3 @@ def valuespace_item(self, response) -> ValuespaceItem: @scrapes discipline educationalContext intendedEndUserRole """ return self.getValuespaces(response).load_item() - From d94d515dfc675052867d227227be05f455383d1e Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 6 Jan 2023 11:00:57 +0100 Subject: [PATCH 203/590] fix:pyOpenSSL deps issues --- Dockerfile | 2 +- requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index c553d8a7..344e6d54 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.0-slim-buster +FROM python:3.10.9-slim-buster # ENV CRAWLER wirlernenonline_spider diff --git a/requirements.txt b/requirements.txt index abe6ab52..170d26fb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ html2text~=2020.1.16 scrapy-splash==0.8.0 python-dateutil==2.8.2 python-dotenv==0.20.0 -Scrapy==2.6.1 +Scrapy==2.6.3 requests==2.28.1 vobject==0.9.6.1 xmltodict~=0.12.0 @@ -25,4 +25,4 @@ six==1.16.0 certifi==2021.10.8 urllib3~=1.26.09 playwright==1.27.1 -pyOpenSSL==22.0.0 \ No newline at end of file 
+pyOpenSSL==22.1.0 \ No newline at end of file From 03617794ede346d15114806e96b8b1c0fb04329a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 9 Jan 2023 16:47:59 +0100 Subject: [PATCH 204/590] sodix_spider v0.2.8 - update license_is_oer()-method to reflect all possible versions of the CC licenses Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/sodix_spider.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 6fbca3a7..6e8fc8bd 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -36,7 +36,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.7" # last update: 2022-10-24 + version = "0.2.8" # last update: 2022-01-09 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -524,8 +524,12 @@ def license_is_oer(self, response) -> bool: if license_name in self.MAPPING_LICENSE_NAMES: license_internal_mapped = self.MAPPING_LICENSE_NAMES.get(license_name) return license_internal_mapped in [ + Constants.LICENSE_CC_BY_20, + Constants.LICENSE_CC_BY_25, Constants.LICENSE_CC_BY_30, Constants.LICENSE_CC_BY_40, + Constants.LICENSE_CC_BY_SA_20, + Constants.LICENSE_CC_BY_SA_25, Constants.LICENSE_CC_BY_SA_30, Constants.LICENSE_CC_BY_SA_40, Constants.LICENSE_CC_ZERO_10, From 837fa8eea07fbd6a15434639a8b4524d3da459a2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 9 Jan 2023 20:57:47 +0100 Subject: [PATCH 205/590] zum_klexikon_spider v0.1.5 / mediawiki_base 'aplimit' (100 -> 500) - change: set mediawiki_base 'aplimit' to 500 -- MediaWikiBase is currently used for 3 ZUM Crawlers (zum_spider, zum_deutschlernen_spider, zum_klexikon_spider) and iterates through the ZUM API in unnecessarily small steps of 100 increments per HTTP Request -- ZUM Klexikon currently holds 2000+ objects in its API, which would cause a lot (20+) of unnecessary Requests in the beginning of a crawl, which get throttled and dropped (therefore we're missing whole pages of results and the crawl-process would finish prematurely) --- by increasing the 'aplimit'-parameter to 500, we're cutting down on unnecessary requests against the API (and hopefully don't end up losing pages of API responses due to "ResponseNeverReceived"-Exceptions in the future) - enable Scrapy's AUTOTHROTTLE setting for all ZUM crawlers Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/mediawiki_base.py | 5 ++--- converter/spiders/zum_deutschlernen.py | 6 ++++-- converter/spiders/zum_klexikon.py | 6 ++++-- converter/spiders/zum_spider.py | 6 ++++-- 4 files changed, 14 insertions(+), 9 deletions(-) diff --git a/converter/spiders/base_classes/mediawiki_base.py b/converter/spiders/base_classes/mediawiki_base.py index 568a831e..d60e2f45 100644 --- a/converter/spiders/base_classes/mediawiki_base.py +++ b/converter/spiders/base_classes/mediawiki_base.py @@ -65,7 +65,6 @@ class PossibleTests: jmes_text = jmespath.compile('parse.text."*"') jmes_pageid = jmespath.compile('parse.pageid') jmes_revid = jmespath.compile('parse.revid') -log = logging.getLogger(__name__) def _api_url(url) -> str: @@ -95,7 +94,7 @@ class MediaWikiBase(LomBase, metaclass=SpiderBase): _query_params = _default_params | { 'action': 'query', 'list': 'allpages', - 'aplimit': '100', + 
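To make the effect of the larger 'aplimit' concrete, here is a rough, standalone sketch of the 'allpages' query this base class paginates through. Only the list/aplimit/apfilterredir parameters are taken from the code; the host/path of the example URL and the 'format' parameter are assumptions.

# minimal sketch, assuming a standard MediaWiki endpoint and JSON output
params = {
    "action": "query",
    "list": "allpages",
    "aplimit": "500",               # was "100": ~2000 Klexikon pages meant 20+ paginated requests, now roughly 4-5
    "apfilterredir": "nonredirects",
    "format": "json",               # assumption: the shared default params request JSON
}
# e.g. (host/path assumed): https://klexikon.zum.de/api.php?action=query&list=allpages&aplimit=500&apfilterredir=nonredirects&format=json
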
'aplimit': '500', # Values between 1 and 500 are allowed by MediaWiki APIs 'apfilterredir': 'nonredirects' # ignore redirection pages } @@ -187,7 +186,7 @@ def parse_page_data(self, response: scrapy.http.Response, extra=None): response.meta['item'] = data response.meta['item_extra'] = extra if error := data.get('error', None): - log.error(f""" + logging.error(f""" | Wiki Error: {error} | for request {response.request.body} | extra data: {extra} diff --git a/converter/spiders/zum_deutschlernen.py b/converter/spiders/zum_deutschlernen.py index af076a33..89143f5a 100644 --- a/converter/spiders/zum_deutschlernen.py +++ b/converter/spiders/zum_deutschlernen.py @@ -12,10 +12,12 @@ class ZUMSpider(MediaWikiBase, scrapy.Spider): name = "zum_deutschlernen_spider" friendlyName = "ZUM-Deutsch-Lernen" url = "https://deutsch-lernen.zum.de/" - version = "0.1.2" # last update: 2023-01-05 + version = "0.1.3" # last update: 2023-01-09 license = Constants.LICENSE_CC_BY_40 custom_settings = { - "WEB_TOOLS": WebEngine.Playwright + "WEB_TOOLS": WebEngine.Playwright, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True } def parse_page_query(self, response: scrapy.http.Response): diff --git a/converter/spiders/zum_klexikon.py b/converter/spiders/zum_klexikon.py index ff1a712c..b03cca83 100644 --- a/converter/spiders/zum_klexikon.py +++ b/converter/spiders/zum_klexikon.py @@ -15,10 +15,12 @@ class ZUMKlexikonSpider(MediaWikiBase, scrapy.Spider): name = "zum_klexikon_spider" friendlyName = "ZUM-Klexikon" url = "https://klexikon.zum.de/" - version = "0.1.4" # last update: 2023-01-05 + version = "0.1.5" # last update: 2023-01-09 license = Constants.LICENSE_CC_BY_SA_40 custom_settings = { - "WEB_TOOLS": WebEngine.Playwright + "WEB_TOOLS": WebEngine.Playwright, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True } def parse_page_query(self, response: scrapy.http.Response): diff --git a/converter/spiders/zum_spider.py b/converter/spiders/zum_spider.py index 70786a7d..651b7d0c 100644 --- a/converter/spiders/zum_spider.py +++ b/converter/spiders/zum_spider.py @@ -12,10 +12,12 @@ class ZUMSpider(MediaWikiBase, scrapy.Spider): name = "zum_spider" friendlyName = "ZUM-Unterrichten" url = "https://unterrichten.zum.de/" - version = "0.1.2" # last update: 2023-01-05 + version = "0.1.3" # last update: 2023-01-09 license = Constants.LICENSE_CC_BY_SA_40 custom_settings = { - "WEB_TOOLS": WebEngine.Playwright + "WEB_TOOLS": WebEngine.Playwright, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True } def technical_item(self, response=None) -> LomTechnicalItem: From 280054a7266d0851043a55b30c65800f88ddd198 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 10 Jan 2023 12:37:28 +0100 Subject: [PATCH 206/590] LisumPipeline (educationalContext-Mapping "fortbildung", typo) - fix: Mapping "fortbildung" was previously not possible due to the LISUM valuespace -- in accordance to the new LISUM valuespace, "fortbildung" maps to "professional development" -- typo: the debug-message that made me aware of this circumstance was missing a whitespace for better readability Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 0b164758..70ebd741 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -719,7 +719,7 @@ class LisumPipeline(BasicPipeline): "sekundarstufe_1": "lower secondary school", 
"sekundarstufe_2": "upper secondary school", "berufliche_bildung": "vocational education", - # "fortbildung": "", # does not exist in Lisum valuespace + "fortbildung": "professional development", "erwachsenenbildung": "continuing education", "foerderschule": "special education", # "fernunterricht": "" # does not exist in Lisum valuespace @@ -800,7 +800,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy educational_context_w3id_key) educational_context_lisum_keys.add(educational_context_w3id_key) case _: - logging.debug(f"LisumPipeline: educationalContext {educational_context_w3id_key}" + logging.debug(f"LisumPipeline: educationalContext {educational_context_w3id_key} " f"not found in mapping table.") educational_context_list = list(educational_context_lisum_keys) educational_context_list.sort() From 31226f1e83fb1de60e82a100581a3b118d3b018a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 10 Jan 2023 17:46:44 +0100 Subject: [PATCH 207/590] LisumPipeline Mapping (sodix_spider v0.2.9) - add: Mapping for eafCodes "260", "400", "560" and "660" - fix: Mapping for SODIX-LRT "RECHERCHE" - fix: LRT to Lisum Mapping for "demonstration" and "text" - version-bump sodix_spider Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 8 ++++++-- converter/spiders/sodix_spider.py | 4 ++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 70ebd741..0190adb5 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -685,14 +685,17 @@ class LisumPipeline(BasicPipeline): "200": "C-FS", # Fremdsprachen "220": "C-GEO", # Geographie, "240": "C-GE", # Geschichte + "260": "B-GES", # Gesundheit -> Gesundheitsförderung "380": "C-MA", # Mathematik + "400": "B-BCM", # Medienerziehung / Medienpädagogik -> Basiscurriculum Medienbildung "420": "C-MU", # Musik "450": "C-Phil", # Philosophie "460": "C-Ph", # Physik "480": "C-PB", # Politische Bildung "510": "C-Psy", # Psychologie "520": "C-LER", # Religion -> Lebensgestaltung-Ethik-Religionskunde - # ToDo: 560 -> "C-NW56-3-8" ? 
(Sexualerziehung) + "560": "B-SE", # Sexualerziehung + "660": "B-MB", # Verkehrserziehung -> "Mobilitätsbildung und Verkehrserziehung" "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater "20001": "C-EN", # Englisch @@ -730,7 +733,8 @@ class LisumPipeline(BasicPipeline): "audiovisual_medium": ["audio", "video"], "open_activity": "", # exists in 2 out of 60.000 items "broadcast": "audio", - "demonstration": "image", # "Veranschaulichung" + "demonstration": ["demonstration", "image"], # "Veranschaulichung" + "text": "teaching_aids", # "Arbeitsmaterial" } def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy.Item]: diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 6e8fc8bd..4ea7b947 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -36,7 +36,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.8" # last update: 2022-01-09 + version = "0.2.9" # last update: 2022-01-10 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -73,7 +73,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): "PROJECT": "project", "QUELLE": "reference", "RADIO": "broadcast", - "RECHERCHE": "enquiry-oriented activity", + "RECHERCHE": "enquiry_oriented_activity", "RESSOURCENTYP": "other", # "Anderer Ressourcentyp" "ROLLENSPIEL": "role play", "SIMULATION": "simulation", From f1f9a831e45e0e70c2aca32d3302d2d38f0b399d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 12 Jan 2023 16:58:27 +0100 Subject: [PATCH 208/590] change: edu_sharing_base API pagination (from 100 to 500 items / page) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/edu_sharing_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 1e1259ca..565c8637 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -28,7 +28,7 @@ def buildUrl(self, offset=0): self.apiUrl + self.searchUrl + self.mdsId - + "/ngsearch?contentType=FILES&maxItems=100&skipCount=" + + "/ngsearch?contentType=FILES&maxItems=500&skipCount=" + str(offset) + "&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-" ) From 2dff7ec59d276a68e29bc95e30155310fe4b8b58 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 12 Jan 2023 16:59:05 +0100 Subject: [PATCH 209/590] oeh_spider v0.1.2 - feat: import sources by using "ccm:oeh_publisher_combined"-values -- this is a hacky workaround until the same functionality via edu-sharing's "saved searches"-feature/API can be implemented - breaking: splitting of .env variable "OEH_IMPORT_SOURCES" by semicolon instead of commas -- this was necessary due to freetext entries in "ccm:oeh_publisher_combined", which have commas in them -- splitting this environment variable by semicolon should be more future-proof - style: code cleanup / formatting Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oeh_spider.py | 38 +++++++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/converter/spiders/oeh_spider.py 
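Because 'ccm:oeh_publisher_combined' can hold free-text publisher names that themselves contain commas, splitting 'OEH_IMPORT_SOURCES' on semicolons keeps such entries intact. A minimal sketch of the new whitelist format (the publisher string below is made up):

# .env (hypothetical values):
# OEH_IMPORT_SOURCES='serlo_spider;youtube_spider;Some Publisher GmbH, Berlin'
import_whitelist = "serlo_spider;youtube_spider;Some Publisher GmbH, Berlin".split(";")
# -> ['serlo_spider', 'youtube_spider', 'Some Publisher GmbH, Berlin']
# a comma-based split would have cut the free-text publisher entry into two unusable tokens
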
b/converter/spiders/oeh_spider.py index 159d7e2b..4fe6128b 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -1,7 +1,7 @@ import logging -from .base_classes import EduSharingBase import converter.env as env +from .base_classes import EduSharingBase class OEHSpider(EduSharingBase): @@ -10,21 +10,21 @@ class OEHSpider(EduSharingBase): url = "https://redaktion.openeduhub.net/edu-sharing/" apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" searchUrl = "search/v1/queries/-home-/" - version = "0.1.1" + version = "0.1.2" # last update: 2023-01-12 mdsId = "mds_oeh" importWhitelist: [str] = None + def __init__(self, **kwargs): EduSharingBase.__init__(self, **kwargs) - importWhitelist = env.get("OEH_IMPORT_SOURCES", True, None) - if importWhitelist: - self.importWhitelist = importWhitelist.split(",") + import_whitelist = env.get("OEH_IMPORT_SOURCES", True, None) + if import_whitelist: + self.importWhitelist = import_whitelist.split(";") logging.info("Importing only whitelisted sources: {}".format(self.importWhitelist)) def getBase(self, response): base = EduSharingBase.getBase(self, response) return base - def getLOMTechnical(self, response): technical = EduSharingBase.getLOMTechnical(self, response) if "ccm:wwwurl" in response.meta["item"]["properties"]: @@ -32,18 +32,38 @@ def getLOMTechnical(self, response): technical.replace_value("location", response.meta["item"]["properties"]["ccm:wwwurl"][0]) return technical - def shouldImport(self, response=None): if self.importWhitelist: source = "oeh" + publisher_combined = str() if "ccm:replicationsource" in response.meta["item"]["properties"]: source = response.meta["item"]["properties"]["ccm:replicationsource"] source = source[0] if source and source[0] else "oeh" - if source not in self.importWhitelist: + if "ccm:oeh_publisher_combined" in response.meta["item"]["properties"]: + publisher_combined = response.meta["item"]["properties"]["ccm:oeh_publisher_combined"] + publisher_combined = publisher_combined[0] if publisher_combined and publisher_combined[0] else "oeh" + whitelist_hit_source = False + whitelist_hit_publisher_combined = False + if source in self.importWhitelist: + whitelist_hit_source = True + if publisher_combined in self.importWhitelist: + whitelist_hit_publisher_combined = True + if whitelist_hit_source or whitelist_hit_publisher_combined: + # Item is detected on one whitelist (either 'ccm:replicationsource' or 'ccm:oeh_publisher_combined') + if whitelist_hit_source: + logging.info("Item {} was found on whitelist for 'ccm:replicationsource: {}".format( + response.meta["item"]["ref"]["id"], source)) + if whitelist_hit_publisher_combined: + logging.info("Item {} was found on whitelist for 'ccm:oeh_publisher_combined': {}".format( + response.meta["item"]["ref"]["id"], publisher_combined)) + elif whitelist_hit_source is False and whitelist_hit_publisher_combined is False: logging.info( - "Skipping item {} because it has no whitelisted source {}".format( + "Skipping item {} because it has no whitelisted 'ccm:replicationsource'-value: {}".format( response.meta["item"]["ref"]["id"], source) ) + logging.info( + "Skipping item {} because it has no whitelisted 'ccm:oeh_publisher_combined'-value: {}".format( + response.meta["item"]["ref"]["id"], publisher_combined)) return False if "ccm:collection_io_reference" in response.meta["item"]["aspects"]: logging.info( From e2226f71bfd1439140e84581d320ed9c6ea97851 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 12 
Jan 2023 18:51:43 +0100 Subject: [PATCH 210/590] add: pyCharm debug/run configuration for oeh_spider - change: oeh_spider ignores robots.txt (stops one unnecessary API request from happening) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .run/oeh_spider.run.xml | 25 +++++++++++++++++++++++++ converter/spiders/oeh_spider.py | 3 +++ 2 files changed, 28 insertions(+) create mode 100644 .run/oeh_spider.run.xml diff --git a/.run/oeh_spider.run.xml b/.run/oeh_spider.run.xml new file mode 100644 index 00000000..17b8161a --- /dev/null +++ b/.run/oeh_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 4fe6128b..8a9ffe3f 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -13,6 +13,9 @@ class OEHSpider(EduSharingBase): version = "0.1.2" # last update: 2023-01-12 mdsId = "mds_oeh" importWhitelist: [str] = None + custom_settings = { + "ROBOTSTXT_OBEY": False + } def __init__(self, **kwargs): EduSharingBase.__init__(self, **kwargs) From c19d55e8350f71cfbbb5af02cd23fbf201c29c17 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 12 Jan 2023 21:35:07 +0100 Subject: [PATCH 211/590] "publisher_combined"-fix, code cleanup, fix typos Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oeh_spider.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 8a9ffe3f..00313cb3 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -44,7 +44,8 @@ def shouldImport(self, response=None): source = source[0] if source and source[0] else "oeh" if "ccm:oeh_publisher_combined" in response.meta["item"]["properties"]: publisher_combined = response.meta["item"]["properties"]["ccm:oeh_publisher_combined"] - publisher_combined = publisher_combined[0] if publisher_combined and publisher_combined[0] else "oeh" + if publisher_combined and publisher_combined[0]: + publisher_combined = publisher_combined[0] whitelist_hit_source = False whitelist_hit_publisher_combined = False if source in self.importWhitelist: @@ -52,14 +53,15 @@ def shouldImport(self, response=None): if publisher_combined in self.importWhitelist: whitelist_hit_publisher_combined = True if whitelist_hit_source or whitelist_hit_publisher_combined: - # Item is detected on one whitelist (either 'ccm:replicationsource' or 'ccm:oeh_publisher_combined') + # If item is detected in one whitelist (either 'ccm:replicationsource' or 'ccm:oeh_publisher_combined') if whitelist_hit_source: - logging.info("Item {} was found on whitelist for 'ccm:replicationsource: {}".format( + logging.info("Item {} was detected in whitelist for 'ccm:replicationsource: {}".format( response.meta["item"]["ref"]["id"], source)) if whitelist_hit_publisher_combined: - logging.info("Item {} was found on whitelist for 'ccm:oeh_publisher_combined': {}".format( + logging.info("Item {} was detected in whitelist for 'ccm:oeh_publisher_combined': {}".format( response.meta["item"]["ref"]["id"], publisher_combined)) elif whitelist_hit_source is False and whitelist_hit_publisher_combined is False: + # if the item is on neither whitelist, it will be skipped logging.info( "Skipping item {} because it has no whitelisted 'ccm:replicationsource'-value: {}".format( response.meta["item"]["ref"]["id"], source) From 3f64a84b32e4c210db5c29bd697ac7bf87a623e6 Mon Sep 
17 00:00:00 2001 From: Torsten Simon Date: Wed, 18 Jan 2023 12:49:14 +0100 Subject: [PATCH 212/590] fix:call configure_logging to get logging params when crawling via cmdline --- converter/settings.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/converter/settings.py b/converter/settings.py index 141cf018..377f6d13 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -1,6 +1,11 @@ # -*- coding: utf-8 -*- +import logging from pathlib import Path # python3 only + +import scrapy + import converter.env as env +from scrapy.utils.log import configure_logging # Scrapy settings for project # @@ -20,6 +25,12 @@ LOG_LEVEL = env.get("LOG_LEVEL", default="INFO") LOG_FORMATTER = "converter.custom_log_formatter.CustomLogFormatter" +configure_logging(settings = { + "LOG_FILE": LOG_FILE, + "LOG_LEVEL": LOG_LEVEL, + "LOG_FORMATTER": LOG_FORMATTER +}) + # Default behaviour for regular crawlers of non-license-controlled content # When set True, every item will have GROUP_EVERYONE attached in edu-sharing # When set False, no permissions are set at all, which can be helpful if you want to control them later (e.g. via inherition) From db895b6c5c9feaf5967049feded4db482bf796e5 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 18 Jan 2023 14:17:46 +0100 Subject: [PATCH 213/590] feat:support for saved searches for edu_sharing_base spider --- converter/.env.example | 3 ++ .../spiders/base_classes/edu_sharing_base.py | 32 +++++++++++++++++-- converter/spiders/oeh_spider.py | 9 +++--- 3 files changed, 38 insertions(+), 6 deletions(-) diff --git a/converter/.env.example b/converter/.env.example index a54e1d44..d12ef8ea 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -49,6 +49,9 @@ YOUTUBE_API_KEY="" # only for oeh spider: select the sources you want to fetch from oeh (comma seperated) # OEH_IMPORT_SOURCES = 'oeh,wirlernenonline_spider,serlo_spider,youtube_spider' +# only for spiders based on edu_sharing: Use a saved search (object must be published to everyone in edu-sharing) to query from +# EDU_SHARING_IMPORT_SEARCH_ID = "" + # Sodix Spider login data # SODIX_SPIDER_USERNAME = "" # SODIX_SPIDER_PASSWORD = "" \ No newline at end of file diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 565c8637..7506a9c5 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -9,34 +9,61 @@ import json import vobject from converter.es_connector import EduSharingConstants +import converter.env as env class EduSharingBase(Spider, LomBase): + # max items per request, recommended value between 100-1000 + maxItems = 500 friendlyName = "Edu-Sharing repository spider" # the location of the edu-sharing rest api apiUrl = "http://localhost/edu-sharing/rest/" + savedSearchUrl = "search/v1/queries/load/" searchUrl = "search/v1/queriesV2/-home-/" searchToken = "*" # the mds to use for the search request mdsId = "-default-" + # searchId to import from, if empty, whole repository will be fetched + importSearchId = '' def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) + importSearchId = env.get("EDU_SHARING_IMPORT_SEARCH_ID", True, None) + + if importSearchId: + self.importSearchId = importSearchId + logging.info("Importing only data based on the search query: {}".format(self.importSearchId)) def buildUrl(self, offset=0): + if self.importSearchId: + return ( + self.apiUrl + + self.savedSearchUrl + + self.importSearchId + + 
"?contentType=FILES&propertyFilter=-all-" + + "&maxItems=" + str(self.maxItems) + "&skipCount=" + str(offset) + ) return ( self.apiUrl + self.searchUrl + self.mdsId - + "/ngsearch?contentType=FILES&maxItems=500&skipCount=" + + "/ngsearch?contentType=FILES&propertyFilter=-all-" + "&maxItems=" + str(self.maxItems) + "&skipCount=" + str(offset) - + "&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-" + + "&sortProperties=cm%3Acreated&sortAscending=true" ) def search(self, offset=0): criteria = [] if "queriesV2" in self.searchUrl: criteria = [({"property": "ngsearchword", "values": [self.searchToken]} )] + data = {} + if self.importSearchId: + return JsonRequest( + url=self.buildUrl() + ) + + # criterias only required for regular endpoint return JsonRequest( url=self.buildUrl(offset), data={ @@ -45,6 +72,7 @@ def search(self, offset=0): callback=self.parse, ) + def getProperty(self, name, response): return ( response.meta["item"]["properties"][name] diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 00313cb3..2729d279 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -19,10 +19,11 @@ class OEHSpider(EduSharingBase): def __init__(self, **kwargs): EduSharingBase.__init__(self, **kwargs) - import_whitelist = env.get("OEH_IMPORT_SOURCES", True, None) - if import_whitelist: - self.importWhitelist = import_whitelist.split(";") - logging.info("Importing only whitelisted sources: {}".format(self.importWhitelist)) + if not EduSharingBase.importSearchId: + import_whitelist = env.get("OEH_IMPORT_SOURCES", True, None) + if import_whitelist: + self.importWhitelist = import_whitelist.split(";") + logging.info("Importing only whitelisted sources: {}".format(self.importWhitelist)) def getBase(self, response): base = EduSharingBase.getBase(self, response) From 908a6415cdf09d620dedb626bd9b42825d57ae1e Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 18 Jan 2023 18:05:25 +0100 Subject: [PATCH 214/590] fix:edu sharing base crawler do only crawl content for rep. 
source links --- converter/spiders/base_classes/edu_sharing_base.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 7506a9c5..07581a58 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -99,7 +99,10 @@ def getBase(self, response): base.replace_value( "origin", self.getProperty("ccm:replicationsource", response) ) - if self.getProperty("ccm:replicationsource", response): + if ( + self.getProperty("ccm:replicationsource", response) and + self.getProperty("ccm:wwwurl", response) + ): # imported objects usually have the content as binary text # TODO: Sometimes, edu-sharing redirects if no local content is found, and this should be html-parsed if response.meta["item"]["downloadUrl"]: From 783014e2ecbced2fb2c6156748f5b3951eb922d8 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Wed, 18 Jan 2023 18:06:27 +0100 Subject: [PATCH 215/590] fix:pagination was broken for saved search es spider --- converter/spiders/base_classes/edu_sharing_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 07581a58..ec1bb949 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -60,7 +60,7 @@ def search(self, offset=0): data = {} if self.importSearchId: return JsonRequest( - url=self.buildUrl() + url=self.buildUrl(offset) ) # criterias only required for regular endpoint From 52b5b79047dd0de6eed826c227420dff99436b67 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 19 Jan 2023 13:24:26 +0100 Subject: [PATCH 216/590] fix:es base class reduce max items to prevent session timeouts --- converter/spiders/base_classes/edu_sharing_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index ec1bb949..8bd328c5 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -14,7 +14,7 @@ class EduSharingBase(Spider, LomBase): # max items per request, recommended value between 100-1000 - maxItems = 500 + maxItems = 200 friendlyName = "Edu-Sharing repository spider" # the location of the edu-sharing rest api apiUrl = "http://localhost/edu-sharing/rest/" From 0ac876965405d2a29714268e96352d63386f4700 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 20 Jan 2023 09:44:29 +0100 Subject: [PATCH 217/590] fix:oeh spider map oeh specific publisher combined field to generic contributor publisher --- converter/spiders/oeh_spider.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 2729d279..af3cf4e4 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -10,7 +10,7 @@ class OEHSpider(EduSharingBase): url = "https://redaktion.openeduhub.net/edu-sharing/" apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" searchUrl = "search/v1/queries/-home-/" - version = "0.1.2" # last update: 2023-01-12 + version = "0.1.4" # last update: 2023-01-20 mdsId = "mds_oeh" importWhitelist: [str] = None custom_settings = { @@ -36,6 +36,15 @@ def getLOMTechnical(self, response): technical.replace_value("location", 
response.meta["item"]["properties"]["ccm:wwwurl"][0]) return technical + + def getLOMLifecycle(self, response): + lifecycle = EduSharingBase.getLOMLifecycle(self, response) + if "ccm:oeh_publisher_combined" in response.meta["item"]["properties"]: + lifecycle.add_value("role", "publisher") + lifecycle.add_value("organization", response.meta["item"]["properties"]["ccm:oeh_publisher_combined"][0]) + return lifecycle + + def shouldImport(self, response=None): if self.importWhitelist: source = "oeh" From a378745f7a3fe820c59a275986a4f13a16e572d7 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 20 Jan 2023 09:52:12 +0100 Subject: [PATCH 218/590] fix:es_connector detect+map known internal licenses --- converter/es_connector.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 7b6cdd40..6d69d078 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -295,6 +295,8 @@ def mapLicense(self, spaces, license): match license["internal"]: case Constants.LICENSE_COPYRIGHT_LAW: spaces["ccm:commonlicense_key"] = "COPYRIGHT_FREE" + case "CC_BY" | "CC_BY_SA" | "CC_BY_NC" | "CC_BY_ND": + spaces["ccm:commonlicense_key"] = license["internal"] case Constants.LICENSE_CUSTOM: spaces["ccm:commonlicense_key"] = "CUSTOM" if "description" in license: From 35c435a79f1d1617efe0d2517671ae547386d0bd Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 20 Jan 2023 09:55:10 +0100 Subject: [PATCH 219/590] fix:es_connector detect+map known internal licenses --- converter/es_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 6d69d078..6916378d 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -295,7 +295,7 @@ def mapLicense(self, spaces, license): match license["internal"]: case Constants.LICENSE_COPYRIGHT_LAW: spaces["ccm:commonlicense_key"] = "COPYRIGHT_FREE" - case "CC_BY" | "CC_BY_SA" | "CC_BY_NC" | "CC_BY_ND": + case "CC_BY" | "CC_BY_SA" | "CC_BY_NC" | "CC_BY_ND" | "CC_0" | "PDM": spaces["ccm:commonlicense_key"] = license["internal"] case Constants.LICENSE_CUSTOM: spaces["ccm:commonlicense_key"] = "CUSTOM" From 4ea51e586bb13027cf00af3ca2641c97586ca577 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Fri, 20 Jan 2023 10:18:40 +0100 Subject: [PATCH 220/590] fix:es base use yield for multiple lifecycle objects, oeh spider specific oeh lifecycle handling --- .../spiders/base_classes/edu_sharing_base.py | 2 +- converter/spiders/oeh_spider.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 8bd328c5..98aa2430 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -176,7 +176,7 @@ def getLOMLifecycle(self, response): lifecycle.add_value("role", role) lifecycle.add_value("firstName", given) lifecycle.add_value("lastName", family) - return lifecycle + yield lifecycle def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index af3cf4e4..27ac2129 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -2,6 +2,7 @@ import converter.env as env from .base_classes import EduSharingBase +from ..items import LomLifecycleItemloader class OEHSpider(EduSharingBase): @@ -10,7 +11,7 @@ class 
OEHSpider(EduSharingBase): url = "https://redaktion.openeduhub.net/edu-sharing/" apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" searchUrl = "search/v1/queries/-home-/" - version = "0.1.4" # last update: 2023-01-20 + version = "0.1.5" # last update: 2023-01-20 mdsId = "mds_oeh" importWhitelist: [str] = None custom_settings = { @@ -38,11 +39,17 @@ def getLOMTechnical(self, response): def getLOMLifecycle(self, response): - lifecycle = EduSharingBase.getLOMLifecycle(self, response) - if "ccm:oeh_publisher_combined" in response.meta["item"]["properties"]: + has_publisher = False + for lifecycle in EduSharingBase.getLOMLifecycle(self, response): + if lifecycle.load_item()["role"] == "publisher": + has_publisher = True + yield lifecycle + + if not has_publisher and "ccm:oeh_publisher_combined" in response.meta["item"]["properties"]: + lifecycle = LomLifecycleItemloader(response=response) lifecycle.add_value("role", "publisher") lifecycle.add_value("organization", response.meta["item"]["properties"]["ccm:oeh_publisher_combined"][0]) - return lifecycle + yield lifecycle def shouldImport(self, response=None): From 68b58e8f68692fea06488e1e160bcfbb7b0a69e3 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 23 Jan 2023 09:23:59 +0100 Subject: [PATCH 221/590] fix:es base sort by creation also via query search --- converter/spiders/base_classes/edu_sharing_base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 98aa2430..d7528708 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -42,6 +42,7 @@ def buildUrl(self, offset=0): + self.importSearchId + "?contentType=FILES&propertyFilter=-all-" + "&maxItems=" + str(self.maxItems) + "&skipCount=" + str(offset) + + "&sortProperties=cm%3Acreated&sortAscending=true" ) return ( self.apiUrl From 208514d4318bbfcf6797a28e6195f3097c0536f0 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 23 Jan 2023 09:33:40 +0100 Subject: [PATCH 222/590] fix:add debug logging es connector mds --- converter/es_connector.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 6916378d..e92e7e51 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -460,6 +460,10 @@ def transformItem(self, uuid, spider, item): if mdsId != "default": spaces["cm:edu_metadataset"] = mdsId spaces["cm:edu_forcemetadataset"] = "true" + logging.debug("Using metadataset " + mdsId) + else: + logging.debug("Using default metadataset") + for key in spaces: if type(spaces[key]) is tuple: spaces[key] = list([x for y in spaces[key] for x in y]) From 557b1503129b033a9c550cd7c75b071078b3093d Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Mon, 23 Jan 2023 10:08:57 +0100 Subject: [PATCH 223/590] logs:custom pipeline --- converter/settings.py | 1 + 1 file changed, 1 insertion(+) diff --git a/converter/settings.py b/converter/settings.py index 377f6d13..ccd6cc80 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -135,6 +135,7 @@ ADDITIONAL_PIPELINES = env.get("CUSTOM_PIPELINES", True) if ADDITIONAL_PIPELINES: for pipe in map(lambda p: p.split(":"), ADDITIONAL_PIPELINES.split(",")): + logging.info("Enabling custom pipeline: " + pipe[0]) ITEM_PIPELINES[pipe[0]] = int(pipe[1]) # Enable and configure the AutoThrottle extension (disabled by default) From 87321b7304b23bc856e5cc94168d860d68b404a6 Mon Sep 17 00:00:00 2001 
From: ralfnellescap Date: Tue, 24 Jan 2023 17:40:11 +0100 Subject: [PATCH 224/590] [OPS-3824] add trivy action --- .github/workflows/trivy-cron.yaml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 .github/workflows/trivy-cron.yaml diff --git a/.github/workflows/trivy-cron.yaml b/.github/workflows/trivy-cron.yaml new file mode 100644 index 00000000..21dd2a32 --- /dev/null +++ b/.github/workflows/trivy-cron.yaml @@ -0,0 +1,18 @@ +--- +name: Docker Image Trivy Image Vulnerability Scan Cron Job +on: + push: + branch: + - 'OPS-3814-add-Image-Vulnerability-Scanning' + schedule: + # Runs "at 2 a.m. past every day" (see https://crontab.guru) + # - cron: '0 2 * * *' + - cron: '2 13 * * *' + +# Template Single Image Repro GHA +jobs: + trivy_image_scan_cron: + uses: hpi-schul-cloud/infra-tools/.github/workflows/trivy-scan.yaml@OPS-3814-add-Image-Vulnerability-Scanning + # uses: hpi-schul-cloud/infra-tools/.github/workflows/trivy-scan.yaml@master + with: + image-ref: 'ghcr.io/hpi-schul-cloud/oeh-search-etl:latest' From 6e9776255d1d6e96def353b57928aa05a25ce3dc Mon Sep 17 00:00:00 2001 From: ralfnellescap Date: Wed, 25 Jan 2023 13:21:56 +0100 Subject: [PATCH 225/590] [OPS-3814] add workflow permissions --- .github/workflows/trivy-cron.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/trivy-cron.yaml b/.github/workflows/trivy-cron.yaml index 21dd2a32..20108500 100644 --- a/.github/workflows/trivy-cron.yaml +++ b/.github/workflows/trivy-cron.yaml @@ -9,6 +9,13 @@ on: # - cron: '0 2 * * *' - cron: '2 13 * * *' +permissions: + # required for all workflows + security-events: write + # only required for workflows in private repositories + actions: read + contents: read + # Template Single Image Repro GHA jobs: trivy_image_scan_cron: From 57468755784278bee4b0a1b28302ebeaddaba107 Mon Sep 17 00:00:00 2001 From: ralfnellescap Date: Thu, 26 Jan 2023 08:46:06 +0100 Subject: [PATCH 226/590] [OPS-3814] fix trivy workflow permissions --- .github/workflows/trivy-cron.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/trivy-cron.yaml b/.github/workflows/trivy-cron.yaml index 20108500..726b9b91 100644 --- a/.github/workflows/trivy-cron.yaml +++ b/.github/workflows/trivy-cron.yaml @@ -6,10 +6,8 @@ on: - 'OPS-3814-add-Image-Vulnerability-Scanning' schedule: # Runs "at 2 a.m. past every day" (see https://crontab.guru) - # - cron: '0 2 * * *' - - cron: '2 13 * * *' - -permissions: + - cron: '0 2 * * *' + permissions: # required for all workflows security-events: write # only required for workflows in private repositories From abba2b302f384fd82735469d000c882fea233253 Mon Sep 17 00:00:00 2001 From: ralfnellescap Date: Thu, 26 Jan 2023 12:04:15 +0100 Subject: [PATCH 227/590] [OPS-3814] fix workflow --- .github/workflows/trivy-cron.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/trivy-cron.yaml b/.github/workflows/trivy-cron.yaml index 726b9b91..372ec68e 100644 --- a/.github/workflows/trivy-cron.yaml +++ b/.github/workflows/trivy-cron.yaml @@ -2,12 +2,12 @@ name: Docker Image Trivy Image Vulnerability Scan Cron Job on: push: - branch: + branches: - 'OPS-3814-add-Image-Vulnerability-Scanning' schedule: # Runs "at 2 a.m. 
past every day" (see https://crontab.guru) - cron: '0 2 * * *' - permissions: +permissions: # required for all workflows security-events: write # only required for workflows in private repositories From 147a6b0158a539463e0e599647dc850642d6a756 Mon Sep 17 00:00:00 2001 From: ralfnellescap Date: Thu, 26 Jan 2023 17:41:54 +0100 Subject: [PATCH 228/590] [OPS-3814] tidy for merge --- .github/workflows/trivy-cron.yaml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/trivy-cron.yaml b/.github/workflows/trivy-cron.yaml index 372ec68e..89f42ad1 100644 --- a/.github/workflows/trivy-cron.yaml +++ b/.github/workflows/trivy-cron.yaml @@ -1,9 +1,6 @@ --- name: Docker Image Trivy Image Vulnerability Scan Cron Job on: - push: - branches: - - 'OPS-3814-add-Image-Vulnerability-Scanning' schedule: # Runs "at 2 a.m. past every day" (see https://crontab.guru) - cron: '0 2 * * *' @@ -17,7 +14,6 @@ permissions: # Template Single Image Repro GHA jobs: trivy_image_scan_cron: - uses: hpi-schul-cloud/infra-tools/.github/workflows/trivy-scan.yaml@OPS-3814-add-Image-Vulnerability-Scanning - # uses: hpi-schul-cloud/infra-tools/.github/workflows/trivy-scan.yaml@master + uses: hpi-schul-cloud/infra-tools/.github/workflows/trivy-scan.yaml@master with: image-ref: 'ghcr.io/hpi-schul-cloud/oeh-search-etl:latest' From 1f7c9278e0c4637c19c58d3d7d53c2ed6060f9e4 Mon Sep 17 00:00:00 2001 From: ralfnellescap Date: Tue, 31 Jan 2023 09:39:02 +0100 Subject: [PATCH 229/590] [OPS-3814] tidy trivy workflow --- .github/workflows/trivy-cron.yaml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/trivy-cron.yaml b/.github/workflows/trivy-cron.yaml index 89f42ad1..561d7dc1 100644 --- a/.github/workflows/trivy-cron.yaml +++ b/.github/workflows/trivy-cron.yaml @@ -2,16 +2,12 @@ name: Docker Image Trivy Image Vulnerability Scan Cron Job on: schedule: - # Runs "at 2 a.m. 
past every day" (see https://crontab.guru) - cron: '0 2 * * *' permissions: - # required for all workflows + # security-events required for all workflows; action, contents only required for workflows in private repositories security-events: write - # only required for workflows in private repositories actions: read contents: read - -# Template Single Image Repro GHA jobs: trivy_image_scan_cron: uses: hpi-schul-cloud/infra-tools/.github/workflows/trivy-scan.yaml@master From 4708f99175e04084a74cfbdd8dc79daf9cbe01ec Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 20 Jan 2023 12:39:18 +0100 Subject: [PATCH 230/590] fix: missing edu-sharing properties - feat: multiple lifecycle entities per role -- change how getLOMLifecycle iterates through the 'ccm:lifecycle...'-properties - feat: vCard parsing of additional lifecycle data -- lifecycles values for 'email', 'organization', 'url' and 'uuid' are parsed from the edu-sharing vCard and written to the corresponding lifecycle field - fix: "valuespaces.conditionsOfAccess" fallback (from "ccm:oeh_quality_login") - WIP: identify ToDos for potentially missing property fields (squashed) --- .../spiders/base_classes/edu_sharing_base.py | 106 +++++++++++++++--- 1 file changed, 93 insertions(+), 13 deletions(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index d7528708..bf007825 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -10,6 +10,7 @@ import vobject from converter.es_connector import EduSharingConstants import converter.env as env +from ...items import LomLifecycleItemloader class EduSharingBase(Spider, LomBase): @@ -100,6 +101,9 @@ def getBase(self, response): base.replace_value( "origin", self.getProperty("ccm:replicationsource", response) ) + # ToDo: base.origin is used for creating subfolders in "SYNC_OBJ//..." + # - currently only subfolders for learning objects that were gathered by crawlers are created? 
+ # - base.origin could be set for (safe) values from 'ccm:oeh_publisher_combined' as well if ( self.getProperty("ccm:replicationsource", response) and self.getProperty("ccm:wwwurl", response) @@ -151,6 +155,9 @@ def getLOMGeneral(self, response): general.add_value( "description", self.getProperty("cclom:general_description", response) ) + general.add_value('identifier', self.getProperty("cclom:general_identifier", response)) + general.add_value('language', self.getProperty("cclom:general_language", response)) + general.add_value('aggregationLevel', self.getProperty("cclom:aggregationLevel", response)) return general def getLOMEducational(self, response): @@ -162,22 +169,64 @@ def getLOMEducational(self, response): range.add_value("fromRange", tar_from) range.add_value("toRange", tar_to) educational.add_value("typicalAgeRange", range.load_item()) + educational.add_value("typicalLearningTime", self.getProperty("cclom:typicallearningtime", response)) return educational def getLOMLifecycle(self, response): - lifecycle = LomBase.getLOMLifecycle(self, response) - for role in EduSharingConstants.LIFECYCLE_ROLES_MAPPING.keys(): - entry = self.getProperty("ccm:lifecyclecontributer_" + role, response) - if entry and entry[0]: - # TODO: we currently only support one author per role - vcard = vobject.readOne(entry[0]) - if hasattr(vcard, "n"): - given = vcard.n.value.given - family = vcard.n.value.family - lifecycle.add_value("role", role) - lifecycle.add_value("firstName", given) - lifecycle.add_value("lastName", family) - yield lifecycle + for role, edu_sharing_lifecycle_property in EduSharingConstants.LIFECYCLE_ROLES_MAPPING.items(): + # there can be multiple authors or contributors per role + vcard_list: list = self.getProperty(edu_sharing_lifecycle_property, response) + # vCards are returned by the edu-sharing API as a list of strings + if vcard_list: + # making sure that we only create lifecycle items when there's actual vCards to parse + for vcard_entry in vcard_list: + # each vCard-String needs its own LOM Lifecycle Item + lifecycle: LomLifecycleItemloader = LomBase.getLOMLifecycle(self, response) + if vcard_entry: + yield from self.get_lifecycle_from_vcard_string(lifecycle, role, vcard_entry) + + @staticmethod + def get_lifecycle_from_vcard_string(lifecycle: LomLifecycleItemloader, role, vcard_entry: str): + """ + This method parses a vCard from a string and saves its values to LifecycleItem's fields if possible. + """ + vcard: vobject.base.Component = vobject.readOne(vcard_entry) + if hasattr(vcard, "n"): + given = vcard.n.value.given + family = vcard.n.value.family + lifecycle.add_value("role", role) + lifecycle.add_value("firstName", given) + lifecycle.add_value("lastName", family) + if hasattr(vcard, "email"): + # ToDo: recognize multiple emails + vcard_email: str = vcard.email.value + lifecycle.add_value("email", vcard_email) + if hasattr(vcard, "url"): + # ToDo: recognize multiple URLs + vcard_url: str = vcard.url.value + lifecycle.add_value("url", vcard_url) + if hasattr(vcard, "org"): + vcard_org: str = vcard.org.value + lifecycle.add_value("organization", vcard_org) + if hasattr(vcard, "x-es-lom-contribute-date"): + # copy the contribution date only if available + vcard_es_date: list = vcard.contents.get("x-es-lom-contribute-date") # edu-sharing contributor date + # has its own vCard extension. 
By calling vcard.contents.get() we'll receive: + # a list of + if vcard_es_date: + # -> we only need the date itself + vcard_es_date_value: str = vcard_es_date[0].value + if vcard_es_date_value: + # some (malformed) vCards with the 'x-es-lom-contribute-date'-key look like this: + # which means they are missing the actual date itself. + # By checking if the string is True-ish, empty strings '' won't be saved to Lifecycle + lifecycle.add_value("date", vcard_es_date_value) + # ToDo: this might be a good place for an 'else'-statement to catch malformed vCards + # by their node-ID + if hasattr(vcard, "uid"): + vcard_uid: str = vcard.uid.value + lifecycle.add_value("uuid", vcard_uid) + yield lifecycle def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) @@ -192,11 +241,36 @@ def getLicense(self, response): license.add_value( "internal", self.getProperty("ccm:commonlicense_key", response) ) + # ToDo: setting 'internal' here like this might be problematic in regards to CC-Versions: + # need to double-check if this might (wrongfully) turn CC x.0 licenses into other versions + # - "ccm:commonlicense_cc_version" license.add_value("author", self.getProperty("ccm:author_freetext", response)) + license.add_value("description", self.getProperty("cclom:rights_description", response)) + license.add_value("expirationDate", self.getProperty("ccm:license_to", response)) return license def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) + valuespaces.add_value("accessibilitySummary", self.getProperty("ccm:accessibilitySummary", response)) + if self.getProperty("ccm:conditionsOfAccess", response): + valuespaces.add_value("conditionsOfAccess", self.getProperty("ccm:conditionsOfAccess", response)) + elif self.getProperty("ccm:oeh_quality_login", response): + # this fallback will lose metadata in the long run since the "conditionsOfAccess"-Vocab has 3 values, while + # "ccm:oeh_quality_login" returns only binary string values: + # - "0": login required + # - "1": no login required + oeh_quality_login_value: list = self.getProperty("ccm:oeh_quality_login", response) + if oeh_quality_login_value: + oeh_quality_login_value: str = oeh_quality_login_value[0] + match oeh_quality_login_value: + case "1": + valuespaces.add_value("conditionsOfAccess", "no_login") + case "2": + valuespaces.add_value("conditionsOfAccess", "login") + case _: + logging.warning(f"edu-sharing property 'ccm:oeh_quality_login' returned an unexpected value: " + f"{oeh_quality_login_value} for node-ID {response.meta['item']['ref']['id']}") + valuespaces.add_value("dataProtectionConformity", self.getProperty("ccm:dataProtectionConformity", response)) valuespaces.add_value("discipline", self.getProperty("ccm:taxonid", response)) valuespaces.add_value( "intendedEndUserRole", @@ -205,10 +279,16 @@ def getValuespaces(self, response): valuespaces.add_value( "educationalContext", self.getProperty("ccm:educationalcontext", response) ) + valuespaces.add_value("fskRating", self.getProperty("ccm:fskRating", response)) valuespaces.add_value( "learningResourceType", self.getProperty("ccm:educationallearningresourcetype", response), ) + valuespaces.add_value('new_lrt', self.getProperty("ccm:oeh_lrt", response)) + valuespaces.add_value("oer", self.getProperty("ccm:license_oer", response)) + valuespaces.add_value("price", self.getProperty("ccm:price", response)) + # ToDo: confirm if 'sourceContentType' & 'toolCategory' should be used at all, + # since they are already obsolete in WLO crawlers 
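As a self-contained illustration of the vCard handling above, the following sketch parses a made-up vCard string with vobject; all sample values are invented, only the attribute access mirrors the parsing code.

import vobject

vcard_raw = (
    "BEGIN:VCARD\n"
    "VERSION:3.0\n"
    "N:Doe;Jane\n"
    "FN:Jane Doe\n"
    "ORG:Example School\n"
    "EMAIL:jane.doe@example.org\n"
    "END:VCARD"
)
vcard = vobject.readOne(vcard_raw)
print(vcard.n.value.given, vcard.n.value.family)  # Jane Doe
print(vcard.email.value)                          # jane.doe@example.org
print(vcard.org.value)                            # ['Example School'] -> ORG typically comes back as a list of units
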
(might be obsolete here as well) valuespaces.add_value( "sourceContentType", self.getProperty("ccm:sourceContentType", response) ) From 78fa88dda0f3cdf38f624394725d3a5987e7a528 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Jan 2023 13:35:47 +0100 Subject: [PATCH 231/590] fix: OER-compatible license versions --- converter/pipelines.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 0190adb5..973adcda 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -175,14 +175,20 @@ def process_item(self, raw_item, spider): break if "url" in item["license"] and "oer" not in item["license"]: - if ( - item["license"]["url"] == Constants.LICENSE_CC_BY_40 - or item["license"]["url"] == Constants.LICENSE_CC_BY_30 - or item["license"]["url"] == Constants.LICENSE_CC_BY_SA_30 - or item["license"]["url"] == Constants.LICENSE_CC_BY_SA_40 - or item["license"]["url"] == Constants.LICENSE_CC_ZERO_10 - ): - item["license"]["oer"] = OerType.ALL + match item["license"]["url"]: + case Constants.LICENSE_CC_BY_20 | \ + Constants.LICENSE_CC_BY_25 | \ + Constants.LICENSE_CC_BY_30 | \ + Constants.LICENSE_CC_BY_40 | \ + Constants.LICENSE_CC_BY_SA_20 | \ + Constants.LICENSE_CC_BY_SA_25 | \ + Constants.LICENSE_CC_BY_SA_30 | \ + Constants.LICENSE_CC_BY_SA_40 | \ + Constants.LICENSE_CC_ZERO_10: + item["license"]["oer"] = OerType.ALL + case _: + # ToDo: log default case if not too spammy + pass if "internal" in item["license"] and "oer" not in item["license"]: internal = item["license"]["internal"].lower() From d935a1f351654de470143e9094b8ef29bc19bdb3 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 9 Nov 2022 13:48:32 +0100 Subject: [PATCH 232/590] update sample_spider_alternative - docs: 'valuespaces.discpline' explanations to use 'skos:Concept' keys in crawlers - docs: 'lifecycle.role' values for "metadata_contributor" / "metadata_provider" and "unknown" to 'LomLifecycleItemloader' docs --- converter/spiders/sample_spider_alternative.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 2c0f1f3a..5215c8e9 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -21,8 +21,10 @@ class SampleSpiderAlternative(CrawlSpider, LomBase): start_urls = ["https://edu-sharing.com"] # starting point of your crawler, e.g. a sitemap, index, rss-feed etc. 
version = "0.0.1" # this is used for timestamping your crawler results (if a source changes its layout/data, # make sure to increment this value to force a clear distinction between old and new crawler results) - WEB_TOOLS = WebEngine.Playwright # OPTIONAL: this attribute controls which tool is used for taking Screenshots - # you can skip this attribute altogether if you want to use the default Settings (Splash) + custom_settings = { + 'WEB_TOOLS': WebEngine.Playwright # OPTIONAL: this attribute controls which tool is used for taking Screenshots + # you can skip this attribute altogether if you want to use the default Settings (Splash) + } def getId(self, response=None) -> str: # You have two choices here: @@ -131,8 +133,10 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - organization optional # - email optional # - uuid optional - lifecycle.add_value('role', 'author') # supported roles: "author" / "editor" / "publisher" - # for available roles mapping, please take a look at converter/es_connector.py + lifecycle.add_value('role', 'author') + # supported roles: + # "author" / "editor" / "publisher" / "metadata_contributor" / "metadata_provider" / "unknown" + # for further available role mappings, please take a look at converter/es_connector.py educational = LomEducationalItemLoader() # TODO: fill "educational"-keys with values for @@ -159,9 +163,12 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: vs = ValuespaceItemLoader() # for possible values, either consult https://vocabs.openeduhub.de # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs + # wherever possible, please use the skos:Concept instead of literal strings + # (since they are more stable over a longer period of time) # TODO: fill "valuespaces"-keys with values for # - discipline recommended # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl) + # (please set discipline-values by their unique vocab-identifier: e.g. '060' for "Art education") # - intendedEndUserRole recommended # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) # - learningResourceType recommended From 14904e28867e76705703276e0752a131643a16fa Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 27 Jan 2023 18:51:50 +0100 Subject: [PATCH 233/590] style: logging for unexpected 'technical.duration'-values - code cleanup / improve readability --- converter/es_connector.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index e92e7e51..72398daa 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -359,6 +359,8 @@ def transformItem(self, uuid, spider, item): # edusharing requires milliseconds duration = int(float(duration) * 1000) except: + logging.debug(f"The supplied 'technical.duration'-value {duration} could not be converted from " + f"seconds to milliseconds. 
('cclom:duration' expects ms)") pass spaces["cclom:duration"] = duration @@ -403,7 +405,7 @@ def transformItem(self, uuid, spider, item): spaces["ccm:published_date"] = date.isoformat() if organization: vcard.add("org") - # fix a bug of splitted org values + # fix a bug of split org values vcard.org.behavior = VCardBehavior.defaultBehavior vcard.org.value = organization vcard.add("url").value = url @@ -416,20 +418,20 @@ def transformItem(self, uuid, spider, item): spaces[mapping] = [vcard.serialize()] valuespaceMapping = { + "accessibilitySummary": "ccm:accessibilitySummary", + "conditionsOfAccess": "ccm:conditionsOfAccess", + "containsAdvertisement": "ccm:containsAdvertisement", + "dataProtectionConformity": "ccm:dataProtectionConformity", "discipline": "ccm:taxonid", - "intendedEndUserRole": "ccm:educationalintendedenduserrole", "educationalContext": "ccm:educationalcontext", + "fskRating": "ccm:fskRating", + "intendedEndUserRole": "ccm:educationalintendedenduserrole", "learningResourceType": "ccm:educationallearningresourcetype", "new_lrt": "ccm:oeh_lrt", + "oer": "ccm:license_oer", + "price": "ccm:price", "sourceContentType": "ccm:sourceContentType", "toolCategory": "ccm:toolCategory", - "conditionsOfAccess": "ccm:conditionsOfAccess", - "containsAdvertisement": "ccm:containsAdvertisement", - "price": "ccm:price", - "accessibilitySummary": "ccm:accessibilitySummary", - "dataProtectionConformity": "ccm:dataProtectionConformity", - "fskRating": "ccm:fskRating", - "oer": "ccm:license_oer", } for key in item["valuespaces"]: spaces[valuespaceMapping[key]] = item["valuespaces"][key] From 535eb415c536c67e2d07c02f11c7a61c4aeed865 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 31 Jan 2023 14:11:06 +0100 Subject: [PATCH 234/590] docs: items.py attributes and edu-sharing fields - docs: try to connect our Scrapy item model with their respective edu-sharing fields to improve future maintainability - style: change DocStrings (according to PEP257) to the triple-quoted format Thanks: - the foundational work for this documentation was laid during the OER hackathon in July 2022 (see: PR #55) by @MRuecklCC -- (on this note: thank you for taking the time to do the necessary 'detective'-work and document these metadata-fields with me, Martin!) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/items.py | 315 ++++++++++++++++++++++++++++++++------------- 1 file changed, 229 insertions(+), 86 deletions(-) diff --git a/converter/items.py b/converter/items.py index d92164c1..af151468 100644 --- a/converter/items.py +++ b/converter/items.py @@ -32,17 +32,46 @@ class MutlilangItem(Item): class LomGeneralItem(Item): + """ + General requirements: + - 'description' + - 'keyword' + - 'title' + + (If neither 'description' nor 'keyword' are provided, the whole item gets dropped by the pipeline.) 
+ """ + aggregationLevel = Field() + """Corresponding edu-sharing property: 'cclom:aggregationlevel'""" + coverage = Field() + # ToDo: 'coverage' is currently not used; no equivalent edu-sharing property + description = Field() + """Corresponding edu-sharing property: 'cclom:general_description'""" identifier = Field(output_processor=JoinMultivalues()) - title = Field() - language = Field() + """Corresponding edu-sharing property: 'cclom:general_identifier' """ keyword = Field(output_processor=JoinMultivalues()) - coverage = Field() + """Corresponding edu-sharing property: 'cclom:general_keyword'""" + language = Field() + """Corresponding edu-sharing property: 'cclom:general_language'""" structure = Field() - aggregationLevel = Field() - description = Field() + # ToDo: 'structure' is currently not used; no equivalent edu-sharing property + title = Field() + """Corresponding edu-sharing properties: 'cm:title' & 'cclom:title'""" class LomLifecycleItem(Item): + """ + Depending on the 'role'-value that is chosen for a LomLifecycleItem, values are written to a VCARD-string and mapped + to either one of these corresponding edu-sharing properties: + + - 'ccm:lifecyclecontributer_publisher' ('role'-value = 'publisher') + - 'ccm:lifecyclecontributer_author' ('role'-value = 'author') + - 'ccm:lifecyclecontributer_editor' ('role'-value = 'editor') + - 'ccm:lifecyclecontributer_metadata_creator' ('role'-value = 'metadata_creator') + - 'ccm:lifecyclecontributer_metadata_provider' ('role'-value = 'metadata_provider') + - 'ccm:lifecyclecontributer_unknown' ('role'-value = 'unknown') + + The role 'unknown' is used for contributors in an unknown capacity ("Mitarbeiter"). + """ role = Field() firstName = Field() lastName = Field() @@ -51,42 +80,73 @@ class LomLifecycleItem(Item): url = Field() uuid = Field() date = Field() - "the date of contribution. Will be automatically transformed/parsed" + """The (publication) date of a contribution. Date values will be automatically transformed/parsed. + Corresponding edu-sharing property: 'ccm:published_date' + """ + class LomTechnicalItem(Item): + duration = Field() + """Duration of the element (e.g. for video or audio content). Supported formats for automatic transforming include + seconds, HH:MM:SS and ISO 8601 duration (PT0H0M0S). + Corresponding edu-sharing property: 'cclom:duration'""" format = Field() - size = Field() - location = Field(output_processor=JoinMultivalues()) - "URI/location of the element, multiple values are supported, the first entry is the primary location, while all others are secondary locations" - requirement = Field() + """'format' expects MIME-type as a string, e.g. "text/html" or "video/mp4". + Corresponding edu-sharing property: 'cclom:format'""" installationRemarks = Field() + # ToDo: 'installationRemarks' is an unused field + location = Field(output_processor=JoinMultivalues()) + """URI/location of the element; multiple values are supported. + The first entry is the primary location, while all others are secondary locations. + Corresponding edu-sharing properties: 'ccm:wwwurl' & 'cclom:location'""" otherPlatformRequirements = Field() - duration = Field() - "Duration of the element (e.g. for video or audio). Supported formats for automatic transforming include seconds, HH:MM:SS and ISO 8601 duration (PT0H0M0S)" + # ToDo: LOM.technical attribute 'otherPlatformRequirements' has no equivalent property in edu-sharing (and has never + # been provided by any of the crawled APIs, yet. 
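# ------------------------------------------------------------------------------------
# Editor's aside (illustrative sketch, not part of this patch): the 'duration' docstring
# above lists three accepted notations - plain seconds, HH:MM:SS and ISO-8601 (PT0H0M0S).
# The hypothetical helper below shows one way such strings could be normalised to whole
# seconds; the real conversion happens later in the pipelines / es_connector and may
# differ in detail.
import re


def normalize_duration_to_seconds(raw: str) -> int | None:
    """Best-effort conversion of a duration string to total seconds."""
    raw = raw.strip()
    if raw.isdigit():  # plain seconds, e.g. "4230"
        return int(raw)
    if ":" in raw:  # e.g. "1:10:30" or "10:30"
        parts = raw.split(":")
        if all(part.isdigit() for part in parts):
            total = 0
            for part in parts:
                total = total * 60 + int(part)
            return total
        return None
    iso_match = re.fullmatch(r"PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?", raw)  # e.g. "PT1H10M30S"
    if iso_match and any(iso_match.groups()):
        hours, minutes, seconds = (int(group) if group else 0 for group in iso_match.groups())
        return hours * 3600 + minutes * 60 + seconds
    return None  # unrecognised format


# normalize_duration_to_seconds("PT1H10M30S") == normalize_duration_to_seconds("1:10:30") == 4230
# ------------------------------------------------------------------------------------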
+ requirement = Field() + # ToDo: LOM.technical attribute 'requirement' has no equivalent property in edu-sharing + size = Field() + """Content size in bytes. (The value is automatically calculated by the edu-sharing back-end) + Corresponding edu-sharing property: 'cclom:size'""" class LomAgeRangeItem(Item): fromRange = Field() + """Corresponding edu-sharing property: 'ccm:educationaltypicalagerange_from'""" toRange = Field() + """Corresponding edu-sharing property: 'ccm:educationaltypicalagerange_to""" class LomEducationalItem(Item): - interactivityType = Field() - # Please use valuespaces.learningResourceType - # learningResourceType = Field() + """ + Item modeled after LOM-DE "Educational". Attention: Some fields which originally appear in "educational" are handled + by "ValuespaceItem" instead because of vocabularies which need to be mapped. + + Please DO NOT use/fill the following fields here in "educational", but rather use them in ValuespaceItem: + - intendedEndUserRole (see: 'valuespaces.intendedEndUserRole') + - learningResourceType (see: 'valuespaces.learningResourceType') + - context (see: 'valuespaces.educationalContext') + """ + description = Field() + # ToDo: 'description' isn't mapped to any field in edu-sharing + difficulty = Field() + """Corresponding edu-sharing property: 'ccm:educationaldifficulty'""" + # ToDo: 'ccm:educationaldifficulty' is currently not used in edu-sharing / WLO + # - either use this field or get rid of it + intendedEndUserRole = Field(serializer=MutlilangItem, output_processor=JoinMultivalues()) + # Please use valuespaces.intendedEndUserRole instead! interactivityLevel = Field() + # ToDo: 'interactivityLevel' is currently not used anywhere in edu-sharing + interactivityType = Field() + """Corresponding edu-sharing property: 'ccm:educationalinteractivitytype'""" + # ToDo: 'ccm:educationalinteractivitytype' is currently not used anywhere in edu-sharing + language = Field() + # ToDo: "Educational language" seems to be unused in edu-sharing. semanticDensity = Field() - # Please use valuespaces.intendedEndUserRole - intendedEndUserRole = Field( - serializer=MutlilangItem, output_processor=JoinMultivalues() - ) - # Please use valuespaces.educationalContext - # context = Field() + # ToDo: 'semanticDensity' is not used anywhere and there doesn't appear to be an edu-sharing property for it typicalAgeRange = Field(serializer=LomAgeRangeItem) - difficulty = Field() + """See LomAgeRangeItem. Corresponding edu-sharing properties: + 'ccm:educationaltypicalagerange_from' & 'ccm:educationaltypicalagerange_to'""" typicalLearningTime = Field() - description = Field() - language = Field() # please use the seperate license data @@ -97,114 +157,197 @@ class LomEducationalItem(Item): class LomClassificationItem(Item): + """ + LOM "Classification"-specific metadata. + (see: LOM-DE specifications: "Classification"-category) + """ cost = Field() - purpose = Field() - taxonPath = Field(output_processor=JoinMultivalues()) + # ToDo: no equivalent property in edu-sharing, might be obsolete (see: 'valuespaces.price') description = Field() + # ToDo: LOM classification 'description' has no equivalent property in edu-sharing keyword = Field() + # ToDo: 'ccm:classification_keyword' currently not used in edu-sharing + purpose = Field() + # ToDo: 'ccm:classification_purpose' not actively used in edu-sharing? 
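# ------------------------------------------------------------------------------------
# Editor's aside (illustrative sketch, not part of this patch): 'typicalAgeRange' (see
# LomAgeRangeItem above) is persisted as two separate edu-sharing properties
# ('..._from' / '..._to'), while sources - e.g. the csv-based RSS crawlers later in this
# series - often deliver the range as two columns or a single "10-18"-style string.
# The helper below is a hypothetical example of splitting such a combined string:
def split_age_range(raw: str) -> tuple[str, str] | None:
    """Split an age-range string such as '10-18' into (fromRange, toRange)."""
    from_range, separator, to_range = raw.partition("-")
    if separator and from_range.strip().isdigit() and to_range.strip().isdigit():
        return from_range.strip(), to_range.strip()
    return None


# split_age_range("10-18") == ("10", "18")
# ------------------------------------------------------------------------------------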
+ taxonPath = Field(output_processor=JoinMultivalues()) + # ToDo: LOM classification 'taxonPath' has no equivalent property in edu-sharing, might be obsolete class LomBaseItem(Item): + """ + LomBaseItem provides the nested structure for LOM (Sub-)Elements. No metadata is saved here. + (Please check the specific class definitions of the nested Items for more information.) + """ + classification = Field(serializer=LomClassificationItem) + educational = Field(serializer=LomEducationalItem) general = Field(serializer=LomGeneralItem) lifecycle = Field(serializer=LomLifecycleItem, output_processor=JoinMultivalues()) - technical = Field(serializer=LomTechnicalItem) - educational = Field(serializer=LomEducationalItem) # rights = Field(serializer=LomRightsItem) - classification = Field(serializer=LomClassificationItem) + technical = Field(serializer=LomTechnicalItem) class ResponseItem(Item): - status = Field() - url = Field() - html = Field() - text = Field() - headers = Field() + """ + Attributes of ResponseItem are populated by either Playwright or Splash when an item is processed by the pipelines. + """ cookies = Field() + headers = Field() har = Field() + html = Field() + status = Field() + text = Field() + url = Field() class ValuespaceItem(Item): - intendedEndUserRole = Field(output_processor=JoinMultivalues()) + """ + Values provided for attributes of ValuespaceItem are mapped against OEH (SKOS) vocabularies before saving them to + edu-sharing. (see: https://github.com/openeduhub/oeh-metadata-vocabs) + """ + accessibilitySummary = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:accessibilitysummary'""" + conditionsOfAccess = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:conditionsOfAccess'""" + containsAdvertisement = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:containsAdvertisement'""" + dataProtectionConformity = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:dataProtectionConformity'""" discipline = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:taxonid'""" educationalContext = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:educationalcontext'""" + fskRating = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:fskRating'""" + intendedEndUserRole = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:intendedEndUserRole'""" learningResourceType = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:educationallearningresourcetype'""" new_lrt = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:oeh_lrt'""" + oer = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:license_oer'""" + price = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:price'""" sourceContentType = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:sourceContentType'""" # ToDo: sourceContentType is no longer used in edu-sharing # DO NOT SET this field in crawlers for individual materials! 
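# ------------------------------------------------------------------------------------
# Editor's aside (illustrative sketch, not part of this patch): ValuespaceItem values
# are resolved against the SKOS vocabularies mentioned in the class docstring above,
# i.e. crawlers provide vocab keys (e.g. '060') or strings that the
# ProcessValuespacePipeline maps to full concept URIs. Later pipeline steps (see the
# LisumPipeline changes further down in this patch series) only rely on the trailing
# path segment of such a URI. The base URI below is assumed for illustration only:
ASSUMED_DISCIPLINE_VOCAB_BASE = "http://w3id.org/openeduhub/vocabs/discipline/"


def vocab_key_from_concept_uri(concept_uri: str) -> str:
    """Return the trailing vocab key (e.g. the eafCode) of a SKOS concept URI."""
    return concept_uri.rstrip("/").split("/")[-1]


# vocab_key_from_concept_uri(ASSUMED_DISCIPLINE_VOCAB_BASE + "060") == "060"
# ------------------------------------------------------------------------------------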
toolCategory = Field(output_processor=JoinMultivalues()) - - conditionsOfAccess = Field(output_processor=JoinMultivalues()) - containsAdvertisement = Field(output_processor=JoinMultivalues()) - price = Field(output_processor=JoinMultivalues()) - accessibilitySummary = Field(output_processor=JoinMultivalues()) - dataProtectionConformity = Field(output_processor=JoinMultivalues()) - fskRating = Field(output_processor=JoinMultivalues()) - oer = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:toolCategory'""" class LicenseItem(Item): - url = Field() - "url to a license description" - internal = Field() - "a internal constants for this license" - description = Field() - "a custom, free-text license description. Will only be used if the internal constants is set to CUSTOM" - oer = Field() - "a value of OerType (if empty, will be mapped via the given url or internal value)" + """ + Metadata provided within LicenseItem is used to recognize and map specific licenses to edu-sharing's corresponding + properties. To make sure that licenses are properly recognized by edu-sharing, make sure to provide a valid + 'url'-string and if that's not possible, set a correct 'internal'-constant. (see: constants.py) + """ author = Field() - "an author freetext (basically, how the author should be named in case this is a by-license" + """An author freetext string. (Basically, how the author should be named in case this is a 'CC-BY'-license. + Corresponding edu-sharing property: 'ccm:author_freetext'""" + description = Field() + """A custom, free-text license description. Will only be used if the 'internal'-attribute (see: constants.py) is set + to 'CUSTOM'. + Corresponding edu-sharing property: 'cclom:rights_description'""" expirationDate = Field() - "a date at which any content license expires and the content shouldn't be delivered anymore" + """A date at which any content license expires and the content shouldn't be delivered anymore. + Corresponding edu-sharing property: 'ccm:license_to'""" + internal = Field() + """An internal (edu-sharing) constant for this license. + Corresponding edu-sharing property: 'ccm:commonlicense_key'""" + oer = Field() + """A value of OerType (if empty, will be mapped via the given url or internal value). + Corresponding edu-sharing property: 'ccm:oer'""" + url = Field() + """Expects a URL (String) to a license description. + Gets mapped to two corresponding edu-sharing properties: 'ccm:commonlicense_key' & 'ccm:commonlicense_version'""" class PermissionItem(Item): - public = Field() - "Should this item be public (accessible for anyone)" - groups = Field(output_processor=JoinMultivalues()) - "Global Groups that should have access to this object" - mediacenters = Field(output_processor=JoinMultivalues()) - "Mediacenters that should have access to this object" + """ + PermissionItem sets the edu-sharing permissions for a crawled item. 
+ """ autoCreateGroups = Field() - "Should global groups be created if they don't exist" + """Should global groups be created if they don't already exist""" autoCreateMediacenters = Field() - "Should media centers be created if they don't exist" + """Should media centers be created if they don't already exist""" + groups = Field(output_processor=JoinMultivalues()) + """Global Groups that should have access to this object""" + mediacenters = Field(output_processor=JoinMultivalues()) + """Mediacenters that should have access to this object""" + public = Field() + """Determines if this item should be 'public' (= accessible by anyone)""" class BaseItem(Item): - sourceId = Field() - uuid = Field() - "explicit uuid of the target element, please only set this if you actually know the uuid of the internal document" - hash = Field() + """ + BaseItem provides the basic data structure for any crawled item. + + BaseItem requirements: + - 'sourceId' + - 'hash' + + Expected Items to be nested within BaseItem: + - LicenseItem + - LomBaseItem + - PermissionItem + - ResponseItem + - ValuespaceItem + """ + binary = Field() + """Binary data which should be uploaded to edu-sharing (= raw data, e.g. ".pdf"-files).""" collection = Field(output_processor=JoinMultivalues()) - "id of collections this entry should be placed into" - origin = Field() - "in case it was fetched from a referatorium, the real origin name may be included here" - response = Field(serializer=ResponseItem) - ranking = Field() + """id of edu-sharing collections this entry should be placed into""" + custom = Field() + """A field for custom data which can be used by the target transformer to store data in the native format + (i.e. 'ccm:'/'cclom:'-properties in edu-sharing).""" fulltext = Field() - thumbnail = Field() - "thumbnail data in base64" + """The 'fulltext'-attribute gets populated by a 'response.text'-call in the pipelines.""" + hash = Field() + """Corresponding edu-sharing property: 'ccm:replicationsourcehash'""" lastModified = Field() + # ToDo: 'lastModified' doesn't appear to be mapped to any edu-sharing property + license = Field(serializer=LicenseItem) lom = Field(serializer=LomBaseItem) - valuespaces = Field(serializer=ValuespaceItem) - "all items which are based on (skos) based valuespaces. The ProcessValuespacePipeline will automatically convert items inside here" - valuespaces_raw = Field(serializer=ValuespaceItem) - "this item is only used by the ProcessValuespacePipeline and holds the ""raw"" data which were given to the valuespaces. Please do not use it inside crawlers" + notes = Field() + """Editorial notes (e.g. as used in edu-sharing between editors (WLO: "FachredakteurInnen")). + Corresponding edu-sharing property: 'ccm:notes'""" + origin = Field() + """In case an item was fetched from a "referatorium", the real origin name may be included here. + Corresponding edu-sharing property: 'ccm:replicationsourceorigin'""" + # 'origin' is currently used to create crawler subfolders in edu-sharing's workspace view: + # e.g.: "SYNC_OBJ///..." permissions = Field(serializer=PermissionItem) - "permissions (access rights) for this entry" - license = Field(serializer=LicenseItem) + """edu-sharing permissions (access rights) for this entry""" publisher = Field() - notes = Field() - "editorial notes" + # ToDo: publisher is implemented as a part of Lifecycle. This field isn't used anywhere, is most probably an + # oversight and should be deleted. 
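# ------------------------------------------------------------------------------------
# Editor's aside (illustrative sketch, not part of this patch): 'sourceId' and 'hash'
# are the two required BaseItem fields listed in the class docstring above. A common
# pattern in these crawlers is to derive the hash from the crawler 'version' plus a
# last-modified/publication timestamp, so re-crawls can detect unchanged items; the
# exact format below is a hypothetical example, not a fixed rule.
def build_source_hash(crawler_version: str, last_modified_iso: str) -> str:
    """Combine crawler version and a timestamp into a change-detection hash."""
    return f"{last_modified_iso}v{crawler_version}"


# build_source_hash("0.0.1", "2023-02-08T13:47:08") == "2023-02-08T13:47:08v0.0.1"
# ------------------------------------------------------------------------------------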
+ ranking = Field() + # ToDo: ranking isn't used anywhere, might be obsolete + response = Field(serializer=ResponseItem) + sourceId = Field() + """Corresponding edu-sharing property: 'ccm:replicationsourceid'""" status = Field() - "status information of a given node, i.e. activated or deactivated" - binary = Field() - "binary data which should be uploaded (raw data)" - custom = Field() - "custom data, it can be used by the target transformer to store data in the native format (i.e. ccm/cclom properties in edu-sharing)" + """Status information of a given node, i.e. activated or deactivated. + Corresponding edu-sharing property: 'ccm:editorial_state'""" + thumbnail = Field() + """Expects a thumbnail URL which in turn is consumed by the thumbnail pipeline. If a valid URL is provided, + the resulting 'thumbnail'-dictionary consists of 3 key-value pairs after completion: + - 'mimetype' mimetype (String) + - 'small' image data in base64 + - 'large' image data in base64""" + uuid = Field() + """Explicit uuid of the target element. + Please ONLY set this manually IF you actually know the uuid of the internal document! + Corresponding edu-sharing property: 'ccm:replicationsourceuuid'""" + valuespaces = Field(serializer=ValuespaceItem) + """All items which are based on (SKOS) based valuespaces vocabularies. + The ProcessValuespacePipeline will automatically convert items inside here.""" + valuespaces_raw = Field(serializer=ValuespaceItem) + """This item is only used by the ProcessValuespacePipeline and holds the ""raw"" data which were given to the + valuespaces. Please DO NOT use it within normal crawlers""" screenshot_bytes = Field() - # this is a (temporary) field that gets deleted after the thumbnail pipeline processed its byte-data + """screenshot_bytes is a (temporary) field that gets deleted after the thumbnail pipeline processed its byte-data""" class BaseItemLoader(ItemLoader): From c05577e7dbbbb8e9404abd83ed9eedeecdcb1135 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 24 Jan 2023 18:31:42 +0100 Subject: [PATCH 235/590] remove: "Faszination Wissen"-RSS feed because the show has been discontinued in 2017 - the official successor "Gut zu Wissen" is already on the list, so this change only reduces errors --- csv/br_rss.csv | 1 - 1 file changed, 1 deletion(-) diff --git a/csv/br_rss.csv b/csv/br_rss.csv index 841290a7..95f894dc 100644 --- a/csv/br_rss.csv +++ b/csv/br_rss.csv @@ -15,7 +15,6 @@ https://feeds.br.de/die-entdeckungen-grosser-forscher/feed.xml,video,240,COPYRIG https://feeds.br.de/campus-talks/feed.xml,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de, https://feeds.br.de/iq-das-magazin/feed.xml,audio,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de, https://feeds.br.de/radiowissen/feed.xml,audio,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de, -https://feeds.br.de/faszination-wissen/feed.xml,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de, https://feeds.br.de/unkraut-ihr-umweltmagazin/feed.xml,video,640,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de, https://feeds.br.de/mehr-wert/feed.xml,video,700,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de, https://feeds.br.de/ich-mach-s/feed.xml,video,020; 040,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de, From c4990baee5e6b488fe100ca74f4815f2aa875a29 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 24 Jan 2023 19:06:37 +0100 Subject: [PATCH 236/590] add pyCharm run configurations for RSS-based crawlers - br_rss_spider - 
zdf_rss_spider Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .run/br_rss_spider.run.xml | 25 +++++++++++++++++++++++++ .run/zdf_rss_spider.run.xml | 25 +++++++++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 .run/br_rss_spider.run.xml create mode 100644 .run/zdf_rss_spider.run.xml diff --git a/.run/br_rss_spider.run.xml b/.run/br_rss_spider.run.xml new file mode 100644 index 00000000..68c0cb98 --- /dev/null +++ b/.run/br_rss_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file diff --git a/.run/zdf_rss_spider.run.xml b/.run/zdf_rss_spider.run.xml new file mode 100644 index 00000000..50814c1d --- /dev/null +++ b/.run/zdf_rss_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file From 642298e44b5c6d5afae5a656cfd53fc1cc24daf7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 24 Jan 2023 19:59:02 +0100 Subject: [PATCH 237/590] feat: RSS base covers more optional fields (squashed) - feat: fallbacks for thumbnail URLs - feat: using -tags for keyword extraction - feat: use -element for LOM technical 'location' -- fix: setting video values for 'technical.format' and 'technical.size' would cause broken thumbnails in edu-sharing - feat: fallbacks for publication dates - fix: always try to set custom license description when -tag is available - fix: no longer calling "remove_namespaces()"-method because namespaces are needed for fallback methods -- (otherwise fields like "" would conflict with "" - add: fallbacks for "general.description" - add: if "guid"-attribute "isPermaLink" is true, add URL to technical.location -- (this should be useful for recognizing duplicate entries in the future) --- converter/spiders/base_classes/rss_base.py | 144 +++++++++++++++++---- 1 file changed, 119 insertions(+), 25 deletions(-) diff --git a/converter/spiders/base_classes/rss_base.py b/converter/spiders/base_classes/rss_base.py index 64952fef..3467f4fa 100644 --- a/converter/spiders/base_classes/rss_base.py +++ b/converter/spiders/base_classes/rss_base.py @@ -1,5 +1,8 @@ from scrapy.spiders import CrawlSpider + from .lom_base import LomBase +from ...constants import Constants +from ...items import LicenseItemLoader class RSSBase(CrawlSpider, LomBase): @@ -11,7 +14,6 @@ def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) def parse(self, response): - response.selector.remove_namespaces() # common properties self.commonProperties["language"] = response.xpath( "//rss/channel/language//text()" @@ -46,46 +48,138 @@ def mapResponse(self, response): def getBase(self, response): base = LomBase.getBase(self, response) - thumbnail = self.commonProperties["thumbnail"] - if thumbnail: - base.add_value("thumbnail", thumbnail) + thumbnail_channel = self.commonProperties["thumbnail"] + thumbnail_channel_itunes = response.meta["item"].xpath('//*[name()="itunes:image"]/@href').get() + thumbnail_item_itunes = response.meta["item"].xpath('*[name()="itunes:image"]/@href').get() + # according to Apple's RSS guidelines the -element should be used for a + # channel-thumbnail, but experience shows that some RSS Feeds also include this element within individual items. 
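# ------------------------------------------------------------------------------------
# Editor's aside (illustrative sketch, not part of this patch): the fallback chain below
# deliberately matches namespaced iTunes tags via '*[name()="itunes:image"]' because
# remove_namespaces() is no longer called (see the commit message above). A minimal,
# self-contained demonstration against an invented RSS snippet (URL is made up):
from scrapy.selector import Selector

_SAMPLE_RSS = """<rss xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd">
  <channel>
    <item>
      <title>Episode 1</title>
      <itunes:image href="https://example.org/episode1.jpg"/>
    </item>
  </channel>
</rss>"""

_selector = Selector(text=_SAMPLE_RSS, type="xml")
# '//itunes:image' would require registering the namespace first; matching on name()
# works without any namespace bookkeeping:
_thumbnail_href = _selector.xpath('//*[name()="itunes:image"]/@href').get()
# _thumbnail_href == "https://example.org/episode1.jpg"
# ------------------------------------------------------------------------------------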
+ if thumbnail_item_itunes: + # if the thumbnail URL for an individual episode is available, it will be the primary choice + base.add_value("thumbnail", thumbnail_item_itunes) + elif thumbnail_channel: + # otherwise the channel-thumbnail found within will be used as a fallback URL + base.add_value("thumbnail", thumbnail_channel) + elif thumbnail_channel_itunes: + # if the tag doesn't exist, we're using as a final fallback + base.add_value("thumbnail", thumbnail_channel_itunes) return base def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) - general.add_value( - "identifier", response.meta["item"].xpath("guid//text()").get() - ) + guid = response.meta["item"].xpath("guid//text()").get() # optional -Element can (optionally) have an + # "isPermaLink"-attribute, see: https://www.rssboard.org/rss-specification#ltguidgtSubelementOfLtitemgt + if guid: + # by default, all guids are treated as (local) identifiers + general.add_value("identifier", guid) general.add_value( "title", response.meta["item"].xpath("title//text()").get().strip() ) general.add_value("language", self.commonProperties["language"]) - description = response.meta["item"].xpath("description//text()").get() - if not description: - description = ( - response.meta["item"].xpath('//*[name()="summary"]//text()').get() - ) - general.add_value("description", description) + description: str = response.meta["item"].xpath("description//text()").get() + summary: str = response.meta["item"].xpath('*[name()="summary"]//text()').get() + itunes_summary: str = response.meta["item"].xpath('*[name()="itunes:summary"]//text()').get() + # in case that a RSS feed doesn't adhere to the RSS 2.0 spec (), we're using two fallbacks: + # or if that doesn't exist: + if description: + general.add_value('description', description) + elif summary: + general.add_value('description', summary) + elif itunes_summary: + general.add_value('description', itunes_summary) + rss_category_channel: list = response.xpath('//rss/channel/category/text()').getall() + rss_category_item: list = response.meta["item"].xpath('category/text()').getall() + # see: https://www.rssboard.org/rss-profile#element-channel-item-category + itunes_category: list = response.xpath('//*[name()="itunes:category"]/@text').getall() + keyword_set = set() + if rss_category_channel: + keyword_set.update(rss_category_channel) + if rss_category_item: + keyword_set.update(rss_category_item) + if itunes_category: + keyword_set.update(itunes_category) + if keyword_set: + keyword_list: list = list(keyword_set) + if keyword_list: + keyword_list.sort() + general.add_value('keyword', keyword_list) return general def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) - # technical.add_value('format', item.xpath('enclosure/@type').get()) - # technical.add_value('size', item.xpath('enclosure/@length').get()) - # technical.add_value('location', item.xpath('enclosure/@url').get()) technical.add_value("format", "text/html") - if response.meta["item"].xpath("duration//text()").get() is not None: - # not all RSS-Feeds hold a "duration"-field (e.g. 
text-based article-feeds don't) + # the -element should always have 3 attributes: 'size', 'type' and 'url' + # see https://www.rssboard.org/rss-specification#ltenclosuregtSubelementOfLtitemgt + # enclosure_size = response.meta["item"].xpath("enclosure/@type").get() # size in bytes + # enclosure_type = response.meta["item"].xpath("enclosure/@type").get() # MIME-type + enclosure_url = response.meta["item"].xpath("enclosure/@url").get() # URL (of the .mp3 / .mp4) + # if enclosure_type: + # # ToDo: setting format and size breaks edu-sharing's file preview and thumbnail generation + # technical.replace_value("format", enclosure_type) + # if enclosure_size: + # technical.replace_value("size", enclosure_size) + rss_duration: str = response.meta["item"].xpath("duration//text()").get() + itunes_duration: str = response.meta["item"].xpath("*[name()='itunes:duration']/text()").get() + # is valid in 3 different variations: + # hours:minutes:seconds // minutes:seconds // total_seconds + if rss_duration: + # not all RSS-Feeds hold a "duration"-field (e.g. text-based news-feeds typically do not) # therefore we need to make sure that duration is only set where it's appropriate - technical.add_value("duration", response.meta["item"].xpath("duration//text()").get().strip()) - technical.add_value( - "location", response.meta["item"].xpath("link//text()").get() - ) + technical.add_value("duration", rss_duration.strip()) + elif itunes_duration: + # fallback: if there's no -element, there might be an optional -tag + technical.add_value("duration", itunes_duration.strip()) + link_url = response.meta["item"].xpath("link//text()").get() + if link_url: + technical.add_value("location", link_url) + elif enclosure_url: + technical.add_value("location", enclosure_url) + guid_is_permalink: str = response.meta["item"].xpath("guid/@isPermaLink").get() + if guid_is_permalink: + guid: str = response.meta["item"].xpath("guid//text()").get() + # if 's "isPermaLink"-attribute is true, guid points to a URL + if guid_is_permalink.strip() == "true" and guid: + if guid != response.url: + technical.add_value("location", guid) return technical def getLOMLifecycle(self, response): lifecycle = LomBase.getLOMLifecycle(self, response) lifecycle.add_value('role', 'publisher') - lifecycle.add_value('organization', response.meta["item"].xpath("*[name()='itunes:author']/text()").get()) - lifecycle.add_value('date', response.meta["item"].xpath("pubDate//text()").get()) - return lifecycle \ No newline at end of file + channel_author: str = response.xpath("//rss/channel/*[name()='itunes:author']/text()").get() + # if appears in /rss/channel, it will carry publisher/organizational information + if "publisher" in self.commonProperties: + lifecycle.add_value('organization', self.commonProperties["publisher"]) + elif channel_author: + lifecycle.add_value('organization', channel_author) + # ToDo: optional -element in , as soon as we actually encounter a RSS feed to test it against + # see: https://www.rssboard.org/rss-profile#namespace-elements-dublin-creator + pub_date = response.meta["item"].xpath("pubDate//text()").get() # according to the RSS 2.0 specs + # see: https://www.rssboard.org/rss-specification#ltpubdategtSubelementOfLtitemgt + pub_date_variation2 = response.meta["item"].xpath("PubDate//text()").get() + # according to Apple RSS Guidelines, Newsfeeds might use + # see: https://support.apple.com/guide/news-publisher/rss-guidelines-for-apple-news-apdc2c7520ff/icloud + pub_date_variation3 = 
response.meta["item"].xpath("published//text()").get() + # according to Apple's RSS Guidelines, some (Atom-inspired) feeds might use instead + if pub_date: + # is an OPTIONAL sub-element of + lifecycle.add_value('date', pub_date) + elif pub_date_variation2: + # if isn't available, might be + lifecycle.add_value('date', pub_date_variation2) + elif pub_date_variation3: + # if the RSS feed differs from the RSS 2.0 specs, might be available + lifecycle.add_value('date', pub_date_variation3) + return lifecycle + + def getLicense(self, response=None) -> LicenseItemLoader: + license_item_loader = LomBase.getLicense(self, response) + copyright_description: str = response.xpath('//rss/channel/copyright/text()').get() + if copyright_description: + license_item_loader.add_value('internal', Constants.LICENSE_CUSTOM) + # 'internal' needs to be set to CUSTOM for 'description' to be read + license_item_loader.add_value('description', copyright_description) + item_author: str = response.meta["item"].xpath("*[name()='itunes:author']/text()").get() + if item_author: + # if the optional field is nested in /rss/channel/item, it will contain author information + license_item_loader.add_value('author', item_author) + return license_item_loader From 33c35d9549be83599cb09e0652be6612986bc730 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 24 Jan 2023 23:45:43 +0100 Subject: [PATCH 238/590] fix: rss_list_base "getLicense()"-call - RSSListBase called LomBase directly, basically skipping RSSBase completely and therefore missing metadata which might be available within RSS items - optimize imports Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/rss_list_base.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/converter/spiders/base_classes/rss_list_base.py b/converter/spiders/base_classes/rss_list_base.py index 4f9457bb..3501ef97 100644 --- a/converter/spiders/base_classes/rss_list_base.py +++ b/converter/spiders/base_classes/rss_list_base.py @@ -1,12 +1,12 @@ +import csv +import os + import scrapy -from converter.items import * +from converter.valuespace_helper import ValuespaceHelper +from .csv_base import CSVBase from .lom_base import LomBase from .rss_base import RSSBase -from .csv_base import CSVBase -from converter.valuespace_helper import ValuespaceHelper -import csv -import os # rss crawler with a list of entries to crawl and map @@ -67,7 +67,7 @@ def getLOMGeneral(self, response): return general def getLicense(self, response): - license = LomBase.getLicense(self, response) + license = RSSBase.getLicense(self, response) license.add_value( "internal", self.getCSVValue(response, CSVBase.COLUMN_LICENSE) ) From 8d8b6bf579f09a882916a2918b48eae860db6b5e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 1 Feb 2023 11:30:08 +0100 Subject: [PATCH 239/590] fix: zdf_rss.csv licenses and terra-x URL - fix: the .csv previously set "COPYRIGHT_FREE_ACCESS" for the "license.internal"-field, but "COPYRIGHT_FREE" is expected by edu-sharing -- fix: Constants.py 'internal'-value for Copyright - fix: Terra-X podcast URL - optimize imports - use Playwright for Screenshots if no thumbnail URL is available - fix: all ZDF RSS feeds use https -- during testing/debugging it was noticed that trying to reach the terra-x feed by its old http URL was increasingly unreliable between crawler runs --- converter/constants.py | 2 +- converter/spiders/zdf_rss_spider.py | 14 
+++++++------- csv/zdf_rss.csv | 20 ++++++++++---------- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index 836c54ac..eac55826 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -86,7 +86,7 @@ class Constants: "PDM": [LICENSE_PDM], } - LICENSE_COPYRIGHT_LAW: Final[str] = "COPYRIGHT_LAW" + LICENSE_COPYRIGHT_LAW: Final[str] = "COPYRIGHT_FREE" LICENSE_CUSTOM: Final[str] = "CUSTOM" # Custom License, use the license description field for arbitrary values LICENSE_NONPUBLIC: Final[str] = "NONPUBLIC" diff --git a/converter/spiders/zdf_rss_spider.py b/converter/spiders/zdf_rss_spider.py index 02eb987e..9aade2e2 100644 --- a/converter/spiders/zdf_rss_spider.py +++ b/converter/spiders/zdf_rss_spider.py @@ -4,24 +4,24 @@ import requests import scrapy -from .base_classes import RSSListBase, LomBase, CSVBase +from .base_classes import RSSListBase +from ..web_tools import WebEngine -# Spider to fetch RSS from planet schule class ZDFRSSSpider(RSSListBase): name = "zdf_rss_spider" friendlyName = "ZDF" url = "https://www.zdf.de/" - version = "0.1.0" + version = "0.1.1" # last update: 2023-02-01 + custom_settings = { + 'WEB_TOOLS': WebEngine.Playwright + } def __init__(self, **kwargs): RSSListBase.__init__(self, "../csv/zdf_rss.csv", **kwargs) # couldn't find file, had to move 1 folder upwards def getLicense(self, response): - license_info = LomBase.getLicense(self, response) - license_info.add_value( - "internal", self.getCSVValue(response, CSVBase.COLUMN_LICENSE) - ) + license_info = RSSListBase.getLicense(self, response) page_content = scrapy.Selector(requests.get(response.url)) date = self.get_expiration_date(page_content) if date: diff --git a/csv/zdf_rss.csv b/csv/zdf_rss.csv index 35217ff5..38d2589e 100644 --- a/csv/zdf_rss.csv +++ b/csv/zdf_rss.csv @@ -1,11 +1,11 @@ url,learningResourceType,discipline,license,intendedEndUserRole,typicalAgeRangeFrom,typicalAgeRangeTo,language,keyword,,,,,,,,,,,,,,,,, -http://www.zdf.de/rss/podcast/video/zdf/politik/auslandsjournal,video,340,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/nachrichten/heute-19-uhr,video,480,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,Nachrichten,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/nachrichten/heute-journal,video,480,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,Nachrichten,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/nachrichten/heute-plus,video,480,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,Nachrichten,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/wissen/frag-den-lesch,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,Lesch,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/wissen/leschs-kosmos,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,Lesch,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/wissen/lesch-to-go,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,Lesch,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/kinder/logo,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,6,18,de,Nachrichten,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/dokumentation/zdfzoom,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,,,,,,,,,,,,,,,,,,de -http://www.zdf.de/rss/podcast/video/zdf/dokumentation/terra-x/,video,720,COPYRIGHT_FREE_ACCESS,learner; teacher,10,18,de,,,,,,,,,,,,,,,,,,de \ No newline at end of file 
+https://www.zdf.de/rss/podcast/video/zdf/politik/auslandsjournal,video,340,COPYRIGHT_FREE,learner; teacher,10,18,de,,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/nachrichten/heute-19-uhr,video,480,COPYRIGHT_FREE,learner; teacher,10,18,de,Nachrichten,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/nachrichten/heute-journal,video,480,COPYRIGHT_FREE,learner; teacher,10,18,de,Nachrichten,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/nachrichten/heute-plus,video,480,COPYRIGHT_FREE,learner; teacher,10,18,de,Nachrichten,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/wissen/frag-den-lesch,video,720,COPYRIGHT_FREE,learner; teacher,10,18,de,Lesch,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/wissen/leschs-kosmos,video,720,COPYRIGHT_FREE,learner; teacher,10,18,de,Lesch,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/wissen/lesch-to-go,video,720,COPYRIGHT_FREE,learner; teacher,10,18,de,Lesch,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/kinder/logo,video,720,COPYRIGHT_FREE,learner; teacher,6,18,de,Nachrichten,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/dokumentation/zdfzoom,video,720,COPYRIGHT_FREE,learner; teacher,10,18,de,,,,,,,,,,,,,,,,,,de +https://www.zdf.de/rss/podcast/video/zdf/dokumentation/terra-x,video,720,COPYRIGHT_FREE,learner; teacher,10,18,de,,,,,,,,,,,,,,,,,,de \ No newline at end of file From 4dd006bbe391855d85110d0875fd83d676e5e33c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 2 Feb 2023 11:06:18 +0100 Subject: [PATCH 240/590] chore: dateparser 1.1.3 -> 1.1.6 - the dateparser package was updated recently, among other things were improvements to recognition of German date strings Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 170d26fb..921f5575 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ wheel==0.37.1 image -dateparser==1.1.3 +dateparser==1.1.6 isodate==0.6.1 pyppeteer==1.0.2 html2text~=2020.1.16 From a838a2bcb898284d3a5bd9b52607d8e9bb24f336 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 3 Feb 2023 11:46:28 +0100 Subject: [PATCH 241/590] style: code formatting, typos, ToDos Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .../spiders/base_classes/edu_sharing_base.py | 23 +++++-------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index bf007825..1924b49d 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -149,12 +149,8 @@ def getHash(self, response=None) -> str: def getLOMGeneral(self, response): general = LomBase.getLOMGeneral(self, response) general.replace_value("title", response.meta["item"]["title"]) - general.add_value( - "keyword", self.getProperty("cclom:general_keyword", response) - ) - general.add_value( - "description", self.getProperty("cclom:general_description", response) - ) + general.add_value("keyword", self.getProperty("cclom:general_keyword", response)) + general.add_value("description", self.getProperty("cclom:general_description", response)) general.add_value('identifier', self.getProperty("cclom:general_identifier", response)) general.add_value('language', 
self.getProperty("cclom:general_language", response)) general.add_value('aggregationLevel', self.getProperty("cclom:aggregationLevel", response)) @@ -232,6 +228,7 @@ def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) technical.replace_value("format", "text/html") technical.replace_value("location", response.url) + # ToDo: 'cclom:location' supports multiple values (compare response.url <-> list of URLs) technical.replace_value("duration", self.getProperty("cclom:duration", response)) return technical @@ -272,18 +269,10 @@ def getValuespaces(self, response): f"{oeh_quality_login_value} for node-ID {response.meta['item']['ref']['id']}") valuespaces.add_value("dataProtectionConformity", self.getProperty("ccm:dataProtectionConformity", response)) valuespaces.add_value("discipline", self.getProperty("ccm:taxonid", response)) - valuespaces.add_value( - "intendedEndUserRole", - self.getProperty("ccm:educationalintendedenduserrole", response), - ) - valuespaces.add_value( - "educationalContext", self.getProperty("ccm:educationalcontext", response) - ) + valuespaces.add_value("educationalContext", self.getProperty("ccm:educationalcontext", response)) valuespaces.add_value("fskRating", self.getProperty("ccm:fskRating", response)) - valuespaces.add_value( - "learningResourceType", - self.getProperty("ccm:educationallearningresourcetype", response), - ) + valuespaces.add_value("intendedEndUserRole", self.getProperty("ccm:educationalintendedenduserrole", response)) + valuespaces.add_value("learningResourceType", self.getProperty("ccm:educationallearningresourcetype", response)) valuespaces.add_value('new_lrt', self.getProperty("ccm:oeh_lrt", response)) valuespaces.add_value("oer", self.getProperty("ccm:license_oer", response)) valuespaces.add_value("price", self.getProperty("ccm:price", response)) From db94b8407d22354470ea7a02a736c1aabf274f1a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 3 Feb 2023 15:21:27 +0100 Subject: [PATCH 242/590] version bump to v0.1.6 - due to a lot of changes in the underlying edu_sharing_base, version bump oeh_spider to 0.1.6 --- converter/spiders/oeh_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 27ac2129..00de5f1e 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -11,7 +11,7 @@ class OEHSpider(EduSharingBase): url = "https://redaktion.openeduhub.net/edu-sharing/" apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" searchUrl = "search/v1/queries/-home-/" - version = "0.1.5" # last update: 2023-01-20 + version = "0.1.6" # last update: 2023-02-03 mdsId = "mds_oeh" importWhitelist: [str] = None custom_settings = { From 31e42cb05b84a4f0cecd395716385e7a78d5aee7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 3 Feb 2023 15:51:15 +0100 Subject: [PATCH 243/590] logging: change LisumPipeline eafCode logging to debug - logging level 'warning' was slightly too distracting for the eafcode -> taxonid mapping Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 973adcda..962a62d2 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -789,8 +789,8 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy case _: # due to 
having the 'custom'-field as a (raw) list of all eafCodes, this mainly serves # the purpose of reminding us if a 'discipline'-value couldn't be mapped to Lisum - logging.warning(f"Lisum Pipeline failed to map from eafCode {discipline_eaf_code} " - f"to its corresponding ccm:taxonid short-handle") + logging.debug(f"Lisum Pipeline failed to map from eafCode {discipline_eaf_code} " + f"to its corresponding ccm:taxonid short-handle") logging.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") From 937d12565d447f81e238f387fa9516ddd267f2a5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 3 Feb 2023 17:07:36 +0100 Subject: [PATCH 244/590] change: 'discipline'-mapping for "Natur und Umwelt" - Lehrer-Online mapping for "Natur und Umwelt" is now additionally mapped to "Sachunterricht" as well -- (this mapping-improvement was made due to a request of Romy in our meeting at 2022-01-24) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/lehreronline_spider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index 7a332147..3b069351 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -19,7 +19,7 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): # the limit parameter controls the amount of results PER CATEGORY (NOT the total amount of results) # API response with a "limit"-value set to 10.000 might take more than 90s (17.7 MB, 5912 URLs to crawl) ] - version = "0.0.5" # last update: 2022-08-26 + version = "0.0.6" # last update: 2023-02-03 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -120,7 +120,7 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): 'Informationstechnik': 'Informatik', 'Klima, Umwelt, Nachhaltigkeit': 'Nachhaltigkeit', 'MINT: Mathematik, Informatik, Naturwissenschaften und Technik': 'MINT', - 'Natur und Umwelt': 'Environmental education', + 'Natur und Umwelt': ['Environmental education', 'Homeland lessons'], # Umwelterziehung, Sachunterricht 'Religion und Ethik': ['Religion', 'Ethik'], 'Sport und Bewegung': 'Sport', 'SoWi': ['Social education', 'Economics'], From 495208805ec230920ded9ee7bddd18d8bd9def32 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 7 Feb 2023 12:24:47 +0100 Subject: [PATCH 245/590] fix: 'origin'-subfolder creation - subfolders in "SYNC_OBJ//..." 
weren't reliably created for "Themenportal"-materials (Handwerk-macht-Schule / Pubertaet) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/lehreronline_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index 3b069351..fd988aae 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -465,7 +465,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: if thumbnail_url: base.add_value('thumbnail', thumbnail_url) if "origin_folder_name" in metadata_dict.keys(): - base.add_value('origin', metadata_dict.get("origin_folder_name")) + base.replace_value('origin', metadata_dict.get("origin_folder_name")) lom = LomBaseItemloader() From c0655213dd5b28047c0039c33a56eb6fb336a5f5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 7 Feb 2023 19:07:32 +0100 Subject: [PATCH 246/590] feat: LisumPipeline eafCodes for "oeh_spider" - this feature is primarily intended for "oeh_spider": The pipeline will match eafCodes from 'valuespaces.discipline' to 'ccm:taxonentry' by using the 'base.custom'-field -- (this change is made due to KLISUM-205) - fix: removing an obsolete 'list.sort()'-call during 'learningResourceType'-mapping - version bump of "oeh_spider" to v0.1.7 to reflect these Pipeline changes Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 57 +++++++++++++++++++++++++++------ converter/spiders/oeh_spider.py | 2 +- 2 files changed, 49 insertions(+), 10 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 962a62d2..2956bf17 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -9,6 +9,7 @@ # See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html import csv import logging +import re import time from abc import ABCMeta from io import BytesIO @@ -681,7 +682,7 @@ def process_item(self, item, spider): class LisumPipeline(BasicPipeline): - DISCIPLINE_TO_LISUM = { + DISCIPLINE_TO_LISUM_SHORTHANDLE = { "020": "C-WAT", # Arbeitslehre -> Wirtschaft, Arbeit, Technik "060": "C-KU", # Bildende Kunst "080": "C-BIO", # Biologie @@ -754,6 +755,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy """ base_item_adapter = ItemAdapter(item) discipline_lisum_keys = set() + discipline_eafcodes = set() sodix_lisum_custom_lrts = set() if base_item_adapter.get("custom"): custom_field = base_item_adapter.get("custom") @@ -762,8 +764,8 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # first round of mapping from (all) Sodix eafCodes to 'ccm:taxonid' if taxon_entries: for taxon_entry in taxon_entries: - if taxon_entry in self.DISCIPLINE_TO_LISUM: - discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM.get(taxon_entry)) + if taxon_entry in self.DISCIPLINE_TO_LISUM_SHORTHANDLE: + discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM_SHORTHANDLE.get(taxon_entry)) if base_item_adapter.get("valuespaces"): valuespaces = base_item_adapter.get("valuespaces") if valuespaces.get("discipline"): @@ -775,9 +777,10 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy if discipline_list: for discipline_w3id in discipline_list: discipline_eaf_code: str = discipline_w3id.split(sep='/')[-1] - match discipline_eaf_code in self.DISCIPLINE_TO_LISUM: + eaf_code_digits_only_regex: re.Pattern = 
re.compile(r'\d{3,}')
+                        match discipline_eaf_code in self.DISCIPLINE_TO_LISUM_SHORTHANDLE:
                             case True:
-                                discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM.get(discipline_eaf_code))
+                                discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM_SHORTHANDLE.get(discipline_eaf_code))
                                 # ToDo: there are no Sodix eafCode-values for these Lisum keys:
                                 #  - Deutsche Gebärdensprache (C-DGS)
                                 #  - Hebräisch (C-HE)
@@ -789,8 +792,21 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy
                             case _:
                                 # due to having the 'custom'-field as a (raw) list of all eafCodes, this mainly serves
                                 # the purpose of reminding us if a 'discipline'-value couldn't be mapped to Lisum
-                                logging.debug(f"Lisum Pipeline failed to map from eafCode {discipline_eaf_code} "
-                                              f"to its corresponding ccm:taxonid short-handle")
+                                logging.debug(f"LisumPipeline failed to map from eafCode {discipline_eaf_code} "
+                                              f"to its corresponding 'ccm:taxonid' short-handle. Trying Fallback...")
+                                if eaf_code_digits_only_regex.search(discipline_eaf_code):
+                                    # each numerical eafCode must have a length of (minimum) 3 digits
+                                    logging.debug(f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. (Will be "
+                                                  f"used later for 'ccm:taxonentry').")
+                                    discipline_eafcodes.add(discipline_eaf_code)
+                                else:
+                                    # our 'discipline.ttl'-vocab holds custom keys (e.g. 'niederdeutsch', 'oeh04010') which
+                                    # shouldn't be saved into 'ccm:taxonentry' (since they are not part of the regular
+                                    # "EAF Sachgebietssystematik"
+                                    logging.debug(f"LisumPipeline eafCode fallback for {discipline_eaf_code} to "
+                                                  f"'ccm:taxonentry' was not possible. Only eafCodes with a minimum length "
+                                                  f"of 3+ digits are valid. (Please confirm if the provided value is part of "
+                                                  f"the 'EAF Sachgebietssystematik' (see: eafsys.txt))")
 
                 logging.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to "
                               f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}")
@@ -854,8 +870,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy
                         # making sure to exclude '' strings from populating the list
                         lrt_temporary_list.append(lrt_w3id)
                 lrt_list = lrt_temporary_list
-                lrt_list.sort()
-                # after everything is mapped and sorted, save the list:
+                # after everything is mapped, we're saving the (updated) list back to our LRT:
                 valuespaces["learningResourceType"] = lrt_list
 
         # Mapping from valuespaces_raw["learningResourceType"]: "INTERAKTION" -> "interactive_material"
@@ -885,4 +900,28 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy
             discipline_lisum_keys = list(discipline_lisum_keys)
             discipline_lisum_keys.sort()
             valuespaces["discipline"] = discipline_lisum_keys
+        if discipline_eafcodes:
+            # Fallback: saving 'discipline.ttl'-Vocab keys to eafCodes ('ccm:taxonentry')
+            if base_item_adapter.get("custom"):
+                custom_field = base_item_adapter.get("custom")
+                if "ccm:taxonentry" in custom_field:
+                    taxon_entries: list = custom_field.get("ccm:taxonentry")
+                    if taxon_entries:
+                        # if eafCodes already exist in the custom field (e.g.: sodix_spider), we're making sure that
+                        # there are no double entries of the same eafCode
+                        taxon_set = set(taxon_entries)
+                        taxon_set.update(discipline_eafcodes)
+                        taxon_entries = list(taxon_set)
+                        logging.debug(f"LisumPipeline: Saving eafCodes {taxon_entries} to 'ccm:taxonentry'.")
+                        base_item_adapter["custom"]["ccm:taxonentry"] = taxon_entries
+            else:
+                # oeh_spider typically has neither the 'custom'-field nor the 'ccm:taxonentry'-field
+                # Therefore we have to create 
and fill it with the eafCodes that we gathered from our + # 'discipline'-vocabulary-keys. + discipline_eafcodes_list = list(discipline_eafcodes) + logging.debug(f"LisumPipeline: Saving eafCodes {discipline_eafcodes_list} to 'ccm:taxonentry'.") + base_item_adapter.update( + {'custom': { + 'ccm:taxonentry': discipline_eafcodes_list}}) + base_item_adapter["custom"]["ccm:taxonentry"] = discipline_eafcodes_list return item diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index 00de5f1e..df6a3576 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -11,7 +11,7 @@ class OEHSpider(EduSharingBase): url = "https://redaktion.openeduhub.net/edu-sharing/" apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" searchUrl = "search/v1/queries/-home-/" - version = "0.1.6" # last update: 2023-02-03 + version = "0.1.7" # last update: 2023-02-07 mdsId = "mds_oeh" importWhitelist: [str] = None custom_settings = { From bd147ae7d9ba59ffcbbb844b3614e895f046a461 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 8 Feb 2023 13:47:08 +0100 Subject: [PATCH 247/590] fix: LisumPipeline eafCode exclusions - eafCodes that are not part of the standard eafsys.txt will be excluded from being saved to the Lisum repository -- this should fix entries like '900' appearing as raw values in the web-interface - version bump oeh_spider to v0.1.8 Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 26 ++++++++++++++++++++++++-- converter/spiders/oeh_spider.py | 2 +- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 2956bf17..00e94c28 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -723,6 +723,21 @@ class LisumPipeline(BasicPipeline): "2800506": "C-PL", # Polnisch } + EAFCODE_EXCLUSIONS = [ + # eafCodes in this list are used as keys in + # https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl + # but are not part of the (standard) http://agmud.de/wp-content/uploads/2021/09/eafsys.txt + '20090', # "Esperanto" ToDo: remove this entry after the vocab has been corrected + '44099', # "Open Educational Resources" + '64018', # "Nachhaltigkeit" + '72001', # "Zeitgemäße Bildung" + '900', # Medienbildung + '999', # Sonstiges + 'niederdeutsch', + 'oeh01', # "Arbeit, Ernährung, Soziales" + 'oeh04010' # Mechatronik + ] + EDUCATIONALCONTEXT_TO_LISUM = { "elementarbereich": "pre-school", "grundschule": "primary school", @@ -748,6 +763,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy """ Takes a BaseItem and transforms its metadata-values to Lisum-metadataset-compatible values. Touches the following fields within the BaseItem: + - base.custom - valuespaces.discipline - valuespaces.educationalContext - valuespaces.intendedEndUserRole @@ -795,10 +811,16 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy logging.debug(f"LisumPipeline failed to map from eafCode {discipline_eaf_code} " f"to its corresponding 'ccm:taxonid' short-handle. Trying Fallback...") if eaf_code_digits_only_regex.search(discipline_eaf_code): - # each numerical eafCode must have a length of (minimum) 3 digits + # each numerical eafCode must have a length of (minimum) 3 digits to be considered valid logging.debug(f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. 
(Will be "
                                                   f"used later for 'ccm:taxonentry').")
-                                    discipline_eafcodes.add(discipline_eaf_code)
+                                    if discipline_eaf_code not in self.EAFCODE_EXCLUSIONS:
+                                        # making sure to only save eafCodes that are part of the standard eafsys.txt
+                                        discipline_eafcodes.add(discipline_eaf_code)
+                                    else:
+                                        logging.debug(f"LisumPipeline: eafCode {discipline_eaf_code} is not part of 'EAF "
+                                                      f"Sachgebietssystematik' (see: eafsys.txt), therefore skipping this "
+                                                      f"value.")
                                 else:
                                     # our 'discipline.ttl'-vocab holds custom keys (e.g. 'niederdeutsch', 'oeh04010') which
                                     # shouldn't be saved into 'ccm:taxonentry' (since they are not part of the regular
diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py
index df6a3576..de796bbf 100644
--- a/converter/spiders/oeh_spider.py
+++ b/converter/spiders/oeh_spider.py
@@ -11,7 +11,7 @@ class OEHSpider(EduSharingBase):
     url = "https://redaktion.openeduhub.net/edu-sharing/"
     apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/"
     searchUrl = "search/v1/queries/-home-/"
-    version = "0.1.7"  # last update: 2023-02-07
+    version = "0.1.8"  # last update: 2023-02-08
     mdsId = "mds_oeh"
     importWhitelist: [str] = None
     custom_settings = {

From 212646ada080c860afe4906fb63c16d1510da1a4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?=
 <981166+Criamos@users.noreply.github.com>
Date: Thu, 9 Feb 2023 14:05:27 +0100
Subject: [PATCH 248/590] fix: "<enclosure>"-default behaviour

- the <enclosure> URL attribute acts as a fallback for missing <link>-elements (according to Apple's Guidelines)
---
 converter/spiders/base_classes/rss_base.py | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/converter/spiders/base_classes/rss_base.py b/converter/spiders/base_classes/rss_base.py
index 3467f4fa..a592c00f 100644
--- a/converter/spiders/base_classes/rss_base.py
+++ b/converter/spiders/base_classes/rss_base.py
@@ -1,3 +1,5 @@
+import logging
+
 from scrapy.spiders import CrawlSpider
 
 from .lom_base import LomBase
@@ -131,15 +133,21 @@ def getLOMTechnical(self, response):
         link_url = response.meta["item"].xpath("link//text()").get()
         if link_url:
             technical.add_value("location", link_url)
-        elif enclosure_url:
-            technical.add_value("location", enclosure_url)
+
+        guid: str = response.meta["item"].xpath("guid//text()").get()
         guid_is_permalink: str = response.meta["item"].xpath("guid/@isPermaLink").get()
-        if guid_is_permalink:
-            guid: str = response.meta["item"].xpath("guid//text()").get()
-            # if <guid>'s "isPermaLink"-attribute is true, guid points to a URL
-            if guid_is_permalink.strip() == "true" and guid:
+        if guid:
+            # if <guid>'s "isPermaLink"-attribute is true or missing, the guid points to a URL
+            if guid_is_permalink:
+                if guid_is_permalink.strip() == "false" and guid:
+                    logging.debug(f"The {guid} is not a URL. 
Will not save it to 'technical.location'") + elif guid: if guid != response.url: + # making sure to save the provided URI (in addition to the resolved URL) technical.add_value("location", guid) + elif enclosure_url: + # According to Apple RSS Guidelines, the enclosure URL-attribute is considered a fallback for a missing + # element + technical.add_value("location", enclosure_url) return technical def getLOMLifecycle(self, response): From cfaa2c99e87aafe76270478c07624802e8a6ebda Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 Feb 2023 13:28:58 +0100 Subject: [PATCH 249/590] fix: LisumPipeline: w3id-URLs appearing in 'ccm:taxonid' - 'discipline' should not contain any w3id URLs after mapping them to Lisum shorthands/eafCodes anymore -- this was necessary because the 'discipline'-field is written to 'ccm:taxonid' in the Lisum Repository, which expects shorthands instead of w3ids (e.g. "C-GE") - style: code formatting Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 93 +++++++++++++++++---------------- converter/spiders/oeh_spider.py | 2 +- 2 files changed, 49 insertions(+), 46 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 00e94c28..abcf7796 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -682,45 +682,46 @@ def process_item(self, item, spider): class LisumPipeline(BasicPipeline): - DISCIPLINE_TO_LISUM_SHORTHANDLE = { - "020": "C-WAT", # Arbeitslehre -> Wirtschaft, Arbeit, Technik - "060": "C-KU", # Bildende Kunst - "080": "C-BIO", # Biologie - "100": "C-CH", # Chemie - "120": "C-DE", # Deutsch - "160": "C-Eth", # Ethik - "200": "C-FS", # Fremdsprachen - "220": "C-GEO", # Geographie, - "240": "C-GE", # Geschichte - "260": "B-GES", # Gesundheit -> Gesundheitsförderung - "380": "C-MA", # Mathematik - "400": "B-BCM", # Medienerziehung / Medienpädagogik -> Basiscurriculum Medienbildung - "420": "C-MU", # Musik - "450": "C-Phil", # Philosophie - "460": "C-Ph", # Physik - "480": "C-PB", # Politische Bildung - "510": "C-Psy", # Psychologie - "520": "C-LER", # Religion -> Lebensgestaltung-Ethik-Religionskunde - "560": "B-SE", # Sexualerziehung - "660": "B-MB", # Verkehrserziehung -> "Mobilitätsbildung und Verkehrserziehung" - "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" - "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater - "20001": "C-EN", # Englisch - "20002": "C-FR", # Französisch - "20003": "C-AGR", # Griechisch -> Altgriechisch - "20004": "C-IT", # Italienisch - "20005": "C-La", # Latein - "20006": "C-RU", # Russisch - "20007": "C-ES", # Spanisch - "20008": "C-TR", # Türkisch - "20011": "C-PL", # Polnisch - "20014": "C-PT", # Portugiesisch - "20041": "C-ZH", # Chinesisch - "28010": "C-SU", # Sachkunde -> Sachunterricht - "32002": "C-Inf", # Informatik - "46014": "C-AS", # Astronomie - "48005": "C-GEWIWI", # Gesellschaftspolitische Gegenwartsfragen -> Gesellschaftswissenschaften - "2800506": "C-PL", # Polnisch + DISCIPLINE_TO_LISUM_SHORTHAND = { + "020": "C-WAT", # Arbeitslehre -> Wirtschaft, Arbeit, Technik + "060": "C-KU", # Bildende Kunst + "080": "C-BIO", # Biologie + "100": "C-CH", # Chemie + "120": "C-DE", # Deutsch + "160": "C-Eth", # Ethik + "200": "C-FS", # Fremdsprachen + "220": "C-GEO", # Geographie, + "240": "C-GE", # Geschichte + "260": "B-GES", # Gesundheit -> Gesundheitsförderung + "380": "C-MA", # Mathematik + "400": "B-BCM", # Medienerziehung / Medienpädagogik -> Basiscurriculum 
Medienbildung + "420": "C-MU", # Musik + "450": "C-Phil", # Philosophie + "460": "C-Ph", # Physik + "480": "C-PB", # Politische Bildung + "510": "C-Psy", # Psychologie + "520": "C-LER", # Religion -> Lebensgestaltung-Ethik-Religionskunde + "560": "B-SE", # Sexualerziehung + # "600": "", # ToDo: "Sport" is not available as a Lisum Rahmenlehrplan shorthand + "660": "B-MB", # Verkehrserziehung -> "Mobilitätsbildung und Verkehrserziehung" + "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" + "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater + "20001": "C-EN", # Englisch + "20002": "C-FR", # Französisch + "20003": "C-AGR", # Griechisch -> Altgriechisch + "20004": "C-IT", # Italienisch + "20005": "C-La", # Latein + "20006": "C-RU", # Russisch + "20007": "C-ES", # Spanisch + "20008": "C-TR", # Türkisch + "20011": "C-PL", # Polnisch + "20014": "C-PT", # Portugiesisch + "20041": "C-ZH", # Chinesisch + "28010": "C-SU", # Sachkunde -> Sachunterricht + "32002": "C-Inf", # Informatik + "46014": "C-AS", # Astronomie + "48005": "C-GEWIWI", # Gesellschaftspolitische Gegenwartsfragen -> Gesellschaftswissenschaften + "2800506": "C-PL", # Polnisch } EAFCODE_EXCLUSIONS = [ @@ -777,11 +778,11 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy custom_field = base_item_adapter.get("custom") if "ccm:taxonentry" in custom_field: taxon_entries: list = custom_field.get("ccm:taxonentry") - # first round of mapping from (all) Sodix eafCodes to 'ccm:taxonid' + # first round of mapping from SODIX eafCodes to 'ccm:taxonid' if taxon_entries: for taxon_entry in taxon_entries: - if taxon_entry in self.DISCIPLINE_TO_LISUM_SHORTHANDLE: - discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM_SHORTHANDLE.get(taxon_entry)) + if taxon_entry in self.DISCIPLINE_TO_LISUM_SHORTHAND: + discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM_SHORTHAND.get(taxon_entry)) if base_item_adapter.get("valuespaces"): valuespaces = base_item_adapter.get("valuespaces") if valuespaces.get("discipline"): @@ -794,9 +795,9 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy for discipline_w3id in discipline_list: discipline_eaf_code: str = discipline_w3id.split(sep='/')[-1] eaf_code_digits_only_regex: re.Pattern = re.compile(r'\d{3,}') - match discipline_eaf_code in self.DISCIPLINE_TO_LISUM_SHORTHANDLE: + match discipline_eaf_code in self.DISCIPLINE_TO_LISUM_SHORTHAND: case True: - discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM_SHORTHANDLE.get(discipline_eaf_code)) + discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM_SHORTHAND.get(discipline_eaf_code)) # ToDo: there are no Sodix eafCode-values for these Lisum keys: # - Deutsche Gebärdensprache (C-DGS) # - Hebräisch (C-HE) @@ -831,6 +832,8 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy f"the 'EAF Sachgebietssystematik' (see: eafsys.txt))") logging.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") + valuespaces["discipline"] = list() # clearing 'discipline'-field, so we don't accidentally write the + # remaining OEH w3id-URLs to Lisum's 'ccm:taxonid'-field if valuespaces.get("educationalContext"): # mapping educationalContext values from OEH SKOS to lisum keys @@ -921,7 +924,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy if discipline_lisum_keys: discipline_lisum_keys = list(discipline_lisum_keys) 
discipline_lisum_keys.sort() - valuespaces["discipline"] = discipline_lisum_keys + valuespaces["discipline"] = discipline_lisum_keys # only shorthand values are saved to 'ccm:taxonid' if discipline_eafcodes: # Fallback: saving 'discipline.ttl'-Vocab keys to eafCodes ('ccm:taxonentry') if base_item_adapter.get("custom"): diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index de796bbf..627bfaa2 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -11,7 +11,7 @@ class OEHSpider(EduSharingBase): url = "https://redaktion.openeduhub.net/edu-sharing/" apiUrl = "https://redaktion.openeduhub.net/edu-sharing/rest/" searchUrl = "search/v1/queries/-home-/" - version = "0.1.8" # last update: 2023-02-08 + version = "0.1.9" # last update: 2023-02-14 mdsId = "mds_oeh" importWhitelist: [str] = None custom_settings = { From 9b53b2a04cb6b2e0785a7f0ed53ab9569f801dad Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 Feb 2023 15:58:00 +0100 Subject: [PATCH 250/590] fix: LisumPipeline eafCode edge-cases - fix for eafCode edge-cases where OEH 'discipline' vocab keys don't line up with "eafsys.txt" eafCodes -- for context, see: https://github.com/openeduhub/oeh-metadata-vocabs/pull/36 - add: additional Lisum shorthand mapping for 'discipline' value "320" (Informatik) to "C-Inf" Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index abcf7796..aea7f888 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -693,6 +693,7 @@ class LisumPipeline(BasicPipeline): "220": "C-GEO", # Geographie, "240": "C-GE", # Geschichte "260": "B-GES", # Gesundheit -> Gesundheitsförderung + "320": "C-Inf", # Informatik "380": "C-MA", # Mathematik "400": "B-BCM", # Medienerziehung / Medienpädagogik -> Basiscurriculum Medienbildung "420": "C-MU", # Musik @@ -728,7 +729,8 @@ class LisumPipeline(BasicPipeline): # eafCodes in this list are used as keys in # https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl # but are not part of the (standard) http://agmud.de/wp-content/uploads/2021/09/eafsys.txt - '20090', # "Esperanto" ToDo: remove this entry after the vocab has been corrected + '04010', # OEH: "Körperpflege" <-> eafCode 04010: "Mechatronik" + '20090', # OEH: "Esperanto" <-> eafCode: 20080 '44099', # "Open Educational Resources" '64018', # "Nachhaltigkeit" '72001', # "Zeitgemäße Bildung" @@ -736,7 +738,7 @@ class LisumPipeline(BasicPipeline): '999', # Sonstiges 'niederdeutsch', 'oeh01', # "Arbeit, Ernährung, Soziales" - 'oeh04010' # Mechatronik + 'oeh04010' # OEH: "Mechatronik" <-> eafCode: 04010 (Mechatronik) ] EDUCATIONALCONTEXT_TO_LISUM = { @@ -811,6 +813,14 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # the purpose of reminding us if a 'discipline'-value couldn't be mapped to Lisum logging.debug(f"LisumPipeline failed to map from eafCode {discipline_eaf_code} " f"to its corresponding 'ccm:taxonid' short-handle. 
Trying Fallback...") + match discipline_eaf_code: + # catching edge-cases where OEH 'discipline'-vocab-keys don't line up with eafsys.txt values + case "20090": + discipline_eafcodes.add("20080") # Esperanto + case "oeh04010": + discipline_eafcodes.add("04010") # Mechatronik + case "04010": + discipline_eafcodes.add("2600103") # Körperpflege if eaf_code_digits_only_regex.search(discipline_eaf_code): # each numerical eafCode must have a length of (minimum) 3 digits to be considered valid logging.debug(f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. (Wil be " From 25b49a49dd72347a80d1241e8b5975cde0837db0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 15 Feb 2023 14:24:22 +0100 Subject: [PATCH 251/590] chore: update requirements.txt - bumped package versions according to Dependabot recommendations from 2023-02-14: -- 'wheel', 'lxml', 'Pillow', 'certifi' - additionally, also bumped 'playwright' and 'requests' package to a newer version Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- requirements.txt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index 921f5575..178f605c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -wheel==0.37.1 +wheel==0.38.4 image dateparser==1.1.6 isodate==0.6.1 @@ -8,7 +8,7 @@ scrapy-splash==0.8.0 python-dateutil==2.8.2 python-dotenv==0.20.0 Scrapy==2.6.3 -requests==2.28.1 +requests==2.28.2 vobject==0.9.6.1 xmltodict~=0.12.0 overrides==3.1.0 @@ -16,13 +16,13 @@ jmespath==1.0.0 flake8==5.0.3 pytest==7.1.1 extruct~=0.13.0 -lxml~=4.6.3 +lxml==4.9.2 w3lib~=1.22.0 itemloaders~=1.0.4 -Pillow==9.1.0 +Pillow==9.4.0 itemadapter==0.5.0 six==1.16.0 -certifi==2021.10.8 +certifi==2022.12.7 urllib3~=1.26.09 -playwright==1.27.1 +playwright==1.30.0 pyOpenSSL==22.1.0 \ No newline at end of file From 49fbfe894ef84d618fa17785e1b948243b9a0c58 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 15 Feb 2023 18:34:51 +0100 Subject: [PATCH 252/590] drop 'pyppeteer' from requirements.txt and WebTools - the 'build-and-publish'-pipeline failed due to a dependency conflict between pyppeteer and playwright - since all crawlers which previously used pyppeteer switched to Playwright a while ago anyway, I removed the obsolete package from our requirements.txt and web_tools.py Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 23 ----------------------- requirements.txt | 1 - 2 files changed, 24 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index 998c839d..d3536324 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -3,7 +3,6 @@ from enum import Enum import html2text -import pyppeteer import requests from playwright.async_api import async_playwright from scrapy.utils.project import get_project_settings @@ -14,8 +13,6 @@ class WebEngine(Enum): # Splash (default engine) Splash = 'splash', - # Pyppeteer is controlling a headless Chrome browser - Pyppeteer = 'pyppeteer' # Playwright is controlling a headless Chrome browser Playwright = 'playwright' @@ -25,19 +22,11 @@ class WebTools: def getUrlData(url: str, engine=WebEngine.Splash): if engine == WebEngine.Splash: return WebTools.__getUrlDataSplash(url) - elif engine == WebEngine.Pyppeteer: - return WebTools.__getUrlDataPyppeteer(url) elif engine == WebEngine.Playwright: return WebTools.__getUrlDataPlaywright(url) raise Exception("Invalid engine") - 
@staticmethod - def __getUrlDataPyppeteer(url: str): - # html = "test" - html = asyncio.run(WebTools.fetchDataPyppeteer(url)) - return {"html": html, "text": WebTools.html2Text(html), "cookies": None, "har": None} - @staticmethod def __getUrlDataPlaywright(url: str): playwright_dict = asyncio.run(WebTools.fetchDataPlaywright(url)) @@ -82,18 +71,6 @@ def __getUrlDataSplash(url: str): else: return {"html": None, "text": None, "cookies": None, "har": None} - @staticmethod - async def fetchDataPyppeteer(url: str): - browser = await pyppeteer.connect({ - 'browserWSEndpoint': env.get('PYPPETEER_WS_ENDPOINT'), - 'logLevel': 'WARN' - }) - page = await browser.newPage() - await page.goto(url) - content = await page.content() - # await page.close() - return content - @staticmethod async def fetchDataPlaywright(url: str): # relevant docs for this implementation: https://hub.docker.com/r/browserless/chrome#playwright and diff --git a/requirements.txt b/requirements.txt index 178f605c..774b83a5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,6 @@ wheel==0.38.4 image dateparser==1.1.6 isodate==0.6.1 -pyppeteer==1.0.2 html2text~=2020.1.16 scrapy-splash==0.8.0 python-dateutil==2.8.2 From 3dd2183b4e36c6ee1146a30f44d61b140160f6db Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 17 Feb 2023 15:53:11 +0100 Subject: [PATCH 253/590] fix: Constants (NEW_LRT) - replaced URLs by uuids because the pipelines expect uuids, 'prefLabel' or 'altLabel' string values Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/constants.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index eac55826..38235b0a 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -90,8 +90,8 @@ class Constants: LICENSE_CUSTOM: Final[str] = "CUSTOM" # Custom License, use the license description field for arbitrary values LICENSE_NONPUBLIC: Final[str] = "NONPUBLIC" - NEW_LRT_MATERIAL: Final[str] = "https://w3id.org/openeduhub/vocabs/new_lrt/1846d876-d8fd-476a-b540-b8ffd713fedb" - NEW_LRT_TOOL: Final[str] = "https://w3id.org/openeduhub/vocabs/new_lrt/cefccf75-cba3-427d-9a0f-35b4fedcbba1" + NEW_LRT_MATERIAL: Final[str] = "1846d876-d8fd-476a-b540-b8ffd713fedb" + NEW_LRT_TOOL: Final[str] = "cefccf75-cba3-427d-9a0f-35b4fedcbba1" SOURCE_TYPE_SPIDER: int = 1 SOURCE_TYPE_EDITORIAL: int = 2 From cd6f9dd317115051f2a8f39947b70cd158e2e554 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 17 Feb 2023 16:34:43 +0100 Subject: [PATCH 254/590] serlo_spider v0.2.4 - fix: 'description' (+ fallbacks) - refactor: 'base.sourceId' and 'base.hash' (+ fallbacks) - code cleanup Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 92 ++++++++++++++++--------------- 1 file changed, 47 insertions(+), 45 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index cf0980cc..d818c1e6 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -1,3 +1,4 @@ +import datetime import json import requests @@ -17,7 +18,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.3" # last update: 2022-08-26 + version = "0.2.4" # last update: 2023-02-17 custom_settings = settings.BaseSettings({ # 
playwright cause of issues with thumbnails+text for serlo "WEB_TOOLS": WebEngine.Playwright @@ -102,13 +103,30 @@ def start_requests(self): } ) - def getId(self, response=None) -> str: - # we set this value in the parse()-method as 'sourceId' in the BaseItemLoader - pass - - def getHash(self, response=None) -> str: - # we set this value in the parse()-method as 'hash' in the BaseItemLoader - pass + def getId(self, response=None, graphql_json=None) -> str: + # The actual URL of a learning material is dynamic and can change at any given time + # (e.g. when the title gets changed by a serlo editor/contributor), + # therefore we use the "id"-field and its identifier value + # e.g.: "id": "https://serlo.org/2097" + # "value": "2097" + graphql_json: dict = graphql_json + if "identifier" in graphql_json: + if "value" in graphql_json["identifier"]: + identifier_value = graphql_json["identifier"]["value"] + if identifier_value: + return identifier_value + else: + return response.url + + def getHash(self, response=None, graphql_json=None) -> str: + graphql_json: dict = graphql_json + if "dateModified" in graphql_json: + date_modified: str = graphql_json["dateModified"] + if date_modified: + hash_combined = f"{date_modified}{self.version}" + return hash_combined + else: + return f"{datetime.datetime.now().isoformat()}{self.version}" def parse(self, response, **kwargs): graphql_json: dict = kwargs.get("graphql_item") @@ -126,15 +144,9 @@ def parse(self, response, **kwargs): # # TODO: fill "base"-keys with values for # # - thumbnail recommended base.add_value('screenshot_bytes', screenshot_bytes) - # The actual URL of a learning material is dynamic and can change at any given time - # (e.g. when the title gets changed by a serlo editor), therefore we use the "id"-field - # or the identifier number as a stable ID - # base.add_value('sourceId', graphql_json["id"]) # e.g.: "id": "https://serlo.org/2097" - base.add_value('sourceId', graphql_json["identifier"]["value"]) # e.g.: "value": "2097" - hash_temp: str = graphql_json["dateModified"] + self.version - base.add_value('hash', hash_temp) + base.add_value('sourceId', self.getId(response, graphql_json=graphql_json)) + base.add_value('hash', self.getHash(response, graphql_json=graphql_json)) base.add_value('lastModified', graphql_json["dateModified"]) - type_list: list = graphql_json["type"] # thumbnail_url: str = "This string should hold the thumbnail URL" # base.add_value('thumbnail', thumbnail_url) if "publisher" in json_ld: @@ -152,36 +164,33 @@ def parse(self, response, **kwargs): title_1st_try: str = graphql_json["headline"] # not all materials carry a title in the GraphQL API, therefore we're trying to grab a valid title from # different sources (GraphQL > json_ld > header) - if title_1st_try is not None: + if title_1st_try: general.add_value('title', title_1st_try) - elif title_1st_try is None: + elif not title_1st_try: title_2nd_try = json_ld["name"] - if title_2nd_try is not None: + if title_2nd_try: general.add_value('title', title_2nd_try) - if title_1st_try is None and title_2nd_try is None: + if not title_1st_try and not title_2nd_try: title_from_header = response.xpath('//meta[@property="og:title"]/@content').get() - if title_from_header is not None: + if title_from_header: general.add_value('title', title_from_header) - # not all graphql entries have a description either, therefore we try to grab that from different sources - # (GraphQL > json_ld > header > first paragraph (from the DOM itself)) + # not all GraphQL entries have a description 
either, therefore we try to grab that from different sources + # (GraphQL > JSON-LD > DOM header) + description_1st_try = str() + description_2nd_try = str() if "description" in graphql_json: description_1st_try: str = graphql_json["description"] - if description_1st_try is not None and len(description_1st_try) != 0: + if description_1st_try: general.add_value('description', description_1st_try) - elif "description" in json_ld: - # some json_ld containers don't have a description + if not description_1st_try and "description" in json_ld: + # some json_ld containers don't have a description either description_2nd_try: str = json_ld["description"] - if description_2nd_try is not None and len(description_2nd_try) != 0: + if description_2nd_try: general.add_value('description', description_2nd_try) - # elif len(description_1st_try) == 0 and len(description_2nd_try) == 0: - else: - description_from_header: str = response.xpath('//meta[@name="description"]/@content').get() - if description_from_header is not None and len(description_from_header) != 0: - general.add_value('description', description_from_header) - else: - description_from_first_paragraph = response.xpath('//p[@class="serlo-p"]/text()').get() - if len(description_from_first_paragraph) != 0: - general.add_value('description', description_from_first_paragraph) + elif not description_1st_try and not description_2nd_try: + description_from_header: str = response.xpath('//meta[@name="description"]/@content').get() + if description_from_header: + general.add_value('description', description_from_header) in_language: list = graphql_json["inLanguage"] general.add_value('language', in_language) # ToDo: keywords would be extremely useful, but aren't supplied by neither the API / JSON_LD nor the header @@ -241,13 +250,10 @@ def parse(self, response, **kwargs): # # - keyword optional # lom.add_value('classification', classification.load_item()) - # # once you've filled "general", "technical", "lifecycle" and "educational" with values, - # # the LomBaseItem is loaded into the "base"-BaseItemLoader base.add_value('lom', lom.load_item()) vs = ValuespaceItemLoader() vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) - vs.add_value('new_lrt', type_list) # # for possible values, either consult https://vocabs.openeduhub.de # # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs # # TODO: fill "valuespaces"-keys with values for @@ -255,14 +261,10 @@ def parse(self, response, **kwargs): # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/conditionsOfAccess.ttl) # # - educationalContext optional # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/educationalContext.ttl) - # # - toolCategory optional - # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/toolCategory.ttl) # # - accessibilitySummary optional # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/accessibilitySummary.ttl) # # - dataProtectionConformity optional # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/dataProtectionConformity.ttl) - # # - fskRating optional - # # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/fskRating.ttl) if "audience" in json_ld: # mapping educationalAudienceRole to IntendedEndUserRole here @@ -313,7 +315,7 @@ def parse(self, response, **kwargs): elif graphql_json["isAccessibleForFree"] is False: # only set the price to "kostenpflichtig" if it's explicitly stated, otherwise we'll leave it empty vs.add_value('price', 'yes') - if 
graphql_json["learningResourceType"] is not None: + if graphql_json["learningResourceType"]: # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) vs.add_value('learningResourceType', graphql_json["learningResourceType"]) @@ -324,7 +326,7 @@ def parse(self, response, **kwargs): # # - author recommended # # - expirationDate optional (for content that expires, e.g. ÖR-Mediatheken) license_url = graphql_json["license"]["id"] - if license_url is not None: + if license_url: lic.add_value('url', license_url) base.add_value('license', lic.load_item()) From e6f5156423042756005c5c070a9340bcf175bd3e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 17 Feb 2023 17:24:28 +0100 Subject: [PATCH 255/590] fix: serlo_spider crashing 'Splash'-container - fix: 'ResponseItemLoader' call to LomBase -- since serlo_spider already (completely) relies on Playwright for text and screenshot extraction, I replaced the (one) remaining use of Splash (which was called by super().mapResponse) from serlo_spider -- serlo webpages reliably crashed the 'Splash'-container during the start of a crawl Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index d818c1e6..e53ab2ad 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -7,7 +7,7 @@ from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase from converter.web_tools import WebEngine, WebTools @@ -333,9 +333,12 @@ def parse(self, response, **kwargs): permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - response_loader = super().mapResponse(response) + response_loader = ResponseItemLoader() + response_loader.replace_value('headers', response.headers) response_loader.replace_value('html', html_body) + response_loader.replace_value('status', response.status) response_loader.replace_value('text', html_text) + response_loader.replace_value('url', self.getUri(response)) base.add_value('response', response_loader.load_item()) yield base.load_item() From 3e1022cc0e92773649e73280663965cf5d793c04 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 2 Mar 2023 00:11:56 +0100 Subject: [PATCH 256/590] fix: save both the 'id'-URL and resolved URL to 'technical.location' - if the URL found within OERSI's '_source.id'-field is different from the resolved URL by Scrapy, both strings will be saved to 'technical.location' -- this might be necessary for future duplicate detection - this change was made due to KLISUM-212 Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 6d454ad2..6ec3d49c 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -635,20 +635,19 @@ def parse(self, response: scrapy.http.Response, 
**kwargs): lom.add_value("general", general.load_item()) technical = LomTechnicalItemLoader() - technical.add_value( - "format", "text/html" - ) # e.g. if the learning object is a web-page if "id" in elastic_item_source: identifier_url: str = elastic_item_source.get( "id" - ) # this URL REQUIRED and should always be available + ) # this URL is REQUIRED and should always be available # see https://dini-ag-kim.github.io/amb/draft/#id if identifier_url: - technical.add_value("location", identifier_url) - # the identifier_url should be more stable/robust than the current response.url - # navigated by the crawler - else: - technical.add_value("location", response.url) + if identifier_url != response.url: + technical.add_value("location", identifier_url) + # the identifier_url should be more stable/robust than the (resolved) response.url in the long term, + # so we will save both + technical.add_value("location", response.url) + else: + technical.add_value("location", response.url) lom.add_value("technical", technical.load_item()) authors = self.get_lifecycle_author( From c032727939098e559b873b8c88f8912c72eb5576 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 3 Mar 2023 18:59:20 +0100 Subject: [PATCH 257/590] serlo_spider v0.2.5 - fix 'intendedEndUserRole' mapping for "mentor" (-> "counsellor") -- the 'intendedEndUserRole'-Vocab had a misplaced description string, which was placed below "author" and was actually meant to be the description string of "counsellor" Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index e53ab2ad..2c1105fc 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -18,7 +18,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.4" # last update: 2023-02-17 + version = "0.2.5" # last update: 2023-03-03 custom_settings = settings.BaseSettings({ # playwright cause of issues with thumbnails+text for serlo "WEB_TOOLS": WebEngine.Playwright @@ -33,7 +33,7 @@ class SerloSpider(scrapy.Spider, LomBase): # A trainer or educator with administrative authority and responsibility. "general public": "other", # The public at large. - "mentor": "author", + "mentor": "counsellor", # Someone who advises, trains, supports, and/or guides. "peer tutor": ["learner", "other"], # The peer learner serving as tutor of another learner. 
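Note on the patch above: the two mapping entries shown in the hunk ("mentor" -> "counsellor", "peer tutor" -> ["learner", "other"]) come from a one-to-many mapping table whose application logic is not visible in this excerpt. The following is only a minimal, hypothetical sketch of how such a table is typically flattened before filling the 'intendedEndUserRole' valuespace; the dict name and the helper function are illustrative assumptions, not code from serlo_spider.

    # Hypothetical sketch (not part of the patch series): flattening a
    # one-to-many audience-role mapping before writing 'intendedEndUserRole'.
    EDUCATIONAL_AUDIENCE_ROLE_MAPPING = {
        "mentor": "counsellor",
        "peer tutor": ["learner", "other"],
    }

    def map_intended_end_user_roles(audience_roles: list) -> list:
        mapped_roles = []
        for role in audience_roles:
            target = EDUCATIONAL_AUDIENCE_ROLE_MAPPING.get(role, role)
            if isinstance(target, list):
                # a single source role can map to several target roles
                mapped_roles.extend(target)
            else:
                mapped_roles.append(target)
        return mapped_roles

    # e.g. map_intended_end_user_roles(["mentor", "peer tutor"])
    # -> ["counsellor", "learner", "other"]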
From 16608263398dc8fe7ccbaf9059ab55750898e280 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 3 Mar 2023 19:11:52 +0100 Subject: [PATCH 258/590] LisumPipeline mappings for "Medienbildung" and OEH LRT "text" - discipline '900' -> Lisum shorthand 'B-BCM' (Basiscurriculum Medienbildung) - Lisum "learningResourceType" valuespace should natively support the LRT value 'text' now, therefore the previous mapping is no longer needed --- converter/pipelines.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index aea7f888..64ffc1f8 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -706,6 +706,7 @@ class LisumPipeline(BasicPipeline): # "600": "", # ToDo: "Sport" is not available as a Lisum Rahmenlehrplan shorthand "660": "B-MB", # Verkehrserziehung -> "Mobilitätsbildung und Verkehrserziehung" "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" + "900": "B-BCM", # Medienbildung -> "Basiscurriculum Medienbildung" "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater "20001": "C-EN", # Englisch "20002": "C-FR", # Französisch @@ -759,7 +760,6 @@ class LisumPipeline(BasicPipeline): "open_activity": "", # exists in 2 out of 60.000 items "broadcast": "audio", "demonstration": ["demonstration", "image"], # "Veranschaulichung" - "text": "teaching_aids", # "Arbeitsmaterial" } def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy.Item]: From 809db250cfb65f200aa4ddef463de8a498776db6 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 7 Mar 2023 15:31:09 +0100 Subject: [PATCH 259/590] tutory_spider v0.1.3 - change: 'license.internal' no longer defaults to 'copyright' -- this change was requested as a temporary workaround until mixed Tutory licenses can be systematically determined - fix: API pagination -- the previous method of paginating through Tutory's API didn't work anymore because the old 'pageSize'-URL-parameter nowadays returns an HTTP Error 502 (Bad Gateway) -- tutory_spider will attempt to crawl through the API pages in iterations of 5000. If the API returns similar HTTP Errors in the future, try lowering the "api_pagesize_limit"-variable. 
- style: code formatting (via black)

Signed-off-by: criamos <981166+Criamos@users.noreply.github.com>
---
 converter/spiders/tutory_spider.py | 79 +++++++++++++++++++++---------
 1 file changed, 55 insertions(+), 24 deletions(-)

diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py
index fa90b866..507ddee7 100644
--- a/converter/spiders/tutory_spider.py
+++ b/converter/spiders/tutory_spider.py
@@ -1,8 +1,10 @@
+import logging
+import re
+
 import scrapy
 from scrapy.selector import Selector
 from scrapy.spiders import CrawlSpider
 
-from converter.constants import Constants
 from .base_classes import LomBase, JSONBase
 
 
@@ -12,27 +14,61 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase):
     url = "https://www.tutory.de/"
     objectUrl = "https://www.tutory.de/bereitstellung/dokument/"
     baseUrl = "https://www.tutory.de/api/v1/share/"
-    version = "0.1.2"  # last update: 2022-05-23
-    custom_settings = {
-        "AUTOTHROTTLE_ENABLED": True,
-        "ROBOTSTXT_OBEY": False,
-        "AUTOTHROTTLE_DEBUG": True
-    }
+    version = "0.1.3"  # last update: 2023-03-07
+    custom_settings = {"AUTOTHROTTLE_ENABLED": True, "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_DEBUG": True}
+
+    api_pagesize_limit = 5000
+    # the old API pageSize of 999999 (which was used in 2021) doesn't work anymore and throws a 502 Error (Bad Gateway).
+    # Setting the pageSize to 5000 appears to be a reasonable value with an API response time of 12-15s
 
     def __init__(self, **kwargs):
         LomBase.__init__(self, **kwargs)
 
     def start_requests(self):
-        url = self.baseUrl + "worksheet?groupSlug=entdecken&pageSize=999999"
-        yield scrapy.Request(url=url, callback=self.parse_list)
-
-    def parse_list(self, response: scrapy.http.TextResponse):
-        data = response.json()
-        for j in data["worksheets"]:
-            response_copy = response.replace(url=self.objectUrl + j["id"])
-            response_copy.meta["item"] = j
-            if self.hasChanged(response_copy):
-                yield self.parse(response_copy)
+        first_url: str = self.assemble_tutory_api_url(api_page=0)
+        yield scrapy.Request(url=first_url, callback=self.parse_api_page)
+
+    def parse_api_page(self, response: scrapy.http.TextResponse):
+        """
+        This method tries to parse the current pagination parameter from response.url and yields two types of
+        scrapy.Requests:
+        1) if the "worksheets"-list isn't empty, try to crawl the next API page
+        2) if there are "worksheets" in the current JSON Response, try to crawl the individual items
+        """
+        json_data: dict = response.json()
+        page_regex = re.compile(r"&page=(?P<page>\d+)")
+        pagination_parameter = page_regex.search(response.url)
+        pagination_current_page: int = 0
+        if pagination_parameter:
+            pagination_current_page: int = pagination_parameter.groupdict().get("page")
+        if "total" in json_data:
+            total_items = json_data.get("total")
+            logging.info(
+                f"Currently crawling Tutory API page {pagination_current_page} -> {response.url} // "
+                f"Expected items (in total): {total_items}"
+            )
+        pagination_next_page: int = int(pagination_current_page) + 1
+        url_next_page = self.assemble_tutory_api_url(pagination_next_page)
+        if "worksheets" in json_data:
+            worksheets_data: list = json_data.get("worksheets")
+            if worksheets_data:
+                # only crawl the next page if the "worksheets"-dict isn't empty
+                yield scrapy.Request(url=url_next_page, callback=self.parse_api_page)
+                logging.info(
+                    f"Tutory API page {pagination_current_page} is expected to yield " f"{len(worksheets_data)} items." 
+                )
+                for j in worksheets_data:
+                    response_copy = response.replace(url=self.objectUrl + j["id"])
+                    response_copy.meta["item"] = j
+                    if self.hasChanged(response_copy):
+                        yield self.parse(response_copy)
+
+    def assemble_tutory_api_url(self, api_page: int):
+        url_current_page = (
+            f"{self.baseUrl}worksheet?groupSlug=entdecken&pageSize={str(self.api_pagesize_limit)}"
+            f"&page={str(api_page)}"
+        )
+        return url_current_page
 
     def getId(self, response=None):
         return str(response.meta["item"]["id"])
@@ -64,27 +100,22 @@ def getValuespaces(self, response):
             )
         )
         valuespaces.add_value("discipline", discipline)
-
-        # valuespaces.add_value("learningResourceType", "worksheet")  # remove this value when reaching crawler v0.1.3
         valuespaces.add_value("new_lrt", "36e68792-6159-481d-a97b-2c00901f4f78")  # Arbeitsblatt
         return valuespaces
 
     def getLicense(self, response=None):
         license_loader = LomBase.getLicense(self, response)
-        license_loader.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW)
         return license_loader
 
     def getLOMGeneral(self, response=None):
         general = LomBase.getLOMGeneral(self, response)
         general.add_value("title", response.meta["item"]["name"])
-        if 'description' in response.meta["item"]:
+        if "description" in response.meta["item"]:
            general.add_value("description", response.meta["item"]["description"])
         else:
             html = self.getUrlData(response.url)["html"]
             if html:
-                data = (
-                    Selector(text=html).xpath('//ul[contains(@class,"worksheet-pages")]//text()').getall()
-                )
+                data = Selector(text=html).xpath('//ul[contains(@class,"worksheet-pages")]//text()').getall()
                 cutoff = 4
                 if len(data) > cutoff:
                     for i in range(cutoff):

From cb6705a83b3957c51db68c852cd10401fd7f2a71 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?=
 <981166+Criamos@users.noreply.github.com>
Date: Thu, 23 Feb 2023 16:42:39 +0100
Subject: [PATCH 260/590] feat: license_mapper and tests (squashed)

- status: all tests passed
- ToDos: cleanup / docs / more test-cases / refactoring
- style: code formatting (via black)
---
 converter/util/license_mapper.py      | 191 ++++++++++++++++++++++++++
 converter/util/test_license_mapper.py |  63 +++++++++
 2 files changed, 254 insertions(+)
 create mode 100644 converter/util/license_mapper.py
 create mode 100644 converter/util/test_license_mapper.py

diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py
new file mode 100644
index 00000000..374795f6
--- /dev/null
+++ b/converter/util/license_mapper.py
@@ -0,0 +1,191 @@
+import logging
+import re
+
+from converter.constants import Constants
+
+
+class LicenseMapper:
+    """
+    This (rudimentary) LicenseMapper is intended to help you provide (correct) values for the 'LicenseItem'-fields
+    'internal' and 'url'.
+
+    Usage scenario:
+    1) Try to map a string for the 'url'-field
+    2) If result is None: Try to map the string to the 'internal'-field
+    3) If 'internal'-result is None:
+        Use this information to set 'internal' to 'CUSTOM' and save the string as a custom license description.
+    """
+
+    logging.basicConfig(level=logging.DEBUG)  # ToDo: remove me after debugging
+
+    cc_pattern = re.compile(
+        r"(?<=c{2}.)(?P<CC_TYPE>by(.[acdns]{2}){0,3})"
+        r".?(?P<CC_VERSION>\d.\d)?"
+        r"|(?P<PDM>public.?domain|pdm|gemeinfrei)"
+        r"|(?P<CC_ZERO>c{2}.?0|cc.zero|creative.?commons.?zero)"
+    )
+
+    # ToDo:
+    #  - gather more license string edge-cases from debug crawlers for test cases
+    #  - feature-idea: fill up provided 'LicenseItemLoader' automatically? 
+ # flow: try 'url' + # -> fallback: try 'internal' + # -> fallback: set 'internal' to 'CUSTOM' & save string to 'description'-field? + + def get_license_url(self, license_string: str = None) -> str | None: + """ + This method can be used to extract a value intended for the 'LicenseItem'-field 'url'. + If no license could be mapped, it will return None. + """ + license_string: str = license_string + if license_string: + return self.identify_cc_license(license_string) + else: + logging.debug(f"LicenseMapper ('url'): The provided '{license_string}' does not seem to be a valid string.") + return None + + def get_license_internal_key(self, license_string: str = None) -> str | None: + """ + This method is intended as a fallback for the 'LicenseItem'-field 'internal'. + (This might be the case when license strings are provided that don't have a specific CC Version) + It will return None if no mapping was possible. + """ + license_string: str = license_string + if license_string: + license_string = license_string.lower() + copyright_hit = self.identify_if_string_contains_copyright(license_string) + internal_hit = self.fallback_to_license_internal_key(license_string) + if copyright_hit: + return Constants.LICENSE_COPYRIGHT_LAW + if internal_hit: + return internal_hit + else: + logging.debug( + f"LicenseMapper ('internal'): Could not map '{license_string}' to 'license.internal'-key since it doesn't " + f"seem to be a valid string." + ) + return None + + @staticmethod + def identify_if_string_contains_copyright(license_string: str = None) -> bool: + """ + Checks a provided string if the word 'copyright' or copyright-indicating unicode symbols are mentioned within + it. + @param license_string: string that might or might not contain any 'copyright'-indicating words + @return: Returns True if 'copyright' was mentioned within a string. + """ + if license_string: + license_string = license_string.lower() + if "copyright" in license_string or "©" in license_string: + return True + return False + + @staticmethod + def identify_if_string_contains_url_pattern(license_string: str = None) -> bool: + """ + Returns True if URL patterns are found within the string, otherwise returns False. + """ + license_string: str = license_string + if license_string: + license_stripped: str = license_string.strip() + if "http://" in license_stripped or "https://" in license_stripped: + # ToDo: use RegEx for more precise URL patterns? + return True + else: + return False + + def fallback_to_license_internal_key(self, license_string: str = None) -> str | None: + license_string = license_string + if license_string: + if self.identify_if_string_contains_copyright(license_string): + return Constants.LICENSE_COPYRIGHT_LAW + if self.cc_pattern.search(license_string): + result_dict = self.cc_pattern.search(license_string).groupdict() + cc_type = result_dict.get("CC_TYPE") + cc_zero = result_dict.get("CC_ZERO") + public_domain = result_dict.get("PDM") + if cc_zero: + return "CC_0" + if public_domain: + return "PDM" + if cc_type: + cc_string_internal: str = f"CC_{result_dict.get('CC_TYPE')}".upper() + if "-" in cc_string_internal or " " in cc_string_internal: + cc_string_internal = cc_string_internal.replace("-", "_") + cc_string_internal = cc_string_internal.replace(" ", "_") + if cc_string_internal in Constants.LICENSE_MAPPINGS_INTERNAL: + return cc_string_internal + else: + logging.debug( + f"LicenseMapper: Fallback to 'license.internal' failed for string " + f"'{license_string}' . 
The extracted string_internal value was: " + f"{cc_string_internal}" + ) + else: + return None + + def identify_cc_license(self, license_string: str) -> str | None: + """ + Checks the provided string if it can be mapped to one of the known URL-strings of Constants.py. + If no mapping is possible, returns None. + """ + # ToDo (refactor): check string validity first? - warn otherwise + license_string_original: str = license_string + if self.identify_if_string_contains_url_pattern(license_string_original): + license_url_candidate = license_string_original + logging.info(f"LicenseMapper: {license_url_candidate} was recognized as a URL") + if "http://" in license_url_candidate: + license_url_candidate = license_url_candidate.replace("http://", "https://") + if license_url_candidate.endswith("deed.de"): + license_url_candidate = license_url_candidate[: -len("deed.de")] + if license_url_candidate.endswith("/de/"): + license_url_candidate = license_url_candidate[: -len("de/")] + for valid_license_url in Constants.VALID_LICENSE_URLS: + if license_url_candidate in valid_license_url: + return valid_license_url + elif license_string: + license_string = license_string.lower() + logging.debug(f"LicenseMapper: Recognized license string '{license_string}'") + if self.cc_pattern.search(license_string): + result_dict: dict = self.cc_pattern.search(license_string).groupdict() + cc_type = result_dict.get("CC_TYPE") + cc_version = result_dict.get("CC_VERSION") + cc_zero = result_dict.get("CC_ZERO") + public_domain = result_dict.get("PDM") + if cc_zero: + return Constants.LICENSE_CC_ZERO_10 + if cc_type and cc_version: + partial_url = ( + f"/{str(result_dict.get('CC_TYPE')).lower().strip()}" + f"/{str(result_dict.get('CC_VERSION')).lower().strip()}/" + ) + logging.debug(f"partial_url: {partial_url}") + for valid_license_url in Constants.VALID_LICENSE_URLS: + if partial_url in valid_license_url: + logging.debug( + f"LicenseMapper: License string '{license_string}' was recognized as " + f"{valid_license_url}" + ) + return valid_license_url + if public_domain: + return Constants.LICENSE_PDM + elif cc_type: + logging.debug( + f"LicenseMapper: Couldn't recognize a (valid) CC Version within {license_string} - " + f"Trying fallback method..." 
+ ) + return None + else: + logging.debug(f"LicenseMapper: Couldn't detect a CC license within {license_string}") + return None + + +if __name__ == "__main__": + test_mapper = LicenseMapper() + # test-cases for debugging purposes + print(test_mapper.get_license_internal_key("CC BY-NC-ND")) + print(test_mapper.get_license_internal_key("zufälliger CC BY lizenzierter Freitext-String")) + print(test_mapper.get_license_url("a random CC-BY 4.0 string")) + print(test_mapper.get_license_url("https://creativecommons.org/licenses/by-nc/3.0/de/")) + print(test_mapper.identify_cc_license("https://creativecommons.org/licenses/by-nc/3.0/deed.de")) + pass diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py new file mode 100644 index 00000000..30500940 --- /dev/null +++ b/converter/util/test_license_mapper.py @@ -0,0 +1,63 @@ +import pytest + +from converter.constants import Constants +from .license_mapper import LicenseMapper + + +class TestLicenseMapper: + @pytest.mark.parametrize( + "test_input, expected_result", + [ + ("a random CC-BY 4.0 string", Constants.LICENSE_CC_BY_40), + ("CC-0", Constants.LICENSE_CC_ZERO_10), + ("the license CC0 is mentioned somewhere", Constants.LICENSE_CC_ZERO_10), + ("CC-Zero", Constants.LICENSE_CC_ZERO_10), + ("Creative Commons Zero", Constants.LICENSE_CC_ZERO_10), + ("CC-BY-SA-4.0", Constants.LICENSE_CC_BY_SA_40), + ("CC-BY-NC-SA 3.0", Constants.LICENSE_CC_BY_NC_SA_30), + (" CC BY 4.0 ", Constants.LICENSE_CC_BY_40), + ( + "https://creativecommons.org/licenses/by-sa/4.0", + Constants.LICENSE_CC_BY_SA_40, + ), + ( + "https://creativecommons.org/licenses/by-nd/3.0/", + Constants.LICENSE_CC_BY_ND_30, + ), + ("https://creativecommons.org/licenses/by-nc/3.0/deed.de", Constants.LICENSE_CC_BY_NC_30), + ("https://creativecommons.org/licenses/by-nc/3.0/de/", Constants.LICENSE_CC_BY_NC_30), + ( + "Copyright Zweites Deutsches Fernsehen, ZDF", + None, + ), + ("Public Domain", Constants.LICENSE_PDM), + ], + ) + def test_get_license_url(self, test_input, expected_result): + test_mapper = LicenseMapper() + assert LicenseMapper.get_license_url(test_mapper, license_string=test_input) == expected_result + + @pytest.mark.parametrize( + "test_input, expected_result", + [ + ("Copyright Zweites Deutsches Fernsehen, ZDF", Constants.LICENSE_COPYRIGHT_LAW), + (" © ", Constants.LICENSE_COPYRIGHT_LAW), + # ToDo: regularly check if new enums for the 'internal' field need to be added here or in Constants.py + ("jemand erwähnt CC0 in einem Freitext", "CC_0"), + ("CC-0", "CC_0"), + ("zufälliger CC BY lizensierter Freitext-String ohne Versionsnummer", "CC_BY"), + ("CC-BY-NC ohne Version", "CC_BY_NC"), + ("CC BY-NC-ND", "CC_BY_NC_ND"), + (" CC BY NC SA", "CC_BY_NC_SA"), + (" CC BY ND ", "CC_BY_ND"), + (" CC BY SA ", "CC_BY_SA"), + ("dieser Text ist public domain", "PDM"), + ("Gemeinfrei", "PDM"), + ("Frei nutzbares Material", None), + (" ", None), + ("", None), + ], + ) + def test_get_license_internal_key(self, test_input, expected_result): + test_mapper = LicenseMapper() + assert LicenseMapper.get_license_internal_key(test_mapper, license_string=test_input) == expected_result From daa6aa5c59be794ba9e3624cb5eab702a385ea02 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 24 Feb 2023 12:43:59 +0100 Subject: [PATCH 261/590] chore: update flake8 & pytest - chore: flake8 v5.0.3 -> 6.0.0 - chore: pytest 7.1.1 -> 7.2.1 --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt 
b/requirements.txt index 774b83a5..e536f5ca 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,8 +12,8 @@ vobject==0.9.6.1 xmltodict~=0.12.0 overrides==3.1.0 jmespath==1.0.0 -flake8==5.0.3 -pytest==7.1.1 +flake8==6.0.0 +pytest==7.2.1 extruct~=0.13.0 lxml==4.9.2 w3lib~=1.22.0 From e6cffa5c028c3951fd6135653378648286211d56 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 7 Mar 2023 17:44:34 +0100 Subject: [PATCH 262/590] digitallearninglab_spider v0.1.4 - change: replace crawler-specific RegEx parsing of license strings by using the newly implemented LicenseMapper -- this should reduce maintenance in the long run and will enable us to properly test edge-cases when they happen - change: try to gather 'description' from json_ld first -- if it's not available, stick to the previously used "teaser"-field - feat: 'lifecycle' authors / metadata_providers - feat: license authors - feat: digitallearninglab_spider overwrites parse() method - fix: unnecessary API calls (dupefilter warning during initial API pagination) - fix: "new_lrt"-mapping -> "Unterrichtsbaustein" (according to item_type) - docs: ToDos for future crawler updates --- .../spiders/digitallearninglab_spider.py | 168 +++++++++++++----- 1 file changed, 126 insertions(+), 42 deletions(-) diff --git a/converter/spiders/digitallearninglab_spider.py b/converter/spiders/digitallearninglab_spider.py index b2591934..762ae2c0 100644 --- a/converter/spiders/digitallearninglab_spider.py +++ b/converter/spiders/digitallearninglab_spider.py @@ -1,5 +1,5 @@ import html -import re +import logging import time import scrapy @@ -8,14 +8,15 @@ from converter.constants import Constants from converter.valuespace_helper import ValuespaceHelper from .base_classes import LrmiBase, LomBase -from ..items import LicenseItemLoader +from ..items import LicenseItemLoader, LomLifecycleItemloader +from ..util.license_mapper import LicenseMapper class DigitallearninglabSpider(CrawlSpider, LrmiBase): name = "digitallearninglab_spider" friendlyName = "digital.learning.lab" url = "https://digitallearninglab.de" - version = "0.1.3" # last update: 2022-08-09 + version = "0.1.4" # last update: 2023-03-08 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -26,10 +27,11 @@ class DigitallearninglabSpider(CrawlSpider, LrmiBase): "AUTOTHROTTLE_START_DELAY": 0.25 } apiUrl = "https://digitallearninglab.de/api/%type?q=&sorting=latest&page=%page" - - # Unterrichtsbausteine (API "count" value): 228 - # tools: 182 - # therefore we expect 410 items after a successful crawl + # API Counts (as of 2023-03-08) + # type 'unterrichtsbausteine': 234 + # type 'tool': 184 + # therefore we expect (in total): 418 items after a successful crawl + # under the assumption that there are no duplicates across types def __init__(self, **kwargs): LrmiBase.__init__(self, **kwargs) @@ -72,12 +74,12 @@ def parse_request(self, response: scrapy.http.TextResponse): callback=self.handle_entry, meta={"item": item, "type": response.meta["type"]}, ) - yield self.start_request( - response.meta["type"], response.meta["page"] + 1 - ) + yield self.start_request( + response.meta["type"], response.meta["page"] + 1 + ) def handle_entry(self, response): - return LrmiBase.parse(self, response) + return self.parse(response) @staticmethod def get_new_lrt(response): @@ -101,9 +103,14 @@ def getLOMGeneral(self, response): general.replace_value( "title", html.unescape(response.meta["item"].get("name").strip()) ) - general.add_value( - "description", 
html.unescape(response.meta["item"].get("teaser")) - ) + json_ld_description = self.getLRMI("description", response=response) + if json_ld_description: + general.add_value('description', json_ld_description) + else: + # fallback via DLL API: shorter "teaser"-description + general.add_value( + "description", html.unescape(response.meta["item"].get("teaser")) + ) # general.add_value('keyword', list(filter(lambda x: x,map(lambda x: x.strip(), response.xpath('//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()').getall())))) return general @@ -113,41 +120,77 @@ def getLOMTechnical(self, response): technical.replace_value("location", response.url) return technical + def get_lifecycle_author(self, response): + lifecycle_loader = LomLifecycleItemloader() + json_ld_authors: list[dict] = self.getLRMI("author", response=response) + if json_ld_authors: + for author_item in json_ld_authors: + if "@type" in author_item: + author_type = author_item["@type"] + if author_type == "Person": + if "name" in author_item: + lifecycle_loader.add_value('role', 'author') + lifecycle_loader.add_value('firstName', author_item["name"]) + if "sameAs" in author_item: + lifecycle_loader.add_value('url', author_item["sameAs"]) + elif author_type == "Organization": + if "name" in author_item: + lifecycle_loader.add_value('role', 'publisher') + lifecycle_loader.add_value('organization', author_item["name"]) + if "sameAs" in author_item: + lifecycle_loader.add_value('url', author_item["sameAs"]) + return lifecycle_loader + + def get_lifecycle_metadata_provider(self, response, provider_item: dict = None): + if provider_item: + lifecycle_loader = LomLifecycleItemloader() + provider_name = provider_item.get("name") + provider_url = provider_item.get("sameAs") + date_published = self.getLRMI("datePublished", response=response) + if provider_name: + lifecycle_loader.add_value('role', 'metadata_provider') + lifecycle_loader.add_value('organization', provider_name) + if provider_url: + lifecycle_loader.add_value('url', provider_url) + if date_published: + lifecycle_loader.add_value('date', date_published) + return lifecycle_loader + def getLicense(self, response): license_loader: LicenseItemLoader = LomBase.getLicense(self, response) - # Footer: "Inhalte der Seite stehen unter CC BY-SA 4.0 Lizenz, wenn nicht anders angegeben." - license_loader.add_value('url', Constants.LICENSE_CC_BY_SA_40) # default for every item license_raw = self.getLRMI("license", response=response) + json_ld_authors: list[dict] = self.getLRMI("author", response=response) + authors = set() # by adding all authors to a set, we're making sure to only save unique author names + if json_ld_authors: + # if available, the second (there are two!) json_ld container contains a single author, while the DLL API + # itself provides a "co_author"-field (which will be used later on in lifecycle 'role' -> 'unknown') + for author_item in json_ld_authors: + if "name" in author_item: + author_name = author_item["name"] + authors.add(author_name) + if authors: + license_loader.add_value('author', authors) if license_raw: - if license_raw.startswith("http"): - # the "license" field holds a valid URL -> use it directly as is - license_loader.add_value("url", license_raw) - elif license_raw.startswith("CC"): - # this mapping is necessary for digitallearninglab since it serves a CC-pattern within its - # "license"-field (e.g. 
"CC BY-NC-SA") - cc_pattern = re.compile(r'C{2}\s' - r'\w{2}' - r'(-\w{2})*') - if cc_pattern.search(license_raw) is not None: - license_prepared_for_mapping: str = license_raw.replace(' ', '_') - license_prepared_for_mapping = license_prepared_for_mapping.replace('-', '_') - if license_prepared_for_mapping in Constants.LICENSE_MAPPINGS_INTERNAL: - license_mapped = Constants.LICENSE_MAPPINGS_INTERNAL.get(license_prepared_for_mapping) - license_mapped = license_mapped[0] - # assumption: the most recent CC-Version 4.0 is used for all materials - license_loader.replace_value('url', license_mapped) - else: - self.logger.warning(f"The specified value {license_prepared_for_mapping} can't be mapped to " - f"Constants.LICENSE_MAPPINGS_INTERNAL." - f"Please check Constants.py and LrmiBase for missing mappings/values.") - else: - self.logger.warning(f"Could not map the received 'license'-value {license_raw} . " - f"Please check Constants.py and LrmiBase for missing mappings/values.") + license_mapper = LicenseMapper() + license_url = license_mapper.get_license_url(license_string=license_raw) + license_internal = license_mapper.get_license_internal_key(license_string=license_raw) + if license_url: + license_loader.replace_value("url", license_url) + elif license_internal: + license_loader.add_value('internal', license_internal) + else: + # Footer: "Inhalte der Seite stehen unter CC BY-SA 4.0 Lizenz, wenn nicht anders angegeben." + logging.debug(f"DigitalLearningLabs did not provide a valid license for {response.url} . Setting fallback " + f"value CC-BY-SA 4.0.") + license_loader.add_value('url', Constants.LICENSE_CC_BY_SA_40) # default for every item return license_loader def getValuespaces(self, response): valuespaces = LrmiBase.getValuespaces(self, response) valuespaces.replace_value('new_lrt', self.get_new_lrt(response)) + # ToDo: scrape DOM (left bar) for additional metadata: + # - 'conditionsOfAccess' + # - dataProtectionConformity? try: range = ( response.xpath( @@ -170,10 +213,14 @@ def getValuespaces(self, response): '//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-subject")]/parent::*//text()' ).getall() valuespaces.add_value("discipline", discipline) + # ToDo: implement a proper 'discipline'-mapping with the 'digitalCompetencies'-update of the crawler except: pass - lrt = response.meta["item"].get("type") - valuespaces.add_value("new_lrt", lrt) + item_type = response.meta["item"].get("type") + # the DLL API currently provides only 3 values for "type": 'teaching-module', 'tool', 'trend' + valuespaces.add_value("new_lrt", item_type) + if item_type == "teaching-module": + valuespaces.replace_value("new_lrt", "5098cf0b-1c12-4a1b-a6d3-b3f29621e11d") # Unterrichtsbaustein try: tool_type = list( map( @@ -188,3 +235,40 @@ def getValuespaces(self, response): except: pass return valuespaces + + def parse(self, response, **kwargs): + if self.shouldImport(response) is False: + logging.debug( + "Skipping entry {} because shouldImport() returned false".format(str(self.getId(response))) + ) + return None + if self.getId(response) is not None and self.getHash(response) is not None: + if not self.hasChanged(response): + return None + base = self.getBase(response) + # ToDo: educational -> competencies ("ccm:competencies")? 
+ lom = self.getLOM(response) + + if self.getLRMI("author", response=response): + lom.add_value('lifecycle', self.get_lifecycle_author(response).load_item()) + provider_list: list[dict] = self.getLRMI("provider", response=response) + # there might be multiple providers within the "provider"-field of the json_ld + if provider_list: + for provider_item in provider_list: + lom.add_value("lifecycle", self.get_lifecycle_metadata_provider(response, provider_item=provider_item).load_item()) + if "co_authors" in response.meta["item"]: + co_authors: list = response.meta["item"]["co_authors"] + if co_authors: + for co_author in co_authors: + lifecycle_unknown_item_loader = LomLifecycleItemloader() + if co_author: + lifecycle_unknown_item_loader.add_value('role', 'unknown') + lifecycle_unknown_item_loader.add_value('firstName', co_author) + lom.add_value('lifecycle', lifecycle_unknown_item_loader.load_item()) + base.add_value("lom", lom.load_item()) + base.add_value("license", self.getLicense(response).load_item()) + base.add_value("permissions", self.getPermissions(response).load_item()) + base.add_value("response", self.mapResponse(response).load_item()) + base.add_value("valuespaces", self.getValuespaces(response).load_item()) + + return base.load_item() From feb53c36282b335091683b0af0ccdf576a7e0a33 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 7 Mar 2023 21:14:41 +0100 Subject: [PATCH 263/590] fix: license['internal']-mapping missing values - add: "CC_BY_NC_ND" and "CC_BY_NC_SA" Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 72398daa..f22d7a61 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -295,7 +295,7 @@ def mapLicense(self, spaces, license): match license["internal"]: case Constants.LICENSE_COPYRIGHT_LAW: spaces["ccm:commonlicense_key"] = "COPYRIGHT_FREE" - case "CC_BY" | "CC_BY_SA" | "CC_BY_NC" | "CC_BY_ND" | "CC_0" | "PDM": + case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM": spaces["ccm:commonlicense_key"] = license["internal"] case Constants.LICENSE_CUSTOM: spaces["ccm:commonlicense_key"] = "CUSTOM" From a04468d2c92bd23c86a5481ed896d4fecb8de306 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 7 Mar 2023 21:17:08 +0100 Subject: [PATCH 264/590] fix: LicenseItem 'author' freetext strings Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/items.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/items.py b/converter/items.py index af151468..a3739d0a 100644 --- a/converter/items.py +++ b/converter/items.py @@ -242,7 +242,7 @@ class LicenseItem(Item): properties. To make sure that licenses are properly recognized by edu-sharing, make sure to provide a valid 'url'-string and if that's not possible, set a correct 'internal'-constant. (see: constants.py) """ - author = Field() + author = Field(output_processor=JoinMultivalues()) """An author freetext string. (Basically, how the author should be named in case this is a 'CC-BY'-license. 
Corresponding edu-sharing property: 'ccm:author_freetext'""" description = Field() From e5433512ab2dd8f1fdbcf644543475ef6155e230 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 7 Mar 2023 21:19:11 +0100 Subject: [PATCH 265/590] style: more explicit LicenseMapper debug messages --- converter/util/license_mapper.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py index 374795f6..1d9c76a3 100644 --- a/converter/util/license_mapper.py +++ b/converter/util/license_mapper.py @@ -105,8 +105,12 @@ def fallback_to_license_internal_key(self, license_string: str = None) -> str | cc_zero = result_dict.get("CC_ZERO") public_domain = result_dict.get("PDM") if cc_zero: + logging.debug(f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " + f"CC_0") return "CC_0" if public_domain: + logging.debug(f"Licensemapper: Fallback to 'license.internal' for '{license_string}' successful: " + f"Public Domain ") return "PDM" if cc_type: cc_string_internal: str = f"CC_{result_dict.get('CC_TYPE')}".upper() @@ -114,6 +118,8 @@ def fallback_to_license_internal_key(self, license_string: str = None) -> str | cc_string_internal = cc_string_internal.replace("-", "_") cc_string_internal = cc_string_internal.replace(" ", "_") if cc_string_internal in Constants.LICENSE_MAPPINGS_INTERNAL: + logging.debug(f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " + f"{cc_string_internal}") return cc_string_internal else: logging.debug( @@ -172,7 +178,7 @@ def identify_cc_license(self, license_string: str) -> str | None: elif cc_type: logging.debug( f"LicenseMapper: Couldn't recognize a (valid) CC Version within {license_string} - " - f"Trying fallback method..." + f"Trying fallback method for 'license.internal' next..." 
) return None else: From 4b05cc5fc3b344e16c2046fc6b295660d0979f85 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sat, 11 Mar 2023 00:37:39 +0100 Subject: [PATCH 266/590] tutory_spider v0.1.4 - change: switch to Playwright for HTML extraction -- after Serlo, Tutory is the next website that seems to cause the "Splash"-ccontainer to crash after a while - feat: gathering of 'license.author'-metadata in accordance to the "publishName"-flag of the Tutory API - fix: gather "description"-metadata with additional fallbacks -- the previously used XPath could no longer be found within Tutory's DOM and thousands of items would get dropped while crawling due to missing 'description' fields -- the crawler tries to parse the "description" metadata from the Tutory API first, then falls back to the DOM header meta fields --- if neither one of the preferred "description" fields are available, the crawler will try to grab text within the DOM itself - style: code formatting via black Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/tutory_spider.py | 70 ++++++++++++++++++++++++------ 1 file changed, 57 insertions(+), 13 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 507ddee7..7fc4475b 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -1,11 +1,13 @@ import logging import re +import urllib.parse import scrapy from scrapy.selector import Selector from scrapy.spiders import CrawlSpider from .base_classes import LomBase, JSONBase +from ..web_tools import WebEngine, WebTools class TutorySpider(CrawlSpider, LomBase, JSONBase): @@ -14,10 +16,16 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase): url = "https://www.tutory.de/" objectUrl = "https://www.tutory.de/bereitstellung/dokument/" baseUrl = "https://www.tutory.de/api/v1/share/" - version = "0.1.3" # last update: 2022-03-07 - custom_settings = {"AUTOTHROTTLE_ENABLED": True, "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_DEBUG": True} + version = "0.1.4" # last update: 2022-03-11 + custom_settings = { + "AUTOTHROTTLE_ENABLED": True, + "ROBOTSTXT_OBEY": False, + "AUTOTHROTTLE_DEBUG": True, + "WEB_TOOLS": WebEngine.Playwright, + } api_pagesize_limit = 5000 + # the old API pageSize of 999999 (which was used in 2021) doesn't work anymore and throws a 502 Error (Bad Gateway). # Setting the pageSize to 5000 appears to be a reasonable value with an API response time of 12-15s @@ -105,25 +113,61 @@ def getValuespaces(self, response): def getLicense(self, response=None): license_loader = LomBase.getLicense(self, response) + if "user" in response.meta["item"]: + user_dict: dict = response.meta["item"]["user"] + if "publishName" in user_dict: + # the 'publishName'-field seems to indicate whether the username or the full name appears on top of a + # worksheet as author metadata. 
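# Illustrative example of the assumption above (field names as handled below; the concrete values are invented):
# a user-dict like {"publishName": "name", "firstname": "Erika", "lastname": "Mustermann", "username": "erika_m"}
# should end up as license.author == "Erika Mustermann", while "publishName": "username" would store "erika_m".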
+ publish_decision: str = user_dict["publishName"] + if publish_decision == "username": + if "username" in user_dict: + username: str = user_dict["username"] + if username: + license_loader.add_value("author", username) + elif publish_decision == "name": + # ToDo: this information could also be used for lifecycle role 'author' in a future crawler update + firstname = None + lastname = None + if "firstname" in user_dict: + firstname = user_dict.get("firstname") + if "lastname" in user_dict: + lastname = user_dict.get("lastname") + if firstname and lastname: + full_name = f"{firstname} {lastname}" + license_loader.add_value("author", full_name) return license_loader def getLOMGeneral(self, response=None): general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"]["name"]) + item_description = None if "description" in response.meta["item"]: - general.add_value("description", response.meta["item"]["description"]) + item_description = response.meta["item"]["description"] + meta_description = response.xpath("//meta[@property='description']/@content").get() + meta_og_description = response.xpath("//meta[@property='og:description']/@content").get() + if item_description: + general.add_value("description", item_description) + elif meta_description: + # 1st fallback: trying to parse a description string from the header + general.add_value("description", meta_description) + elif meta_og_description: + # 2nd fallback: + general.add_value("description", meta_og_description) else: - html = self.getUrlData(response.url)["html"] + html = WebTools.getUrlData(response.url, engine=WebEngine.Playwright)["html"] if html: - data = Selector(text=html).xpath('//ul[contains(@class,"worksheet-pages")]//text()').getall() - cutoff = 4 - if len(data) > cutoff: - for i in range(cutoff): - del data[0] - - text = " ".join(data) - text = text[:1000] - general.add_value("description", text) + # apparently, the human-readable text is nested within + #
<div class="eduMark"> OR <div class="noEduMark">
elements + edumark_combined: list[str] = ( + Selector(text=html) + .xpath("//div[contains(@class,'eduMark')]//text()|//div[contains(@class,'noEduMark')]//text()") + .getall() + ) + if edumark_combined: + text_combined: str = " ".join(edumark_combined) + text_combined = urllib.parse.unquote(text_combined) + text_combined = f"{text_combined[:1000]} [...]" + general.add_value("description", text_combined) return general def getLOMTechnical(self, response=None): From 531766979dd374ac017ef2ff7dae318daac5493e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 Mar 2023 01:33:10 +0100 Subject: [PATCH 267/590] fix: LisumPipeline "Informatik" eafCode edge-case - this change was necessary for oeh_spider since the "discipline"-Vocab-key for informatik ("320") does not line up with the eafCode for Informatik ("32002") Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 64ffc1f8..70ccb593 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -815,6 +815,8 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy f"to its corresponding 'ccm:taxonid' short-handle. Trying Fallback...") match discipline_eaf_code: # catching edge-cases where OEH 'discipline'-vocab-keys don't line up with eafsys.txt values + case "320": + discipline_eafcodes.add("32002") # Informatik case "20090": discipline_eafcodes.add("20080") # Esperanto case "oeh04010": From d69a42db3b7f2b64404b8c4c89da8b0577225292 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 Mar 2023 13:07:01 +0100 Subject: [PATCH 268/590] serlo_spider v0.2.6 - feat: 4th fallback for Serlo 'title' -- lots of Serlo items only provide a generic "... - lernen mit Serlo!" title if the user didn't specify a title for his specific exercise --- this occurs more than 2700 times for "Mathe Aufgabe - lernen mit Serlo!" 
alone -- therefore we're now using the "lernen mit Serlo!"-String as an indicator that the title is most probably a generic one (set by the Serlo CMS) and we try to extract a fallback title from the last breadcrumb label instead Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 34 ++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 2c1105fc..50608f6b 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -3,7 +3,6 @@ import requests import scrapy -from scrapy import settings from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ @@ -18,11 +17,11 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.5" # last update: 2023-03-03 - custom_settings = settings.BaseSettings({ - # playwright cause of issues with thumbnails+text for serlo - "WEB_TOOLS": WebEngine.Playwright - }, 'spider') + version = "0.2.6" # last update: 2023-03-14 + custom_settings = { + # Using Playwright because of Splash-issues with thumbnails+text for Serlo + "WEB_TOOLS": WebEngine.Playwright + } graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: @@ -40,14 +39,11 @@ class SerloSpider(scrapy.Spider, LomBase): "professional": "other", # Someone already practicing a profession; an industry partner, or professional development trainer. "student": "learner", - # "parent": "parent", # no mapping needed - # "teacher": "teacher" # no mapping needed } def __init__(self, *a, **kw): super().__init__(*a, **kw) self.graphql_items = self.fetch_all_graphql_pages() - # logging.debug(f"Gathered {len(self.graphql_items)} items from the GraphQL API") def fetch_all_graphql_pages(self): all_entities = list() @@ -162,18 +158,36 @@ def parse(self, response, **kwargs): # # - aggregationLevel optional general.add_value('identifier', graphql_json["id"]) title_1st_try: str = graphql_json["headline"] + title_fallback: str = str() # not all materials carry a title in the GraphQL API, therefore we're trying to grab a valid title from - # different sources (GraphQL > json_ld > header) + # different sources (GraphQL > (DOM) json_ld > (DOM) header > (DOM) last breadcrumb label) if title_1st_try: general.add_value('title', title_1st_try) elif not title_1st_try: title_2nd_try = json_ld["name"] if title_2nd_try: general.add_value('title', title_2nd_try) + title_fallback = title_2nd_try if not title_1st_try and not title_2nd_try: title_from_header = response.xpath('//meta[@property="og:title"]/@content').get() if title_from_header: general.add_value('title', title_from_header) + title_fallback = title_from_header + if "lernen mit Serlo!" in title_fallback: + # We assume that Strings ending with "lernen mit Serlo!" are placeholders + # e.g. "Mathe Aufgabe - lernen mit Serlo!" 
occurs over 2700 times as a title + # therefore we try to grab the last breadcrumb label and use it as a more specific fallback + page_data_json: str = response.xpath("//script[@id='__NEXT_DATA__']/text()").get() + if page_data_json: + page_data_json: dict = json.loads(page_data_json) + if page_data_json: + if "breadcrumbsData" in page_data_json["props"]["pageProps"]["pageData"]: + breadcrumbs: list = page_data_json["props"]["pageProps"]["pageData"]["breadcrumbsData"] + if breadcrumbs: + if "label" in breadcrumbs[-1]: + title_breadcrumb_last_label: str = breadcrumbs[-1]["label"] + if title_breadcrumb_last_label: + general.replace_value('title', title_breadcrumb_last_label) # not all GraphQL entries have a description either, therefore we try to grab that from different sources # (GraphQL > JSON-LD > DOM header) description_1st_try = str() From 5546e47d69d7c345d01d6a3cb6d2e24acb5caefc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 Mar 2023 13:09:34 +0100 Subject: [PATCH 269/590] style: code formatting via black - line length 120 --- converter/spiders/serlo_spider.py | 128 +++++++++++++++--------------- 1 file changed, 64 insertions(+), 64 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 50608f6b..c703a489 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -5,8 +5,17 @@ import scrapy from converter.constants import Constants -from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader +from converter.items import ( + BaseItemLoader, + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, + ResponseItemLoader, +) from converter.spiders.base_classes import LomBase from converter.web_tools import WebEngine, WebTools @@ -80,11 +89,7 @@ def query_graphql_page(self, amount_of_nodes: int = 500, pagination_string: str """ } request = requests.post( - url=self.API_URL, - headers={ - "Content-Type": "application/json" - }, - json=graphql_metadata_query_body + url=self.API_URL, headers={"Content-Type": "application/json"}, json=graphql_metadata_query_body ) return request.json() @@ -92,12 +97,7 @@ def start_requests(self): for graphql_item in self.graphql_items: # logging.debug(f"{graphql_item}") item_url = graphql_item["id"] - yield scrapy.Request(url=item_url, - callback=self.parse, - cb_kwargs={ - "graphql_item": graphql_item - } - ) + yield scrapy.Request(url=item_url, callback=self.parse, cb_kwargs={"graphql_item": graphql_item}) def getId(self, response=None, graphql_json=None) -> str: # The actual URL of a learning material is dynamic and can change at any given time @@ -139,14 +139,14 @@ def parse(self, response, **kwargs): # # ALL possible keys for the different Item and ItemLoader-classes can be found inside converter/items.py # # TODO: fill "base"-keys with values for # # - thumbnail recommended - base.add_value('screenshot_bytes', screenshot_bytes) - base.add_value('sourceId', self.getId(response, graphql_json=graphql_json)) - base.add_value('hash', self.getHash(response, graphql_json=graphql_json)) - base.add_value('lastModified', graphql_json["dateModified"]) + base.add_value("screenshot_bytes", screenshot_bytes) + base.add_value("sourceId", self.getId(response, 
graphql_json=graphql_json)) + base.add_value("hash", self.getHash(response, graphql_json=graphql_json)) + base.add_value("lastModified", graphql_json["dateModified"]) # thumbnail_url: str = "This string should hold the thumbnail URL" # base.add_value('thumbnail', thumbnail_url) if "publisher" in json_ld: - base.add_value('publisher', json_ld["publisher"]) + base.add_value("publisher", json_ld["publisher"]) lom = LomBaseItemloader() @@ -156,22 +156,22 @@ def parse(self, response, **kwargs): # # - coverage optional # # - structure optional # # - aggregationLevel optional - general.add_value('identifier', graphql_json["id"]) + general.add_value("identifier", graphql_json["id"]) title_1st_try: str = graphql_json["headline"] title_fallback: str = str() # not all materials carry a title in the GraphQL API, therefore we're trying to grab a valid title from # different sources (GraphQL > (DOM) json_ld > (DOM) header > (DOM) last breadcrumb label) if title_1st_try: - general.add_value('title', title_1st_try) + general.add_value("title", title_1st_try) elif not title_1st_try: title_2nd_try = json_ld["name"] if title_2nd_try: - general.add_value('title', title_2nd_try) + general.add_value("title", title_2nd_try) title_fallback = title_2nd_try if not title_1st_try and not title_2nd_try: title_from_header = response.xpath('//meta[@property="og:title"]/@content').get() if title_from_header: - general.add_value('title', title_from_header) + general.add_value("title", title_from_header) title_fallback = title_from_header if "lernen mit Serlo!" in title_fallback: # We assume that Strings ending with "lernen mit Serlo!" are placeholders @@ -187,7 +187,7 @@ def parse(self, response, **kwargs): if "label" in breadcrumbs[-1]: title_breadcrumb_last_label: str = breadcrumbs[-1]["label"] if title_breadcrumb_last_label: - general.replace_value('title', title_breadcrumb_last_label) + general.replace_value("title", title_breadcrumb_last_label) # not all GraphQL entries have a description either, therefore we try to grab that from different sources # (GraphQL > JSON-LD > DOM header) description_1st_try = str() @@ -195,22 +195,22 @@ def parse(self, response, **kwargs): if "description" in graphql_json: description_1st_try: str = graphql_json["description"] if description_1st_try: - general.add_value('description', description_1st_try) + general.add_value("description", description_1st_try) if not description_1st_try and "description" in json_ld: # some json_ld containers don't have a description either description_2nd_try: str = json_ld["description"] if description_2nd_try: - general.add_value('description', description_2nd_try) + general.add_value("description", description_2nd_try) elif not description_1st_try and not description_2nd_try: description_from_header: str = response.xpath('//meta[@name="description"]/@content').get() if description_from_header: - general.add_value('description', description_from_header) + general.add_value("description", description_from_header) in_language: list = graphql_json["inLanguage"] - general.add_value('language', in_language) + general.add_value("language", in_language) # ToDo: keywords would be extremely useful, but aren't supplied by neither the API / JSON_LD nor the header # # once we've added all available values to the necessary keys in our LomGeneralItemLoader, # # we call the load_item()-method to return a (now filled) LomGeneralItem to the LomBaseItemLoader - lom.add_value('general', general.load_item()) + lom.add_value("general", general.load_item()) technical = 
LomTechnicalItemLoader() # # TODO: fill "technical"-keys with values for @@ -219,10 +219,10 @@ def parse(self, response, **kwargs): # # - installationRemarks optional # # - otherPlatformRequirements optional # # - duration optional (only applies to audiovisual content like videos/podcasts) - technical.add_value('format', 'text/html') # e.g. if the learning object is a web-page - technical.add_value('location', graphql_json["id"]) # we could also use response.url here + technical.add_value("format", "text/html") # e.g. if the learning object is a web-page + technical.add_value("location", graphql_json["id"]) # we could also use response.url here - lom.add_value('technical', technical.load_item()) + lom.add_value("technical", technical.load_item()) lifecycle = LomLifecycleItemloader() # # TODO: fill "lifecycle"-keys with values for @@ -231,16 +231,16 @@ def parse(self, response, **kwargs): # # - lastName recommended # # - uuid optional if "publisher" in json_ld: - lifecycle.add_value('organization', "Serlo Education e. V.") - lifecycle.add_value('role', 'publisher') # supported roles: "author" / "editor" / "publisher" + lifecycle.add_value("organization", "Serlo Education e. V.") + lifecycle.add_value("role", "publisher") # supported roles: "author" / "editor" / "publisher" # for available roles mapping, please take a look at converter/es_connector.py - lifecycle.add_value('url', json_ld["publisher"]) - lifecycle.add_value('email', "de@serlo.org") + lifecycle.add_value("url", json_ld["publisher"]) + lifecycle.add_value("email", "de@serlo.org") for language_item in in_language: if language_item == "en": - lifecycle.replace_value('email', "en@serlo.org") - lifecycle.add_value('date', graphql_json["dateCreated"]) - lom.add_value('lifecycle', lifecycle.load_item()) + lifecycle.replace_value("email", "en@serlo.org") + lifecycle.add_value("date", graphql_json["dateCreated"]) + lom.add_value("lifecycle", lifecycle.load_item()) educational = LomEducationalItemLoader() # # TODO: fill "educational"-keys with values for @@ -251,9 +251,9 @@ def parse(self, response, **kwargs): # # - typicalAgeRange optional # # - difficulty optional # # - typicalLearningTime optional - educational.add_value('language', in_language) + educational.add_value("language", in_language) - lom.add_value('educational', educational.load_item()) + lom.add_value("educational", educational.load_item()) # classification = LomClassificationItemLoader() # # TODO: fill "classification"-keys with values for @@ -264,10 +264,10 @@ def parse(self, response, **kwargs): # # - keyword optional # lom.add_value('classification', classification.load_item()) - base.add_value('lom', lom.load_item()) + base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() - vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) + vs.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) # # for possible values, either consult https://vocabs.openeduhub.de # # or take a look at https://github.com/openeduhub/oeh-metadata-vocabs # # TODO: fill "valuespaces"-keys with values for @@ -286,11 +286,11 @@ def parse(self, response, **kwargs): for audience_item in json_ld["audience"]: edu_audience_role = audience_item["prefLabel"]["en"] if edu_audience_role == "professional": - vs.add_value('educationalContext', ["Further Education", "vocational education"]) + vs.add_value("educationalContext", ["Further Education", "vocational education"]) if edu_audience_role in self.EDU_AUDIENCE_ROLE_MAPPING.keys(): edu_audience_role = 
self.EDU_AUDIENCE_ROLE_MAPPING.get(edu_audience_role) intended_end_user_roles.append(edu_audience_role) - vs.add_value('intendedEndUserRole', intended_end_user_roles) + vs.add_value("intendedEndUserRole", intended_end_user_roles) # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) if "about" in json_ld and len(json_ld["about"]) != 0: @@ -306,34 +306,34 @@ def parse(self, response, **kwargs): discipline_en: str = list_item["prefLabel"]["en"] disciplines.append(discipline_en) if len(disciplines) > 0: - vs.add_value('discipline', disciplines) + vs.add_value("discipline", disciplines) # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl) # if the json_ld doesn't hold a discipline value for us, we'll try to grab the discipline from the url path else: if "/mathe/" in response.url: - vs.add_value('discipline', "Mathematik") + vs.add_value("discipline", "Mathematik") if "/biologie/" in response.url: - vs.add_value('discipline', "Biologie") + vs.add_value("discipline", "Biologie") if "/chemie/" in response.url: - vs.add_value('discipline', "Chemie") + vs.add_value("discipline", "Chemie") if "/nachhaltigkeit/" in response.url: - vs.add_value('discipline', "Nachhaltigkeit") + vs.add_value("discipline", "Nachhaltigkeit") if "/informatik/" in response.url: - vs.add_value('discipline', "Informatik") - vs.add_value('containsAdvertisement', 'No') + vs.add_value("discipline", "Informatik") + vs.add_value("containsAdvertisement", "No") # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/containsAdvertisement.ttl) # serlo doesn't want to distract learners with ads, therefore we can set it by default to 'no' if graphql_json["isAccessibleForFree"] is True: # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/price.ttl) - vs.add_value('price', 'no') + vs.add_value("price", "no") elif graphql_json["isAccessibleForFree"] is False: # only set the price to "kostenpflichtig" if it's explicitly stated, otherwise we'll leave it empty - vs.add_value('price', 'yes') + vs.add_value("price", "yes") if graphql_json["learningResourceType"]: # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) - vs.add_value('learningResourceType', graphql_json["learningResourceType"]) + vs.add_value("learningResourceType", graphql_json["learningResourceType"]) - base.add_value('valuespaces', vs.load_item()) + base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() # # TODO: fill "license"-keys with values for @@ -341,18 +341,18 @@ def parse(self, response, **kwargs): # # - expirationDate optional (for content that expires, e.g. 
ÖR-Mediatheken) license_url = graphql_json["license"]["id"] if license_url: - lic.add_value('url', license_url) - base.add_value('license', lic.load_item()) + lic.add_value("url", license_url) + base.add_value("license", lic.load_item()) permissions = super().getPermissions(response) - base.add_value('permissions', permissions.load_item()) + base.add_value("permissions", permissions.load_item()) response_loader = ResponseItemLoader() - response_loader.replace_value('headers', response.headers) - response_loader.replace_value('html', html_body) - response_loader.replace_value('status', response.status) - response_loader.replace_value('text', html_text) - response_loader.replace_value('url', self.getUri(response)) - base.add_value('response', response_loader.load_item()) + response_loader.replace_value("headers", response.headers) + response_loader.replace_value("html", html_body) + response_loader.replace_value("status", response.status) + response_loader.replace_value("text", html_text) + response_loader.replace_value("url", self.getUri(response)) + base.add_value("response", response_loader.load_item()) yield base.load_item() From a9f6ccbb8fef0b95f78450c2fa520871f232744a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 10 Nov 2022 14:12:42 +0100 Subject: [PATCH 270/590] oersi_spider v0.0.4 (squashed) - change: remove hard-coded "discipline"-values for OMA entries and use the new valuespaces field for the "hochschulfaechersystematik"-Vocab instead - add: 13 new metadata providers - change: use LicenseMapper for URL-Parsing (since each Metadataprovider might use slightly different values) - add: 'affiliation'-metadata for persons (OERSI fields 'contributor' or 'author' optionally provide these additional fields) - add: 'datePublished' for lifecycle publishers - fix: 'getHash'-method checks first if 'datePublished' or 'dateCreated' fields exist at all before trying to access its value - fix: edge-case for missing 'technical.location' values - remove: "hcrt"-mapping value "reference_work" -- fixes mixup between hcrt key "index" and its prefLabel "reference work" -- since "index" is available in both hcrt and the "old" learningResourceType, this key doesn't need to appear in our mapping table to "new_lrt" - style: code formatting via black --- converter/spiders/oersi_spider.py | 335 ++++++++++++++++-------------- 1 file changed, 174 insertions(+), 161 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 6ec3d49c..bcb5ea68 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -20,6 +20,7 @@ ResponseItemLoader, ) from converter.spiders.base_classes import LomBase +from converter.util.license_mapper import LicenseMapper from converter.web_tools import WebEngine, WebTools @@ -34,7 +35,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.0.3" # last update: 2022-11-08 + version = "0.0.4" # last update: 2023-03-23 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -46,44 +47,57 @@ class OersiSpider(scrapy.Spider, LomBase): ELASTIC_PARAMETER_KEEP_ALIVE: str = "1m" # for reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#time-units - ELASTIC_PARAMETER_REQUEST_SIZE: int = 1000 # maximum: 10.000, but responses for bigger request sizes take significantly longer + ELASTIC_PARAMETER_REQUEST_SIZE: int = ( + 1000 # 
maximum: 10.000, but responses for bigger request sizes take significantly longer + ) ELASTIC_PIT_ID: dict = dict() # the provider-filter at https://oersi.org/resources/ shows you which String values can be used as a provider-name # ToDo: regularly check if new providers need to be added to the list below (and insert/sort them alphabetically!) ELASTIC_PROVIDERS_TO_CRAWL: list = [ + # "BC Campus", # "detmoldMusicTools", # "digiLL", # "DuEPublico", # "eaDNURT", + # "eCampusOntario", # "eGov-Campus", + # "Finnish Library of Open Educational Resources", + # "GitHub", + # "GitLab", + # "Helmholtz Codebase", # "HessenHub", # "HHU Mediathek", # "HOOU", # "iMoox", # "KI Campus", + # "MIT OpenCourseWare", + # "OER Portal Uni Graz", # "oncampus", + # "Open Music Academy", + # "Open Textbook Library", + # "Opencast Universität Osnabrück", # "openHPI", # "OpenLearnWare", - "Open Music Academy" # "OpenRub", - # "ORCA.nrw", + "ORCA.nrw", + # "Phaidra Uni Wien", # "RWTH Aachen GitLab", - # "twillo", # "TIB AV-Portal", # "TU Delft OpenCourseWare", + # "twillo", + # "Universität Innsbruck OER Repositorium", + # "VCRP", # "vhb", # "Virtual Linguistics Campus", - # "ZOERR" + # "ZOERR", ] - # ToDo: DO NOT activate other providers until 'Hochschulfaechersystematik'-values are possible within edu-sharing! ELASTIC_ITEMS_ALL = list() MAPPING_HCRT_TO_NEW_LRT = { "diagram": "f7228fb5-105d-4313-afea-66dd59b1b6f8", # "Graph, Diagramm und Charts" "portal": "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9", # "Webseite und Portal (stabil)" "questionnaire": "d31a5b68-611f-4015-8be9-56bd5eb44c64", # "Fragebogen und Umfragen" - "reference_work": "c022c920-c236-4234-bae1-e264a3e2bdf6", # "Nachschlagewerk und Glossar" "script": "6a15628c-0e59-43e3-9fc5-9a7f7fa261c4", # "Skript, Handout und Handreichung" "sheet_music": "f7e92628-4132-4985-bcf5-93c285e300a8", # "Noten" "textbook": "a5897142-bf57-4cd0-bcd9-7d0f1932e87a", # "Lehrbuch und Grundlagenwerk (auch E-Book)" @@ -99,20 +113,14 @@ def __init__(self, **kwargs): # after all items have been collected, delete the ElasticSearch PIT json_response = self.elastic_pit_delete() if json_response: - logging.info( - f"ElasticSearch API response (upon PIT delete): {json_response}" - ) + logging.info(f"ElasticSearch API response (upon PIT delete): {json_response}") def start_requests(self): for elastic_item in self.ELASTIC_ITEMS_ALL: - main_entity_of_page: list[dict] = elastic_item.get("_source").get( - "mainEntityOfPage" - ) + main_entity_of_page: list[dict] = elastic_item.get("_source").get("mainEntityOfPage") if main_entity_of_page: item_url = main_entity_of_page[0].get("id") - yield scrapy.Request( - url=item_url, cb_kwargs={"elastic_item": elastic_item} - ) + yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}) def elastic_pit_create(self) -> dict: """ @@ -154,9 +162,7 @@ def elastic_query_provider_metadata(self, provider_name, search_after=None): if search_after is None: payload = { "size": self.ELASTIC_PARAMETER_REQUEST_SIZE, - "query": { - "match": {"mainEntityOfPage.provider.name": f"{provider_name}"} - }, + "query": {"match": {"mainEntityOfPage.provider.name": f"{provider_name}"}}, "pit": { "id": self.ELASTIC_PIT_ID.get("id"), "keep_alive": self.ELASTIC_PARAMETER_KEEP_ALIVE, @@ -167,9 +173,7 @@ def elastic_query_provider_metadata(self, provider_name, search_after=None): else: payload = { "size": self.ELASTIC_PARAMETER_REQUEST_SIZE, - "query": { - "match": {"mainEntityOfPage.provider.name": f"{provider_name}"} - }, + "query": {"match": 
{"mainEntityOfPage.provider.name": f"{provider_name}"}}, "pit": { "id": self.ELASTIC_PIT_ID.get("id"), "keep_alive": self.ELASTIC_PARAMETER_KEEP_ALIVE, @@ -197,36 +201,26 @@ def elastic_fetch_all_provider_pages(self): provider_name=provider_name, search_after=pagination_parameter ) if "pit_id" in current_page_json_response: - if current_page_json_response.get( - "pit_id" - ) != self.ELASTIC_PIT_ID.get("id"): + if current_page_json_response.get("pit_id") != self.ELASTIC_PIT_ID.get("id"): self.ELASTIC_PIT_ID = current_page_json_response.get("pit_id") logging.info( f"ElasticSearch: pit_id changed between queries, using the new pit_id " f"{current_page_json_response.get('pit_id')} for subsequent queries." ) if "hits" in current_page_json_response: - total_count = ( - current_page_json_response.get("hits").get("total").get("value") - ) + total_count = current_page_json_response.get("hits").get("total").get("value") logging.info(f"Expecting {total_count} items for {provider_name}") if "hits" in current_page_json_response.get("hits"): - provider_items: list = current_page_json_response.get("hits").get( - "hits" - ) + provider_items: list = current_page_json_response.get("hits").get("hits") if provider_items: - logging.info( - f"The provider_items list has {len(provider_items)} entries" - ) + logging.info(f"The provider_items list has {len(provider_items)} entries") all_items.extend(provider_items) last_entry: dict = provider_items[-1] # ToDo: pagination documentation if "sort" in last_entry: last_sort_result: list = last_entry.get("sort") if last_sort_result: - logging.info( - f"The last_sort_result is {last_sort_result}" - ) + logging.info(f"The last_sort_result is {last_sort_result}") has_next_page = True pagination_parameter = last_sort_result else: @@ -235,7 +229,7 @@ def elastic_fetch_all_provider_pages(self): else: logging.info( f"reached the end of the ElasticSearch results for {provider_name} // " - f"Total amount of items collected: {len(all_items)}" + f"Total amount of items collected (across all metadata-providers): {len(all_items)}" ) break return all_items @@ -247,7 +241,7 @@ def getId(self, response=None, elastic_item: dict = dict) -> str: """ return elastic_item["_id"] - def getHash(self, response=None, elastic_item: dict = dict) -> str: + def getHash(self, response=None, elastic_item_source: dict = dict) -> str: """ Creates a hash-value by combining a date + the crawler version number within a string. 
Since OERSI's date-fields are not always available, this method has several fallbacks: @@ -255,8 +249,12 @@ def getHash(self, response=None, elastic_item: dict = dict) -> str: 2) OERSI "dateCreated"-field 3) if neither of the above are available: combine the current datetime + crawler version """ - date_published: str = elastic_item["_source"]["datePublished"] - date_created: str = elastic_item["_source"]["dateCreated"] + date_published = str() + date_created = str() + if "datePublished" in elastic_item_source: + date_published: str = elastic_item_source["datePublished"] + if "dateCreated" in elastic_item_source: + date_created: str = elastic_item_source["dateCreated"] if date_published: hash_temp: str = f"{date_published}{self.version}" elif date_created: @@ -279,16 +277,10 @@ def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: logging.info(f"matching requested id: {self.remoteId}") return True return False - db = EduSharing().findItem( - self.getId(response, elastic_item=elastic_item), self - ) - changed = db is None or db[1] != self.getHash( - response, elastic_item=elastic_item - ) + db = EduSharing().findItem(self.getId(response, elastic_item=elastic_item), self) + changed = db is None or db[1] != self.getHash(response, elastic_item_source=elastic_item["_source"]) if not changed: - logging.info( - f"Item {self.getId(response, elastic_item=elastic_item)} (uuid: {db[0]}) has not changed" - ) + logging.info(f"Item {self.getId(response, elastic_item=elastic_item)} (uuid: {db[0]}) has not changed") return changed def get_lifecycle_author( @@ -321,21 +313,30 @@ def get_lifecycle_author( lifecycle_author.add_value("date", date_created) if "affiliation" in creator_item: affiliation_item = creator_item.get("affiliation") - # ToDo: affiliation.type (e.g. 
Organization) + # affiliation.type is always "Organization" according to + # https://dini-ag-kim.github.io/amb/draft/schemas/affiliation.json if "name" in affiliation_item: affiliation_name = affiliation_item.get("name") lifecycle_author.add_value("organization", affiliation_name) if "id" in affiliation_item: + # the affiliation.id is always a reference to GND, Wikidata or ROR affiliation_url = affiliation_item.get("id") lifecycle_author.add_value("url", affiliation_url) if creator_item.get("type") == "Person": - lifecycle_author.add_value( - "role", "author" - ) # supported roles: "author" / "editor" / "publisher" + lifecycle_author.add_value("role", "author") author_name: str = creator_item.get("name") - authors.append( - author_name - ) # this string is going to be used in the license field "author" + # ToDo: 'honorificPrefix' yields dirty values which need to be cleaned up first and need to be + # checked for edge-cases before we can gather data from this field + # examples from metadataprovider 'ORCA.nrw': + # "Dr.", + # "Prof.", + # "http://hbz-nrw.de/regal#academicDegree/unkown", + # "unknown", + # if "honorificPrefix" in creator_item: + # honorific_prefix = creator_item["honorificPrefix"] + # if honorific_prefix: + # author_name = f"{honorific_prefix} {author_name}" + authors.append(author_name) # this string is going to be used in the license field "author" self.split_names_if_possible_and_add_to_lifecycle( name_string=author_name, lifecycle_item_loader=lifecycle_author ) @@ -343,18 +344,12 @@ def get_lifecycle_author( person_dictionary=creator_item, lifecycle_item_loader=lifecycle_author, ) - lom_base_item_loader.add_value( - "lifecycle", lifecycle_author.load_item() - ) + lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) elif creator_item.get("type") == "Organization": creator_organization_name = creator_item.get("name") lifecycle_author.add_value("role", "author") - lifecycle_author.add_value( - "organization", creator_organization_name - ) - lom_base_item_loader.add_value( - "lifecycle", lifecycle_author.load_item() - ) + lifecycle_author.add_value("organization", creator_organization_name) + lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) return authors def get_lifecycle_contributor( @@ -375,6 +370,11 @@ def get_lifecycle_contributor( lifecycle_contributor.add_value("role", "unknown") contributor_name: str = contributor_item.get("name") if contributor_name: + # ToDo: activate honorificPrefix in a later version (when having solved the problem for 'creator') + # if "honorificPrefix" in contributor_item: + # honorific_prefix: str = contributor_item["honorificPrefix"] + # if honorific_prefix: + # contributor_name = f"{honorific_prefix} {contributor_name}" if author_list: if contributor_name in author_list: # OMA lists one author, but also lists the same person as a "contributor", @@ -389,9 +389,7 @@ def get_lifecycle_contributor( lifecycle_item_loader=lifecycle_contributor, ) elif contributor_item.get("type") == "Organization": - lifecycle_contributor.add_value( - "organization", contributor_name - ) + lifecycle_contributor.add_value("organization", contributor_name) if "id" in contributor_item: # id points to a URI reference of ORCID, GND, WikiData or ROR # (while this isn't necessary for OMA items yet (as they have no 'id'-field), it will be necessary @@ -401,21 +399,26 @@ def get_lifecycle_contributor( lifecycle_item_loader=lifecycle_contributor, ) if "affiliation" in contributor_item: - # ToDo: in future versions of the 
crawler, this field needs to be handled - # (the 'affiliation'-field currently ONLY appears in items from provider "ORCA.nrw") - # - affiliation - # - id - # - name - # - type - pass - lom_base_item_loader.add_value( - "lifecycle", lifecycle_contributor.load_item() - ) + # the 'affiliation'-field currently ONLY appears in items from provider "ORCA.nrw" + # the 'affiliation.type' is always 'Organization' + affiliation_dict: dict = contributor_item["affiliation"] + # if the dictionary exists, it might contain the following fields: + # - id (= URL to GND / ROR / Wikidata) + # - name (= human readable String) + if affiliation_dict: + if "id" in affiliation_dict: + affiliation_id_url: str = affiliation_dict["id"] + if affiliation_id_url: + lifecycle_contributor.add_value("url", affiliation_id_url) + if "name" in affiliation_dict: + affiliation_name: str = affiliation_dict["name"] + if affiliation_name: + lifecycle_contributor.add_value("organization", affiliation_name) + + lom_base_item_loader.add_value("lifecycle", lifecycle_contributor.load_item()) @staticmethod - def get_lifecycle_metadata_provider( - lom_base_item_loader: LomBaseItemloader, oersi_main_entity_of_page_item: dict - ): + def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oersi_main_entity_of_page_item: dict): """ Collects metadata from OERSI's "provider"-field and stores it within a LomLifecycleItemLoader. """ @@ -427,26 +430,17 @@ def get_lifecycle_metadata_provider( if "name" in provider_dict: lifecycle_metadata_provider = LomLifecycleItemloader() lifecycle_metadata_provider.add_value("role", "metadata_provider") - metadata_provider_name: str = oersi_main_entity_of_page_item.get( - "provider" - ).get("name") - lifecycle_metadata_provider.add_value( - "organization", metadata_provider_name - ) + metadata_provider_name: str = oersi_main_entity_of_page_item.get("provider").get("name") + lifecycle_metadata_provider.add_value("organization", metadata_provider_name) if "id" in provider_dict: # unique URL to the landing-page of the metadata, e.g.: "id"-value for a typical # 'Open Music Academy'-item looks like: "https://openmusic.academy/docs/26vG1SR17Zqf5LXpVLULqb" - metadata_provider_url: str = oersi_main_entity_of_page_item.get( - "provider" - ).get("id") + metadata_provider_url: str = oersi_main_entity_of_page_item.get("provider").get("id") lifecycle_metadata_provider.add_value("url", metadata_provider_url) - lom_base_item_loader.add_value( - "lifecycle", lifecycle_metadata_provider.load_item() - ) + lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) - def get_lifecycle_publisher( - self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict - ): + def get_lifecycle_publisher(self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, + date_published: Optional[str] = None): """ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. 
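If a 'datePublished'-string is handed to the new optional 'date_published'-parameter, it is additionally
stored as the lifecycle 'date'-value (see the updated call in the parse()-method further below).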
""" @@ -470,9 +464,9 @@ def get_lifecycle_publisher( publisher_url = publisher_item.get("id") if publisher_url: lifecycle_publisher.add_value("url", publisher_url) - lom_base_item_loader.add_value( - "lifecycle", lifecycle_publisher.load_item() - ) + if date_published: + lifecycle_publisher.add_value("date", date_published) + lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) @staticmethod def lifecycle_save_oersi_identifier_to_url_or_uuid( @@ -497,9 +491,7 @@ def lifecycle_save_oersi_identifier_to_url_or_uuid( lifecycle_item_loader.add_value("uuid", author_uuid_or_url) @staticmethod - def split_names_if_possible_and_add_to_lifecycle( - name_string: str, lifecycle_item_loader: LomLifecycleItemloader - ): + def split_names_if_possible_and_add_to_lifecycle(name_string: str, lifecycle_item_loader: LomLifecycleItemloader): """ Splits a string containing a person's name - if there's a whitespace within that string - into two parts: first_name and last_name. @@ -520,15 +512,11 @@ def parse(self, response: scrapy.http.Response, **kwargs): # _source is the original JSON body passed for the document at index time # see: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html if self.shouldImport(response) is False: - logging.debug( - "Skipping entry {} because shouldImport() returned false".format( - str(self.getId(response)) - ) - ) + logging.debug("Skipping entry {} because shouldImport() returned false".format(str(self.getId(response)))) return None if ( self.getId(response=response, elastic_item=elastic_item) is not None - and self.getHash(response=response, elastic_item=elastic_item) is not None + and self.getHash(response=response, elastic_item_source=elastic_item_source) is not None ): if not self.hasChanged(response, elastic_item=elastic_item): return None @@ -538,7 +526,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): # ToDo: The following keys DON'T EXIST (yet?) in the OERSI ElasticSearch API, # but could appear in the future as possible metadata fields according to the AMB metadata draft: - # - affiliation (OERSI uses their own 'sourceOrganization'-field instead) # - assesses # - audience (might be suitable for "valuespaces.intendedEndUserRole") # - competencyRequired @@ -556,14 +543,10 @@ def parse(self, response: scrapy.http.Response, **kwargs): provider_name = str() if "mainEntityOfPage" in elastic_item_source: - main_entity_of_page: list[dict] = elastic_item_source.get( - "mainEntityOfPage" - ) + main_entity_of_page: list[dict] = elastic_item_source.get("mainEntityOfPage") if main_entity_of_page: if "provider" in main_entity_of_page[0]: - provider_name: str = ( - main_entity_of_page[0].get("provider").get("name") - ) + provider_name: str = main_entity_of_page[0].get("provider").get("name") # the first provider_name is used for saving individual items to edu-sharing sub-folders # via 'base.origin' later for maeop_item in main_entity_of_page: @@ -578,23 +561,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): oersi_main_entity_of_page_item=maeop_item, ) - # if "about" in elastic_item_source: - # about = elastic_item_source.get("about") - # # about is OPTIONAL - # for about_item in about: - # # ToDo: disciplines are available as a list (according to the 'Hochschulfaechersystematik') - # # - 'de'-field: human-readable German String - # # - 'id'-field: URL of the entry (e.g. 
"https://w3id.org/kim/hochschulfaechersystematik/n78") - # pass - # # see: https://dini-ag-kim.github.io/amb/draft/#about - # # ToDo: DISCIPLINES! - # # - prefLabel - # # - de: German description (Schulfach / Studienfach) - # # - en: English ... - # # - uk: Ukrainian ... - # # - etc. (depending on the provider, several more languages + descriptions are listed) - # # - id - date_created = str() if "dateCreated" in elastic_item_source: date_created: str = elastic_item_source.get("dateCreated") @@ -603,7 +569,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): date_published: str = elastic_item_source.get("datePublished") base.add_value("sourceId", self.getId(response, elastic_item=elastic_item)) - base.add_value("hash", self.getHash(response, elastic_item=elastic_item)) + base.add_value("hash", self.getHash(response, elastic_item_source=elastic_item)) if "image" in elastic_item_source: thumbnail_url = elastic_item_source.get("image") # thumbnail if thumbnail_url: @@ -635,18 +601,17 @@ def parse(self, response: scrapy.http.Response, **kwargs): lom.add_value("general", general.load_item()) technical = LomTechnicalItemLoader() + identifier_url: str = str() if "id" in elastic_item_source: - identifier_url: str = elastic_item_source.get( - "id" - ) # this URL is REQUIRED and should always be available + identifier_url: str = elastic_item_source.get("id") # this URL is REQUIRED and should always be available # see https://dini-ag-kim.github.io/amb/draft/#id if identifier_url: + technical.add_value("location", identifier_url) if identifier_url != response.url: - technical.add_value("location", identifier_url) # the identifier_url should be more stable/robust than the (resolved) response.url in the long term, - # so we will save both + # so we will save both URLs in case the resolved URL is different technical.add_value("location", response.url) - else: + elif not identifier_url: technical.add_value("location", response.url) lom.add_value("technical", technical.load_item()) @@ -663,9 +628,8 @@ def parse(self, response: scrapy.http.Response, **kwargs): author_list=authors, ) - self.get_lifecycle_publisher( - lom_base_item_loader=lom, elastic_item_source=elastic_item_source - ) + self.get_lifecycle_publisher(lom_base_item_loader=lom, elastic_item_source=elastic_item_source, + date_published=date_published) # ToDo: 'sourceOrganization' doesn't appear in OMA results, but will be available for other providers # each item can have multiple 'soureOrganization' dictionaries attached to it, which typically look like @@ -702,9 +666,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() - vs.add_value("discipline", "420") # Musik - # ToDo: remove this hardcoded value in the future! (oersi_spider v0.0.1 is hardcoded for 'Open Music Academy') - # ToDo: future versions of the crawler need to use 'Hochschulfaechersystematik'-values! 
vs.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) is_accessible_for_free: bool = elastic_item_source.get("isAccessibleForFree") if is_accessible_for_free: @@ -722,15 +683,13 @@ def parse(self, response: scrapy.http.Response, **kwargs): if "/conditionsOfAccess/" in conditions_of_access_id: conditions_of_access_value = conditions_of_access_id.split("/")[-1] if conditions_of_access_value: - vs.add_value('conditionsOfAccess', conditions_of_access_value) + vs.add_value("conditionsOfAccess", conditions_of_access_value) hcrt_types = dict() oeh_lrt_types = dict() learning_resource_types = list() if "learningResourceType" in elastic_item_source: - learning_resource_types: list[dict] = elastic_item_source.get( - "learningResourceType" - ) + learning_resource_types: list[dict] = elastic_item_source.get("learningResourceType") # see: https://dini-ag-kim.github.io/amb/draft/#learningresourcetype - a typical LRT-dict looks like this: # { # "prefLabel": { @@ -775,15 +734,74 @@ def parse(self, response: scrapy.http.Response, **kwargs): if oeh_lrt_types: vs.add_value("learningResourceType", list(oeh_lrt_types.keys())) + if "about" in elastic_item_source: + about: list[dict] = elastic_item_source.get("about") + # see: https://dini-ag-kim.github.io/amb/draft/#about + # "about" is an OPTIONAL field. + # The equivalent edu-sharing field will be "ccm:oeh_taxonid_university". + # each about-field is a list of dictionaries. Here's an example from Open Music Academy: + # [ + # { + # "prefLabel": { + # "de": "Musik, Musikwissenschaft", + # "uk": "Музика, музикознавство", + # "en": "Music, Musicology" + # }, + # "id": "https://w3id.org/kim/hochschulfaechersystematik/n78" + # }, + # { + # "prefLabel": { + # "de": "Kunst, Kunstwissenschaft", + # "uk": "Мистецтво, мистецтвознавство", + # "en": "Art, Art Theory" + # }, + # "id": "https://w3id.org/kim/hochschulfaechersystematik/n9" + # } + # ], + for about_item in about: + if "id" in about_item: + about_id: str = about_item.get("id") + # According to the AMB spec, the 'id'-field: can either contain a URL from the + # "Destatis-Systematik der Fächergrppen, Studienbereiche und Studienfächer" + # (= hochschulfaechersystematik) + # e.g.: "https://w3id.org/kim/hochschulfaechersystematik/n78") + # or alternatively "Schulfächer" (e.g. http://w3id.org/kim/schulfaecher/) + if about_id: + # ToDo: at the moment OERSI exclusively provides university URL values, + # but might start providing "schulfaecher"-URLs as well in the future (-> mapping + # to 'discipline' will be necessary) + if about_id.startswith("https://w3id.org/kim/hochschulfaechersystematik/"): + about_id_key = about_id.split("/")[-1] + if about_id_key: + vs.add_value("hochschulfaechersystematik", about_id_key) + else: + logging.debug( + f"The value of OERSI 'about.id' was not recognized during mapping to " + f"valuespaces 'hochschulfaechersystematik': {about_id} ." 
+ ) + # if "prefLabel" in about_item: + # # ToDo: the 'prefLabel'-strings might be used as fallback values in the future + # # Hochschulfächer are available as a list of prefLabel strings in several languages (according to + # # the 'Hochschulfaechersystematik') + # # - 'de'-field: human-readable German string + # # - 'en'-field: human-readable English string + # if "de" in about_item: + # about_preflabel_de: str = about_item["prefLabel"]["de"] + # if "en" in about_item: + # about_preflabel_en: str = about_item["prefLabel"]["en"] + base.add_value("valuespaces", vs.load_item()) license_loader = LicenseItemLoader() if "license" in elastic_item_source: license_url: str = elastic_item_source.get("license").get("id") if license_url: - # ToDo: from some providers (e.g. twillo) license URLs end with "deed.de", confirm if licenses get + license_mapper = LicenseMapper() + license_url_mapped = license_mapper.get_license_url(license_string=license_url) + if license_url_mapped: + # ToDo: from some providers (e.g. twillo) license URLs end with "deed.DE", confirm if licenses get # properly recognized in edu-sharing - license_loader.add_value("url", license_url) + license_loader.add_value("url", license_url_mapped) if authors: license_loader.add_value("author", authors) # noinspection DuplicatedCode @@ -793,13 +811,8 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value("permissions", permissions.load_item()) response_loader = ResponseItemLoader(response=response) - # for future maintenance, during debugging the following problems occurred one day, - # but disappeared the next day: - # - OMA URLs cause HTTP Error 400 in Splash response_loader.add_value("status", response.status) - url_data = WebTools.getUrlData( - url=response.url, engine=WebEngine.Playwright - ) + url_data = WebTools.getUrlData(url=response.url, engine=WebEngine.Playwright) if "html" in url_data: response_loader.add_value("html", url_data["html"]) if "text" in url_data: @@ -809,8 +822,8 @@ def parse(self, response: scrapy.http.Response, **kwargs): if "har" in url_data: response_loader.add_value("har", url_data["har"]) if "screenshot_bytes" in url_data: - # ToDo: optional thumbnail feature (toggleable via a list?) - # -> OMA serves generic thumbnails, which is why a screenshot of the + # ToDo: control which thumbnail is used, depending on the metadata-provider? 
+ # -> metadata-provider 'Open Music Academy' serves generic thumbnails, which is why a screenshot of the # website will always be more interesting to users than the same generic image across ~650 materials base.add_value("screenshot_bytes", url_data["screenshot_bytes"]) response_loader.add_value("headers", response.headers) From 0de74aacf784d4bec7aae390d115efa58fadd512 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 23 Mar 2023 20:36:45 +0100 Subject: [PATCH 271/590] feat: implements "hochschulfaechersystematik"-vocab - uses the corresponding edu-sharing field "ccm:oeh_taxonid_university" - since the "hochschulfaechersystematik" is currently generated as a 'scheme.json' instead of the usual 'index.json' file by SkoHub, a workaround was necessary in the meantime Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 1 + converter/items.py | 2 ++ valuespace_converter/app/valuespaces.py | 38 ++++++++++++++++--------- 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index f22d7a61..eb8ebc29 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -425,6 +425,7 @@ def transformItem(self, uuid, spider, item): "discipline": "ccm:taxonid", "educationalContext": "ccm:educationalcontext", "fskRating": "ccm:fskRating", + "hochschulfaechersystematik": "ccm:oeh_taxonid_university", "intendedEndUserRole": "ccm:educationalintendedenduserrole", "learningResourceType": "ccm:educationallearningresourcetype", "new_lrt": "ccm:oeh_lrt", diff --git a/converter/items.py b/converter/items.py index a3739d0a..ed7b9d81 100644 --- a/converter/items.py +++ b/converter/items.py @@ -218,6 +218,8 @@ class ValuespaceItem(Item): """Corresponding edu-sharing property: 'ccm:educationalcontext'""" fskRating = Field(output_processor=JoinMultivalues()) """Corresponding edu-sharing property: 'ccm:fskRating'""" + hochschulfaechersystematik = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'ccm:oeh_taxonid_university""" intendedEndUserRole = Field(output_processor=JoinMultivalues()) """Corresponding edu-sharing property: 'ccm:intendedEndUserRole'""" learningResourceType = Field(output_processor=JoinMultivalues()) diff --git a/valuespace_converter/app/valuespaces.py b/valuespace_converter/app/valuespaces.py index b92e1e4c..f862d941 100644 --- a/valuespace_converter/app/valuespaces.py +++ b/valuespace_converter/app/valuespaces.py @@ -1,24 +1,35 @@ import string import requests -import json + class Valuespaces: - idsVocabs = ['intendedEndUserRole', 'discipline', 'educationalContext', 'learningResourceType', - 'sourceContentType', 'toolCategory', 'conditionsOfAccess', 'oer', 'new_lrt'] + idsVocabs = ['conditionsOfAccess', 'discipline', 'educationalContext', + 'intendedEndUserRole', 'learningResourceType', 'new_lrt', 'oer', 'sourceContentType', 'toolCategory'] idsW3ID = ['containsAdvertisement', 'price', 'accessibilitySummary', 'dataProtectionConformity', 'fskRating'] + ids_workaround = ['hochschulfaechersystematik'] + # ToDo: move workaround to 'idsVocabs'-list as soon as the vocab itself is fixed data = {} + def __init__(self): - urls = [] + vocab_list: list[dict] = [] + # one singular dictionary in the vocab list will typically look like this: + # {'key': 'discipline', 'url': 'https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/discipline/index.json'} for v in self.idsVocabs: - urls.append({'key': v, 'url': 
'https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/' + v + '/index.json'}) + vocab_list.append( + {'key': v, 'url': 'https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/' + v + '/index.json'}) for v in self.idsW3ID: - urls.append({'key': v, 'url': 'http://w3id.org/openeduhub/vocabs/' + v + '/index.json'}) - for url in urls: - #try: - r = requests.get(url['url']) - self.data[url['key']] = self.flatten(r.json()['hasTopConcept']) - #except: + vocab_list.append({'key': v, 'url': 'http://w3id.org/openeduhub/vocabs/' + v + '/index.json'}) + for v in self.ids_workaround: + vocab_list.append( + {'key': v, + 'url': f"https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/{v}/scheme.json"} + ) + for vocab_name in vocab_list: + # try: + r = requests.get(vocab_name['url']) + self.data[vocab_name['key']] = self.flatten(r.json()['hasTopConcept']) + # except: # self.valuespaces[v] = {} def flatten(self, tree: []): @@ -29,7 +40,7 @@ def flatten(self, tree: []): return result @staticmethod - def findKey(valuespaceId: string, id: string, valuespace = None): + def findKey(valuespaceId: string, id: string, valuespace=None): if not valuespace: valuespace = Valuespaces.data[valuespaceId] for key in valuespace: @@ -41,7 +52,6 @@ def findKey(valuespaceId: string, id: string, valuespace = None): return found return None - def initTree(self, tree): for t in tree: names = self.getNames(t) @@ -65,4 +75,4 @@ def getNames(self, key): names += key['note']['en'] if 'en' in key['note'] else [] names = list(set(map(lambda x: x.strip(), names))) - return names \ No newline at end of file + return names From bc4fcd4990bb8bf3667d026384ea561694069c69 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 23 Mar 2023 21:24:01 +0100 Subject: [PATCH 272/590] change: playwright waituntil 'DOMContentLoaded' - previously playwright waited until "networkidle" which resulted in some pages taking excessively long to grab a screenshot / text -- this change might have side-effects on some websites that fire the DOMContentLoaded event slightly too early, but the majority of websites should behave nicely -- waiting until 'networkidle' was a desperate workaround anyway since some websites have almost continuously cause traffic (e.g. 
due to site metrics or huge videos starting to buffer) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index d3536324..034d7b3e 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -78,9 +78,9 @@ async def fetchDataPlaywright(url: str): async with async_playwright() as p: browser = await p.chromium.connect_over_cdp(endpoint_url=env.get("PLAYWRIGHT_WS_ENDPOINT")) page = await browser.new_page() - await page.goto(url, wait_until="networkidle", timeout=90000) - # waits for page to fully load (= no network traffic for 500ms), - # maximum timeout: 90s + await page.goto(url, wait_until="domcontentloaded", timeout=90000) + # waits for a website to fire the DOMContentLoaded event or for a timeout of 90s + # since waiting for 'networkidle' seems to cause timeouts content = await page.content() screenshot_bytes = await page.screenshot() # ToDo: HAR / text / cookies From 3acf51e7385d5f2fa0cce3e7e3c991ee86918fbb Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 24 Mar 2023 14:49:18 +0100 Subject: [PATCH 273/590] remove: 'scheme.json'-workaround Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- valuespace_converter/app/valuespaces.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/valuespace_converter/app/valuespaces.py b/valuespace_converter/app/valuespaces.py index f862d941..153d230f 100644 --- a/valuespace_converter/app/valuespaces.py +++ b/valuespace_converter/app/valuespaces.py @@ -4,11 +4,9 @@ class Valuespaces: - idsVocabs = ['conditionsOfAccess', 'discipline', 'educationalContext', + idsVocabs = ['conditionsOfAccess', 'discipline', 'educationalContext', 'hochschulfaechersystematik', 'intendedEndUserRole', 'learningResourceType', 'new_lrt', 'oer', 'sourceContentType', 'toolCategory'] idsW3ID = ['containsAdvertisement', 'price', 'accessibilitySummary', 'dataProtectionConformity', 'fskRating'] - ids_workaround = ['hochschulfaechersystematik'] - # ToDo: move workaround to 'idsVocabs'-list as soon as the vocab itself is fixed data = {} def __init__(self): @@ -20,11 +18,6 @@ def __init__(self): {'key': v, 'url': 'https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/' + v + '/index.json'}) for v in self.idsW3ID: vocab_list.append({'key': v, 'url': 'http://w3id.org/openeduhub/vocabs/' + v + '/index.json'}) - for v in self.ids_workaround: - vocab_list.append( - {'key': v, - 'url': f"https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/{v}/scheme.json"} - ) for vocab_name in vocab_list: # try: r = requests.get(vocab_name['url']) From be4e89aa15d7a08a8d36ad4b8797f81148201804 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 24 Mar 2023 17:47:36 +0100 Subject: [PATCH 274/590] oersi_spider v0.0.5 - activate all OERSI metadata providers for crawling - revert back to thumbnail default behaviour -- use the provided thumbnail URL first, only take a screenshot of the website if no thumbnail was provided -- overwriting generic thumbnails with Playwright Screenshots could be implemented in a future version, if desired Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 100 ++++++++++++++++-------------- 1 file changed, 52 insertions(+), 48 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 
bcb5ea68..6a3391ee 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -35,7 +35,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.0.4" # last update: 2023-03-23 + version = "0.0.5" # last update: 2023-03-24 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -55,42 +55,42 @@ class OersiSpider(scrapy.Spider, LomBase): # the provider-filter at https://oersi.org/resources/ shows you which String values can be used as a provider-name # ToDo: regularly check if new providers need to be added to the list below (and insert/sort them alphabetically!) ELASTIC_PROVIDERS_TO_CRAWL: list = [ - # "BC Campus", - # "detmoldMusicTools", - # "digiLL", - # "DuEPublico", - # "eaDNURT", - # "eCampusOntario", - # "eGov-Campus", - # "Finnish Library of Open Educational Resources", - # "GitHub", - # "GitLab", - # "Helmholtz Codebase", - # "HessenHub", - # "HHU Mediathek", - # "HOOU", - # "iMoox", - # "KI Campus", - # "MIT OpenCourseWare", - # "OER Portal Uni Graz", - # "oncampus", - # "Open Music Academy", - # "Open Textbook Library", - # "Opencast Universität Osnabrück", - # "openHPI", - # "OpenLearnWare", - # "OpenRub", + "BC Campus", + "detmoldMusicTools", + "digiLL", + "DuEPublico", + "eaDNURT", + "eCampusOntario", + "eGov-Campus", + "Finnish Library of Open Educational Resources", + "GitHub", + "GitLab", + "Helmholtz Codebase", + "HessenHub", + "HHU Mediathek", + "HOOU", + "iMoox", + "KI Campus", + "MIT OpenCourseWare", + "OER Portal Uni Graz", + "oncampus", + "Open Music Academy", + "Open Textbook Library", + "Opencast Universität Osnabrück", + "openHPI", + "OpenLearnWare", + "OpenRub", "ORCA.nrw", - # "Phaidra Uni Wien", - # "RWTH Aachen GitLab", - # "TIB AV-Portal", - # "TU Delft OpenCourseWare", - # "twillo", - # "Universität Innsbruck OER Repositorium", - # "VCRP", - # "vhb", - # "Virtual Linguistics Campus", - # "ZOERR", + "Phaidra Uni Wien", + "RWTH Aachen GitLab", + "TIB AV-Portal", + "TU Delft OpenCourseWare", + "twillo", + "Universität Innsbruck OER Repositorium", + "VCRP", + "vhb", + "Virtual Linguistics Campus", + "ZOERR", ] ELASTIC_ITEMS_ALL = list() @@ -321,13 +321,15 @@ def get_lifecycle_author( if "id" in affiliation_item: # the affiliation.id is always a reference to GND, Wikidata or ROR affiliation_url = affiliation_item.get("id") + # ToDo: fix edge-case where both the 'creator' and their affiliation have an "id" + # -> save as URL multi-value? 
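# A hedged illustration of the edge-case mentioned in the ToDo above: a hypothetical "creator"-item in which both
# the person and its affiliation carry an "id" (the person and the ORCID value are made up; the ROR URI mirrors the
# "RWTH Aachen" example that appears later in the crawler's comments):
creator_item_with_two_ids = {
    "type": "Person",
    "name": "Jane Doe",  # hypothetical person
    "id": "https://orcid.org/0000-0000-0000-0000",  # identifier of the creator itself (placeholder ORCID)
    "affiliation": {
        "type": "Organization",
        "name": "RWTH Aachen",
        "id": "https://ror.org/04xfq0f34",  # identifier of the affiliated organization (ROR)
    },
}
# The ToDo above suggests storing both URIs as a multi-value 'url'-field in this situation.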
lifecycle_author.add_value("url", affiliation_url) if creator_item.get("type") == "Person": lifecycle_author.add_value("role", "author") author_name: str = creator_item.get("name") # ToDo: 'honorificPrefix' yields dirty values which need to be cleaned up first and need to be # checked for edge-cases before we can gather data from this field - # examples from metadataprovider 'ORCA.nrw': + # examples from metadata-provider 'ORCA.nrw': # "Dr.", # "Prof.", # "http://hbz-nrw.de/regal#academicDegree/unkown", @@ -403,8 +405,8 @@ def get_lifecycle_contributor( # the 'affiliation.type' is always 'Organization' affiliation_dict: dict = contributor_item["affiliation"] # if the dictionary exists, it might contain the following fields: - # - id (= URL to GND / ROR / Wikidata) - # - name (= human readable String) + # - id (= URL to GND / ROR / Wikidata) + # - name (= human readable String) if affiliation_dict: if "id" in affiliation_dict: affiliation_id_url: str = affiliation_dict["id"] @@ -439,8 +441,9 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer lifecycle_metadata_provider.add_value("url", metadata_provider_url) lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) - def get_lifecycle_publisher(self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, - date_published: Optional[str] = None): + def get_lifecycle_publisher( + self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, date_published: Optional[str] = None + ): """ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. """ @@ -570,6 +573,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value("sourceId", self.getId(response, elastic_item=elastic_item)) base.add_value("hash", self.getHash(response, elastic_item_source=elastic_item)) + thumbnail_url = str() if "image" in elastic_item_source: thumbnail_url = elastic_item_source.get("image") # thumbnail if thumbnail_url: @@ -628,8 +632,9 @@ def parse(self, response: scrapy.http.Response, **kwargs): author_list=authors, ) - self.get_lifecycle_publisher(lom_base_item_loader=lom, elastic_item_source=elastic_item_source, - date_published=date_published) + self.get_lifecycle_publisher( + lom_base_item_loader=lom, elastic_item_source=elastic_item_source, date_published=date_published + ) # ToDo: 'sourceOrganization' doesn't appear in OMA results, but will be available for other providers # each item can have multiple 'soureOrganization' dictionaries attached to it, which typically look like @@ -799,8 +804,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): license_mapper = LicenseMapper() license_url_mapped = license_mapper.get_license_url(license_string=license_url) if license_url_mapped: - # ToDo: from some providers (e.g. twillo) license URLs end with "deed.DE", confirm if licenses get - # properly recognized in edu-sharing license_loader.add_value("url", license_url_mapped) if authors: license_loader.add_value("author", authors) @@ -821,9 +824,10 @@ def parse(self, response: scrapy.http.Response, **kwargs): response_loader.add_value("cookies", url_data["cookies"]) if "har" in url_data: response_loader.add_value("har", url_data["har"]) - if "screenshot_bytes" in url_data: - # ToDo: control which thumbnail is used, depending on the metadata-provider? 
- # -> metadata-provider 'Open Music Academy' serves generic thumbnails, which is why a screenshot of the + if not thumbnail_url and "screenshot_bytes" in url_data: + # if a thumbnail was provided, use that first - otherwise try to use Playwright website screenshot + # ToDo: optional feature - control which thumbnail is used, depending on the metadata-provider? + # metadata-provider 'Open Music Academy' serves generic thumbnails, which is why a screenshot of the # website will always be more interesting to users than the same generic image across ~650 materials base.add_value("screenshot_bytes", url_data["screenshot_bytes"]) response_loader.add_value("headers", response.headers) From a2a91953a3bd5d3aca65b46076bbcbe3b2e59274 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 24 Mar 2023 18:23:52 +0100 Subject: [PATCH 275/590] update: LicenseMapper utility - fix: edge-cases observed during OERSI crawls for license URLs ("deed.DE", "deed.CA") - add: additional test-cases for 2- and 4-char variations of CC license deeds Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/util/license_mapper.py | 32 ++++++++++++++++++--------- converter/util/test_license_mapper.py | 6 +++++ 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py index 1d9c76a3..4878b5f4 100644 --- a/converter/util/license_mapper.py +++ b/converter/util/license_mapper.py @@ -105,12 +105,15 @@ def fallback_to_license_internal_key(self, license_string: str = None) -> str | cc_zero = result_dict.get("CC_ZERO") public_domain = result_dict.get("PDM") if cc_zero: - logging.debug(f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " - f"CC_0") + logging.debug( + f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " f"CC_0" + ) return "CC_0" if public_domain: - logging.debug(f"Licensemapper: Fallback to 'license.internal' for '{license_string}' successful: " - f"Public Domain ") + logging.debug( + f"Licensemapper: Fallback to 'license.internal' for '{license_string}' successful: " + f"Public Domain " + ) return "PDM" if cc_type: cc_string_internal: str = f"CC_{result_dict.get('CC_TYPE')}".upper() @@ -118,8 +121,10 @@ def fallback_to_license_internal_key(self, license_string: str = None) -> str | cc_string_internal = cc_string_internal.replace("-", "_") cc_string_internal = cc_string_internal.replace(" ", "_") if cc_string_internal in Constants.LICENSE_MAPPINGS_INTERNAL: - logging.debug(f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " - f"{cc_string_internal}") + logging.debug( + f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " + f"{cc_string_internal}" + ) return cc_string_internal else: logging.debug( @@ -138,12 +143,19 @@ def identify_cc_license(self, license_string: str) -> str | None: # ToDo (refactor): check string validity first? 
- warn otherwise license_string_original: str = license_string if self.identify_if_string_contains_url_pattern(license_string_original): - license_url_candidate = license_string_original - logging.info(f"LicenseMapper: {license_url_candidate} was recognized as a URL") + license_url_candidate = license_string_original.lower() + logging.debug(f"LicenseMapper: The string '{license_url_candidate}' was recognized as a URL.") if "http://" in license_url_candidate: license_url_candidate = license_url_candidate.replace("http://", "https://") - if license_url_candidate.endswith("deed.de"): - license_url_candidate = license_url_candidate[: -len("deed.de")] + if "deed" in license_url_candidate: + # licenses with a deed suffix could appear in two variations, e.g.: + # - "deed.de" / "deed.CA" (2-char language code) + # - "deed.es_ES" (4-char language code) + regex_deed = re.compile(r"deed\.\w{2}(_?\w{2})?") + regex_deed_hit = regex_deed.search(license_url_candidate) + if regex_deed_hit: + deed_hit = regex_deed_hit.group() + license_url_candidate = license_url_candidate[: -len(deed_hit)] if license_url_candidate.endswith("/de/"): license_url_candidate = license_url_candidate[: -len("de/")] for valid_license_url in Constants.VALID_LICENSE_URLS: diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py index 30500940..c4ace994 100644 --- a/converter/util/test_license_mapper.py +++ b/converter/util/test_license_mapper.py @@ -31,6 +31,12 @@ class TestLicenseMapper: None, ), ("Public Domain", Constants.LICENSE_PDM), + ("https://creativecommons.org/licenses/by-nc-nd/3.0/deed.DE", Constants.LICENSE_CC_BY_NC_ND_30), + ("https://creativecommons.org/licenses/by-nc-nd/2.0/deed.CA", Constants.LICENSE_CC_BY_NC_ND_20), + ("https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES", Constants.LICENSE_CC_BY_SA_40), + # ToDo: Apache / BSD / GNU GPL licenses can't be mapped at the moment + ("https://www.gnu.org/licenses/gpl-3.0", None), + ("https://opensource.org/licenses/MIT", None), ], ) def test_get_license_url(self, test_input, expected_result): From 19b4eb6abf9e7e13cb5c7912a1cdb857eb644afe Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 24 Mar 2023 19:39:32 +0100 Subject: [PATCH 276/590] oersi_spider v0.0.6 - improvement: additional metadata fields are considered for 'lifecycle' metadata_provider - (temporarily) deactivate "BC Campus" metadata provider -- reason: the website appears to detect webcrawlers? needs further investigation Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 34 +++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 6a3391ee..a5d8c142 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -35,7 +35,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.0.5" # last update: 2023-03-24 + version = "0.0.6" # last update: 2023-03-24 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -55,7 +55,7 @@ class OersiSpider(scrapy.Spider, LomBase): # the provider-filter at https://oersi.org/resources/ shows you which String values can be used as a provider-name # ToDo: regularly check if new providers need to be added to the list below (and insert/sort them alphabetically!) 
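# Recap of the LicenseMapper 'deed'-suffix handling from the patch above, as a self-contained sketch: the RegEx is
# copied from the patch, the helper name is illustrative, and the URL is assumed to be lower-cased beforehand
# (as identify_cc_license() does before stripping the suffix):
import re

REGEX_DEED = re.compile(r"deed\.\w{2}(_?\w{2})?")

def strip_deed_suffix(license_url: str) -> str:
    """Drop a trailing 'deed.<language>' segment (2- or 4-char language codes) from a CC license URL."""
    deed_hit = REGEX_DEED.search(license_url)
    if deed_hit:
        return license_url[: -len(deed_hit.group())]
    return license_url

# strip_deed_suffix("https://creativecommons.org/licenses/by-nc-nd/3.0/deed.de") -> "https://creativecommons.org/licenses/by-nc-nd/3.0/"
# strip_deed_suffix("https://creativecommons.org/licenses/by-sa/4.0/deed.es_es") -> "https://creativecommons.org/licenses/by-sa/4.0/"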
ELASTIC_PROVIDERS_TO_CRAWL: list = [ - "BC Campus", + # "BC Campus", # ToDo: BC Campus website cannot be crawled at the moment, needs further investigation "detmoldMusicTools", "digiLL", "DuEPublico", @@ -424,19 +424,37 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer """ Collects metadata from OERSI's "provider"-field and stores it within a LomLifecycleItemLoader. """ - # each provider-item has 3 fields: - # - 'id' (= URL of the Metadata provider, e.g. 'https://openmusic.academy') - # - 'name' (= human readable name, e.g. "Open Music Academy") - # - 'type' (= String 'Service' in 100% of cases) + # mainEntityofPage structure -> 'id' is the only REQUIRED field, all other fields are OPTIONAL: + # 'id' (= URL of the Metadata Landing Page) + # 'dateCreated' (= creation date of the metadata) + # 'dateModified' (= last modified date of the metadata) + # 'provider': + # - 'id' (= URL of the Metadata provider, e.g. 'https://openmusic.academy') + # - 'name' (= human readable name, e.g. "Open Music Academy") + # - 'type' (= String 'Service' in 100% of cases) provider_dict: dict = oersi_main_entity_of_page_item.get("provider") if "name" in provider_dict: lifecycle_metadata_provider = LomLifecycleItemloader() lifecycle_metadata_provider.add_value("role", "metadata_provider") - metadata_provider_name: str = oersi_main_entity_of_page_item.get("provider").get("name") + metadata_provider_name: str = provider_dict.get("name") lifecycle_metadata_provider.add_value("organization", metadata_provider_name) - if "id" in provider_dict: + if "id" in oersi_main_entity_of_page_item: # unique URL to the landing-page of the metadata, e.g.: "id"-value for a typical # 'Open Music Academy'-item looks like: "https://openmusic.academy/docs/26vG1SR17Zqf5LXpVLULqb" + maeop_id_url: str = oersi_main_entity_of_page_item["id"] + if maeop_id_url: + lifecycle_metadata_provider.add_value("url", maeop_id_url) + if "dateCreated" in oersi_main_entity_of_page_item: + maeop_date_created: str = oersi_main_entity_of_page_item["dateCreated"] + if maeop_date_created: + lifecycle_metadata_provider.add_value("date", maeop_date_created) + elif "dateModified" in oersi_main_entity_of_page_item: + # if no creation date of the metadata is available, we use dateModified as a fallback (if available) + maeop_date_modified: str = oersi_main_entity_of_page_item["dateModified"] + if maeop_date_modified: + lifecycle_metadata_provider.add_value("date", maeop_date_modified) + if "id" in provider_dict: + # the 'provider.id' URL will always point to a more generic URL metadata_provider_url: str = oersi_main_entity_of_page_item.get("provider").get("id") lifecycle_metadata_provider.add_value("url", metadata_provider_url) lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) From fbfe6f960132251d0b1ccf891cf09aaee6a9b79d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 24 Mar 2023 21:57:04 +0100 Subject: [PATCH 277/590] oersi_spider v0.0.7 - improve: 'general.identifier' takes the "_source.id"-value if available (URL) -- both the un- and -resolved URLs will be saved to 'technical.location' anyway for future duplicate detection routines - add: OERSI "audience" to "intendedEndUserRole" mapping -- "audience" as a field only occurs for "Finnish Library of Open Educational Resources" - add: hard-coded value for "educationalContext" - workaround: temporarily deactivating the crawling of "Finnish Library of Open Educational Resources" -- this specific provider 
serves malformed URLs which contain a URI fragment ("#" in the middle of the URL string), which cannot be resolved by Scrapy -- URLs containing URI fragments get cut off at the "#", which makes Scrapy shorten the Request and identify each URL as a DuplicateRequest Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 60 +++++++++++++++++++++++++------ 1 file changed, 50 insertions(+), 10 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index a5d8c142..73058392 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -35,7 +35,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.0.6" # last update: 2023-03-24 + version = "0.0.7" # last update: 2023-03-24 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -62,7 +62,7 @@ class OersiSpider(scrapy.Spider, LomBase): "eaDNURT", "eCampusOntario", "eGov-Campus", - "Finnish Library of Open Educational Resources", + # "Finnish Library of Open Educational Resources", # ToDo: URLs of this metadata-provider cannot be resolved "GitHub", "GitLab", "Helmholtz Codebase", @@ -102,6 +102,15 @@ class OersiSpider(scrapy.Spider, LomBase): "sheet_music": "f7e92628-4132-4985-bcf5-93c285e300a8", # "Noten" "textbook": "a5897142-bf57-4cd0-bcd9-7d0f1932e87a", # "Lehrbuch und Grundlagenwerk (auch E-Book)" } + MAPPING_AUDIENCE_TO_INTENDED_END_USER_ROLE = { + # Mapping from https://www.dublincore.org/vocabs/educationalAudienceRole.ttl + # to https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl + "administrator": "manager", + # "generalPublic": "", # ToDo: find mapping + "mentor": "counsellor", + # "peerTutor": "", # ToDo: find mapping + # "professional": "", # ToDo: find mapping + } def __init__(self, **kwargs): super().__init__(**kwargs) @@ -209,11 +218,11 @@ def elastic_fetch_all_provider_pages(self): ) if "hits" in current_page_json_response: total_count = current_page_json_response.get("hits").get("total").get("value") - logging.info(f"Expecting {total_count} items for {provider_name}") + logging.debug(f"Expecting {total_count} items for the current API Pagination of {provider_name}") if "hits" in current_page_json_response.get("hits"): provider_items: list = current_page_json_response.get("hits").get("hits") if provider_items: - logging.info(f"The provider_items list has {len(provider_items)} entries") + logging.debug(f"The provider_items list has {len(provider_items)} entries") all_items.extend(provider_items) last_entry: dict = provider_items[-1] # ToDo: pagination documentation @@ -228,7 +237,7 @@ def elastic_fetch_all_provider_pages(self): break else: logging.info( - f"reached the end of the ElasticSearch results for {provider_name} // " + f"reached the end of the ElasticSearch results for '{provider_name}' // " f"Total amount of items collected (across all metadata-providers): {len(all_items)}" ) break @@ -500,7 +509,6 @@ def lifecycle_save_oersi_identifier_to_url_or_uuid( """ if "id" in person_dictionary: author_uuid_or_url = person_dictionary.get("id") - # ToDo: If this "lazy" approach yields messy results, RegEx differentiate between uuids and URLs if ( "orcid.org" in author_uuid_or_url or "dnb.de" in author_uuid_or_url @@ -548,7 +556,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): # ToDo: The following keys DON'T EXIST (yet?) 
in the OERSI ElasticSearch API, # but could appear in the future as possible metadata fields according to the AMB metadata draft: # - assesses - # - audience (might be suitable for "valuespaces.intendedEndUserRole") # - competencyRequired # - duration (for audio/video: will be suitable for "technical.location") # - educationalLevel (might be suitable for 'valuespaces.educationalContext') @@ -571,9 +578,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): # the first provider_name is used for saving individual items to edu-sharing sub-folders # via 'base.origin' later for maeop_item in main_entity_of_page: - # ToDo: according to the AMB spec, there could be a 'dateCreated'-field and 'dateModified'-field - # appearing in the future. Regularly check the API if it was implemented (this could be used for - # 'lifecycle.date') # a random sample showed that there can be multiple "mainEntityOfPage"-objects # this only occurred once within 55438 items in the API, but might happen more often in the future if "provider" in maeop_item: @@ -628,6 +632,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): identifier_url: str = elastic_item_source.get("id") # this URL is REQUIRED and should always be available # see https://dini-ag-kim.github.io/amb/draft/#id if identifier_url: + general.replace_value("identifier", identifier_url) technical.add_value("location", identifier_url) if identifier_url != response.url: # the identifier_url should be more stable/robust than the (resolved) response.url in the long term, @@ -813,6 +818,41 @@ def parse(self, response: scrapy.http.Response, **kwargs): # if "en" in about_item: # about_preflabel_en: str = about_item["prefLabel"]["en"] + vs.add_value("educationalContext", "hochschule") + # according to https://oersi.org/resources/pages/en/about/ all Materials indexed by OERSI are in the context of + # higher education + # ToDo: remove this hard-coded educationalContext value as soon as OERSI provides metadata for this field + + if "audience" in elastic_item_source: + # "audience" is an OPTIONAL field in OERSI and is currently only provided for materials from + # the "Finnish Library of Open Educational Resources" + audience_dicts: list[dict] = elastic_item_source["audience"] + # one "audience"-dictionary might look like this: + # { + # "prefLabel": { + # "de": "Lehrer", + # "fi": "Opettaja", + # "uk": "вчитель", + # "en": "teacher", + # "fr": "enseignant", + # "da": "lærer", + # "es": "profesor", + # "nl": "onderwijzer" + # }, + # "id": "http://purl.org/dcx/lrmi-vocabs/educationalAudienceRole/teacher" + # }, + if audience_dicts: + for audience in audience_dicts: + # ToDo: we could use prefLabel values of "de" or "en" as fallbacks in the future (if necessary) + if "id" in audience: + audience_id_url: str = audience["id"] + if audience_id_url: + audience_key: str = audience_id_url.split("/")[-1] + if audience_key: + if audience_key in self.MAPPING_AUDIENCE_TO_INTENDED_END_USER_ROLE: + audience_key = self.MAPPING_AUDIENCE_TO_INTENDED_END_USER_ROLE.get(audience_key) + vs.add_value("intendedEndUserRole", audience_key) + base.add_value("valuespaces", vs.load_item()) license_loader = LicenseItemLoader() From af7f0fa79eee945c323131f7250e34989e0eb9dc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sun, 26 Mar 2023 16:50:42 +0200 Subject: [PATCH 278/590] reduce amount of initial ElasticSearch requests - we're expecting roughly 62.000+ items in the ElasticSearch index of OERSI -- therefore we'll try bigger iteration 
steps to not cause as many API Requests during the initial gathering of OERSI items Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 73058392..57aaf547 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -48,7 +48,7 @@ class OersiSpider(scrapy.Spider, LomBase): ELASTIC_PARAMETER_KEEP_ALIVE: str = "1m" # for reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#time-units ELASTIC_PARAMETER_REQUEST_SIZE: int = ( - 1000 # maximum: 10.000, but responses for bigger request sizes take significantly longer + 5000 # maximum: 10.000, but responses for bigger request sizes take significantly longer ) ELASTIC_PIT_ID: dict = dict() From 56e5ce297fc988f5859f26c154c0190a0c9039e9 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Mar 2023 15:01:52 +0200 Subject: [PATCH 279/590] feat: lifecycle 'title'-field - this field can be used to store an optional string for academic titles of a PERSON (e.g. role "author") -- OERSI.org provides these titles in a field called 'honorificPrefix' - ToDo: before activating this field in edu_sharing_base.py, it should be tested with example values from OERSI Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 3 +++ converter/items.py | 15 +++++++++------ .../spiders/base_classes/edu_sharing_base.py | 4 ++++ 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index eb8ebc29..43c75af5 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -384,6 +384,7 @@ def transformItem(self, uuid, spider, item): # convert to a vcard string firstName = person["firstName"] if "firstName" in person else "" lastName = person["lastName"] if "lastName" in person else "" + title: str = person["title"] if "title" in person else "" organization = ( person["organization"] if "organization" in person else "" ) @@ -399,6 +400,8 @@ def transformItem(self, uuid, spider, item): if organization else (firstName + " " + lastName).strip() ) + if title: + vcard.add("title").value = title if date: vcard.add("X-ES-LOM-CONTRIBUTE-DATE").value = date.isoformat() if person["role"].lower() == 'publisher': diff --git a/converter/items.py b/converter/items.py index ed7b9d81..c61a0cb0 100644 --- a/converter/items.py +++ b/converter/items.py @@ -72,17 +72,20 @@ class LomLifecycleItem(Item): The role 'unknown' is used for contributors in an unknown capacity ("Mitarbeiter"). """ - role = Field() + date = Field() + """The (publication) date of a contribution. Date values will be automatically transformed/parsed. + Corresponding edu-sharing property: 'ccm:published_date'""" + email = Field() firstName = Field() lastName = Field() organization = Field() - email = Field() + role = Field() + title = Field() + """The (academic) title of a person. String value will be prefixed to '(title) firstName lastName' and written into + the vCard-field 'TITLE'. + """ url = Field() uuid = Field() - date = Field() - """The (publication) date of a contribution. Date values will be automatically transformed/parsed. 
- Corresponding edu-sharing property: 'ccm:published_date' - """ class LomTechnicalItem(Item): diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 1924b49d..459faa29 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -193,6 +193,10 @@ def get_lifecycle_from_vcard_string(lifecycle: LomLifecycleItemloader, role, vca lifecycle.add_value("role", role) lifecycle.add_value("firstName", given) lifecycle.add_value("lastName", family) + # ToDo: test the 'title'-field before activating it + # if hasattr(vcard, "title"): + # title: str = vcard.title.value + # lifecycle.add_value("title", title) if hasattr(vcard, "email"): # ToDo: recognize multiple emails vcard_email: str = vcard.email.value From b3282c00ab55d3a8be9a5926da6345f8c66a4940 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Mar 2023 15:07:52 +0200 Subject: [PATCH 280/590] oersi_spider v0.0.8 - feat: academic titles -> lifecycle -- basic string validation was necessary because some metadata-providers provide invalid strings for this field -- if a person carries an academic title, this string value will be saved to 'lifecycle.title' -- the OERSI field 'honorificPrefix' is an optional field within "creator"- or "contributor"-items - code cleanup: 'sourceOrganization' -- during testing it was observed that the OERSI-specific 'sourceOrganization'-field (which is not part of the official AMB specification) simply contains the same values as can be found within the optional 'affiliation'-field --- 'affiliation'-fields are optional sub-fields of "creator"- or "contributor"-items Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 89 +++++++++++++++---------------- 1 file changed, 44 insertions(+), 45 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 57aaf547..fb107c5d 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -35,7 +35,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.0.7" # last update: 2023-03-24 + version = "0.0.8" # last update: 2023-03-28 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -336,18 +336,19 @@ def get_lifecycle_author( if creator_item.get("type") == "Person": lifecycle_author.add_value("role", "author") author_name: str = creator_item.get("name") - # ToDo: 'honorificPrefix' yields dirty values which need to be cleaned up first and need to be - # checked for edge-cases before we can gather data from this field - # examples from metadata-provider 'ORCA.nrw': - # "Dr.", - # "Prof.", - # "http://hbz-nrw.de/regal#academicDegree/unkown", - # "unknown", - # if "honorificPrefix" in creator_item: - # honorific_prefix = creator_item["honorificPrefix"] - # if honorific_prefix: - # author_name = f"{honorific_prefix} {author_name}" - authors.append(author_name) # this string is going to be used in the license field "author" + academic_title: str = str() + if "honorificPrefix" in creator_item: + # the 'honorificPrefix'-field is described in the 'creator'-json-scheme: + # https://dini-ag-kim.github.io/amb/draft/schemas/creator.json + honorific_prefix = creator_item["honorificPrefix"] + if honorific_prefix: + academic_title = self.validate_academic_title_string(honorific_prefix) + if 
academic_title: + lifecycle_author.add_value("title", academic_title) + author_name_prefixed_with_academic_title = f"{academic_title} {author_name}" + authors.append(author_name_prefixed_with_academic_title) + if not academic_title: + authors.append(author_name) # this string is going to be used in the license field "author" self.split_names_if_possible_and_add_to_lifecycle( name_string=author_name, lifecycle_item_loader=lifecycle_author ) @@ -363,6 +364,23 @@ def get_lifecycle_author( lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) return authors + @staticmethod + def validate_academic_title_string(honorific_prefix: str) -> str: + """ + Some metadata-providers provide weird values for the 'honorificPrefix'-attribute within a "creator"- or + "contributor"-item. This method checks for known edge-cases and drops the string if necessary. + See: https://dini-ag-kim.github.io/amb/draft/#dfn-creator + Check for truthiness after using this method! If a known edge-case was detected, it will return an empty string. + """ + # Typical edge-cases for the 'honorificPrefix'-field that have been noticed so far: + # ORCA.nrw: "http://hbz-nrw.de/regal#academicDegree/unkown", "unknown", + # Open Textbook Library: single backticks + if "unknown" in honorific_prefix or "unkown" in honorific_prefix or len(honorific_prefix) == 1: + logging.debug(f"'honorificPrefix'-validation: The string {honorific_prefix} was recognized as an invalid " + f"edge-case value. Deleting string...") + honorific_prefix = "" + return honorific_prefix.strip() + def get_lifecycle_contributor( self, lom_base_item_loader: LomBaseItemloader, @@ -381,18 +399,21 @@ def get_lifecycle_contributor( lifecycle_contributor.add_value("role", "unknown") contributor_name: str = contributor_item.get("name") if contributor_name: - # ToDo: activate honorificPrefix in a later version (when having solved the problem for 'creator') - # if "honorificPrefix" in contributor_item: - # honorific_prefix: str = contributor_item["honorificPrefix"] - # if honorific_prefix: - # contributor_name = f"{honorific_prefix} {contributor_name}" + if "honorificPrefix" in contributor_item: + honorific_prefix: str = contributor_item["honorificPrefix"] + if honorific_prefix: + academic_title: str = self.validate_academic_title_string(honorific_prefix) + if academic_title: + lifecycle_contributor.add_value("title", academic_title) + # contributor_name_prefixed_with_academic_title = f"{academic_title} {contributor_name}" if author_list: - if contributor_name in author_list: - # OMA lists one author, but also lists the same person as a "contributor", - # therefore causing the same person to appear both as author and unknown contributor in - continue - # removing trailing whitespaces before further processing of the string + for author_string in author_list: + if contributor_name in author_string: + # OMA lists one author, but also lists the same person as a "contributor", + # therefore causing the same person to appear both as author and unknown contributor + continue contributor_name = contributor_name.strip() + # removing trailing whitespaces before further processing of the string if "type" in contributor_item: if contributor_item.get("type") == "Person": self.split_names_if_possible_and_add_to_lifecycle( @@ -659,28 +680,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): lom_base_item_loader=lom, elastic_item_source=elastic_item_source, date_published=date_published ) - # ToDo: 'sourceOrganization' doesn't appear in OMA results, but will 
be available for other providers - # each item can have multiple 'soureOrganization' dictionaries attached to it, which typically look like - # { - # "type": "Organization", - # "name": "Universität Innsbruck" - # } - # if "sourceOrganization" in elastic_item_source: - # # attention: the "sourceOrganization"-field is not part of the AMB draft - # # see: https://github.com/dini-ag-kim/amb/issues/110 - # # it is used by OERSI to express affiliation to an organization (instead of the AMB 'affiliation'-field) - # lifecycle_org = LomLifecycleItemloader() - # source_organizations: list = elastic_item_source.get('sourceOrganization') - # for source_org_item in source_organizations: - # if "id" in source_org_item: - # source_org_url = source_org_item.get('id') - # lifecycle_org.add_value('url', source_org_url) - # if "name" in source_org_item: - # source_org_name = source_org_item.get('name') - # lifecycle_org.add_value('organization', source_org_name) - # # source_org_type = source_org_item.get('type') # e.g.: "Organization", "CollegeOrUniversity" etc. - # lom.add_value('lifecycle', lifecycle_org.load_item()) - educational = LomEducationalItemLoader() if in_languages: for language_value in in_languages: From 07a2fe9fcce39bded9fd9600c69ce5ec8d8e4405 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 31 Mar 2023 18:11:25 +0200 Subject: [PATCH 281/590] feat: lifecycle identifiers - feat: 4 new lifecycle metadata fields for URI identifiers -- supports URI values for GND / ORCID / ROR / Wikidata - docs: documentation within items.py and sample_spider_alternative -- todo: once was OERSI was crawled, edu_sharing_base needs to be extended for these new features Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 12 ++++++++++++ converter/items.py | 13 +++++++++++++ converter/spiders/base_classes/edu_sharing_base.py | 9 +++++++++ converter/spiders/sample_spider_alternative.py | 5 +++++ 4 files changed, 39 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 43c75af5..5c661e05 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -391,6 +391,10 @@ def transformItem(self, uuid, spider, item): url = person["url"] if "url" in person else "" email = person["email"] if "email" in person else "" date = person["date"] if "date" in person else None + id_gnd: str = person["id_gnd"] if "id_gnd" in person else "" + id_orcid: str = person["id_orcid"] if "id_orcid" in person else "" + id_ror: str = person["id_ror"] if "id_ror" in person else "" + id_wikidata: str = person["id_wikidata"] if "id_wikidata" in person else "" vcard = vobject.vCard() vcard.add("n").value = vobject.vcard.Name( family=lastName, given=firstName @@ -400,6 +404,14 @@ def transformItem(self, uuid, spider, item): if organization else (firstName + " " + lastName).strip() ) + if id_gnd: + vcard.add("X-GND-URI").value = id_gnd + if id_orcid: + vcard.add("X-ORCID").value = id_orcid + if id_ror: + vcard.add("X-ROR").value = id_ror + if id_wikidata: + vcard.add("X-Wikidata").value = id_wikidata if title: vcard.add("title").value = title if date: diff --git a/converter/items.py b/converter/items.py index c61a0cb0..ae0a221a 100644 --- a/converter/items.py +++ b/converter/items.py @@ -86,6 +86,19 @@ class LomLifecycleItem(Item): """ url = Field() uuid = Field() + id_gnd = Field() + """The GND identifier (URI) of a PERSON, e.g. "https://d-nb.info/gnd/". 
+ Values will be written into the vCard namespace 'X-GND-URI'.""" + id_orcid = Field() + """The ORCID identifier (URI) of a PERSON, e.g. "https://orcid.org/". + Values will be written into the vCard namespace 'X-ORCID'.""" + id_ror = Field() + """The ROR identifier (URI) of an ORGANIZATION, e.g. "https://ror.org/". + Values will be written into the vCard namespace 'X-ROR'.""" + id_wikidata = Field() + """The Wikidata identifier (URI) of an ORGANIZATION, e.g. "https://www.wikidata.org/wiki/". + Values will be written into the vCard namespace 'X-Wikidata'.""" + class LomTechnicalItem(Item): diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 459faa29..49da62e8 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -197,6 +197,15 @@ def get_lifecycle_from_vcard_string(lifecycle: LomLifecycleItemloader, role, vca # if hasattr(vcard, "title"): # title: str = vcard.title.value # lifecycle.add_value("title", title) + # ToDo: implement identifiers (GND / ORCID / ROR / Wikidata) + # if hasattr(vcard, "x-gnd-uri"): + # pass + # if hasattr(vcard, "x-orcid"): + # pass + # if hasattr(vcard, "x-ror"): + # pass + # if hasattr(vcard, "x-wikidata"): + # pass if hasattr(vcard, "email"): # ToDo: recognize multiple emails vcard_email: str = vcard.email.value diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 5215c8e9..f462de9d 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -133,6 +133,11 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # - organization optional # - email optional # - uuid optional + # - title optional (academic title) + # - id_gnd optional (expected: URI) + # - id_orcid optional (expected: URI) + # - id_ror optional (expected: URI) + # - id_wikidata optional (expected: URI) lifecycle.add_value('role', 'author') # supported roles: # "author" / "editor" / "publisher" / "metadata_contributor" / "metadata_provider" / "unknown" From f2325aa7d51d8d7534239812060d2eb7f8d51a09 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 31 Mar 2023 18:18:54 +0200 Subject: [PATCH 282/590] oersi_spider v0.0.9 - feat: identifiers for persons and organizations -- URIs of GND / ORCID / ROR / Wikidata will be saved to the lifecycle vCard -- change: identifier URIs are checked first for known URL patterns of id-providers and (if recognized as one of the 4 supported ids) saved to its specific field --- the previous workaround which saved these identifiers either to lifecycle's 'url'- or 'uuid'-field is therefore no longer necessary and has been repurposed --- converter/spiders/oersi_spider.py | 97 +++++++++++++++++++++---------- 1 file changed, 65 insertions(+), 32 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index fb107c5d..2457ce30 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -35,7 +35,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.0.8" # last update: 2023-03-28 + version = "0.0.9" # last update: 2023-03-31 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -328,11 +328,11 @@ def get_lifecycle_author( affiliation_name = affiliation_item.get("name") 
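# For orientation, a minimal vobject sketch (not taken from the repository) of what the lifecycle 'title'- and
# identifier-fields introduced in the patches above produce on the edu-sharing side; it mirrors the vCard calls in
# es_connector.transformItem(), and the name, academic title and ORCID below are placeholder values:
import vobject

vcard = vobject.vCard()
vcard.add("n").value = vobject.vcard.Name(family="Doe", given="Jane")
vcard.add("fn").value = "Jane Doe"
vcard.add("title").value = "Prof. Dr."  # lifecycle 'title' (academic title)
vcard.add("X-ORCID").value = "https://orcid.org/0000-0000-0000-0000"  # lifecycle 'id_orcid' -> vCard 'X-ORCID'
print(vcard.serialize())
# the serialized output is expected to contain lines such as "FN:Jane Doe" and "X-ORCID:https://orcid.org/..."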
lifecycle_author.add_value("organization", affiliation_name) if "id" in affiliation_item: - # the affiliation.id is always a reference to GND, Wikidata or ROR - affiliation_url = affiliation_item.get("id") - # ToDo: fix edge-case where both the 'creator' and their affiliation have an "id" - # -> save as URL multi-value? - lifecycle_author.add_value("url", affiliation_url) + # according to the AMB spec, the affiliation.id should always be a reference to + # GND, Wikidata or ROR + self.lifecycle_determine_type_of_identifier_and_save_uri( + affiliation_item, lifecycle_item_loader=lifecycle_author + ) if creator_item.get("type") == "Person": lifecycle_author.add_value("role", "author") author_name: str = creator_item.get("name") @@ -352,15 +352,33 @@ def get_lifecycle_author( self.split_names_if_possible_and_add_to_lifecycle( name_string=author_name, lifecycle_item_loader=lifecycle_author ) - self.lifecycle_save_oersi_identifier_to_url_or_uuid( - person_dictionary=creator_item, + self.lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary=creator_item, lifecycle_item_loader=lifecycle_author, ) lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) elif creator_item.get("type") == "Organization": + # ToDo: find a solution for edge-case where "creator" is an organization which itself is affiliated + # to an organization, e.g.: + # [ + # { + # "affiliation": { + # "name": "RWTH Aachen", + # "id": "https://ror.org/04xfq0f34", + # "type": "Organization" + # }, + # "name": "OMB+-Konsortium", + # "type": "Organization" + # } + # ], + # the vCard standard 4.0 provides a "RELATED"-property which could be suitable for this edge-case, + # but both edu-sharing and the currently used "vobject"-package only support vCard standard v3.0 + # see: https://www.rfc-editor.org/rfc/rfc6350.html#section-6.6.6 creator_organization_name = creator_item.get("name") lifecycle_author.add_value("role", "author") lifecycle_author.add_value("organization", creator_organization_name) + self.lifecycle_determine_type_of_identifier_and_save_uri(item_dictionary=creator_item, + lifecycle_item_loader=lifecycle_author) lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) return authors @@ -376,8 +394,10 @@ def validate_academic_title_string(honorific_prefix: str) -> str: # ORCA.nrw: "http://hbz-nrw.de/regal#academicDegree/unkown", "unknown", # Open Textbook Library: single backticks if "unknown" in honorific_prefix or "unkown" in honorific_prefix or len(honorific_prefix) == 1: - logging.debug(f"'honorificPrefix'-validation: The string {honorific_prefix} was recognized as an invalid " - f"edge-case value. Deleting string...") + logging.debug( + f"'honorificPrefix'-validation: The string {honorific_prefix} was recognized as an invalid " + f"edge-case value. Deleting string..." 
+ ) honorific_prefix = "" return honorific_prefix.strip() @@ -426,8 +446,8 @@ def get_lifecycle_contributor( # id points to a URI reference of ORCID, GND, WikiData or ROR # (while this isn't necessary for OMA items yet (as they have no 'id'-field), it will be necessary # for other metadata providers once we extend the crawler) - self.lifecycle_save_oersi_identifier_to_url_or_uuid( - person_dictionary=contributor_item, + self.lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary=contributor_item, lifecycle_item_loader=lifecycle_contributor, ) if "affiliation" in contributor_item: @@ -436,12 +456,12 @@ def get_lifecycle_contributor( affiliation_dict: dict = contributor_item["affiliation"] # if the dictionary exists, it might contain the following fields: # - id (= URL to GND / ROR / Wikidata) - # - name (= human readable String) + # - name (= string containing the name of the affiliated organization) if affiliation_dict: if "id" in affiliation_dict: - affiliation_id_url: str = affiliation_dict["id"] - if affiliation_id_url: - lifecycle_contributor.add_value("url", affiliation_id_url) + self.lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary=affiliation_dict, lifecycle_item_loader=lifecycle_contributor + ) if "name" in affiliation_dict: affiliation_name: str = affiliation_dict["name"] if affiliation_name: @@ -520,25 +540,38 @@ def get_lifecycle_publisher( lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) @staticmethod - def lifecycle_save_oersi_identifier_to_url_or_uuid( - person_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader + def lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader ): """ - OERSI's author 'id'-field delivers both URLs and uuids in the same field. Since edu-sharing expects URLs and - uuids to be saved in separate fields, this method checks if the 'id'-field is available at all, and if it is, - determines if the string should be saved to the 'url' or 'uuid'-field of LomLifecycleItemLoader. + OERSI's "creator"/"contributor"/"affiliation" items might contain an 'id'-field which (optionally) provides + URI-identifiers that reference GND / ORCID / Wikidata / ROR. + This method checks if the 'id'-field is available at all, and if it is, determines if the string should be + saved to an identifier-specific field of LomLifecycleItemLoader. + If the URI string of "id" could not be recognized, it will save the value to 'lifecycle.url' as a fallback. """ - if "id" in person_dictionary: - author_uuid_or_url = person_dictionary.get("id") + if "id" in item_dictionary: + uri_string: str = item_dictionary.get("id") if ( - "orcid.org" in author_uuid_or_url - or "dnb.de" in author_uuid_or_url - or "wikidata.org" in author_uuid_or_url - or "ror.org" in author_uuid_or_url + "orcid.org" in uri_string + or "/gnd/" in uri_string + or "wikidata.org" in uri_string + or "ror.org" in uri_string ): - lifecycle_item_loader.add_value("url", author_uuid_or_url) + if "/gnd/" in uri_string: + lifecycle_item_loader.add_value("id_gnd", uri_string) + if "orcid.org" in uri_string: + lifecycle_item_loader.add_value("id_orcid", uri_string) + if "ror.org" in uri_string: + lifecycle_item_loader.add_value("id_ror", uri_string) + if "wikidata.org" in uri_string: + lifecycle_item_loader.add_value("id_wikidata", uri_string) else: - lifecycle_item_loader.add_value("uuid", author_uuid_or_url) + logging.info( + f"The URI identifier '{uri_string}' was not recognized. 
" + f"Fallback: Saving its value to 'lifecycle.url'." + ) + # lifecycle_item_loader.add_value("url", uri_string) @staticmethod def split_names_if_possible_and_add_to_lifecycle(name_string: str, lifecycle_item_loader: LomLifecycleItemloader): @@ -794,9 +827,9 @@ def parse(self, response: scrapy.http.Response, **kwargs): # e.g.: "https://w3id.org/kim/hochschulfaechersystematik/n78") # or alternatively "Schulfächer" (e.g. http://w3id.org/kim/schulfaecher/) if about_id: - # ToDo: at the moment OERSI exclusively provides university URL values, - # but might start providing "schulfaecher"-URLs as well in the future (-> mapping - # to 'discipline' will be necessary) + # at the moment OERSI exclusively provides university-specific URL values, + # but might start providing "schulfaecher"-URLs as well in the future (-> mapping + # to 'discipline' will be necessary) if about_id.startswith("https://w3id.org/kim/hochschulfaechersystematik/"): about_id_key = about_id.split("/")[-1] if about_id_key: From 7363c0591459a6d6999928ae55b4afe31b36a285 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 31 Mar 2023 18:57:04 +0200 Subject: [PATCH 283/590] update: LicenseMapper recognition of URLs ending in "/de" - while testing OERSI's metadata provider "HHU Mediathek", another edge-case for CreativeCommons URLs was found - fix: added the edge-case to our testsuite and fixed the LicenseMapper method, so it can handle these types of URL patterns in the future -- 35/35 tests passed Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/util/license_mapper.py | 4 ++++ converter/util/test_license_mapper.py | 1 + 2 files changed, 5 insertions(+) diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py index 4878b5f4..b45e60c6 100644 --- a/converter/util/license_mapper.py +++ b/converter/util/license_mapper.py @@ -156,6 +156,10 @@ def identify_cc_license(self, license_string: str) -> str | None: if regex_deed_hit: deed_hit = regex_deed_hit.group() license_url_candidate = license_url_candidate[: -len(deed_hit)] + # ToDo: while it (thankfully) hasn't happened yet, we have to assume that URLs ending in "/fr/" or "/es" + # could be problematic as well. 
Therefore: refactor the if-checks for "/de/" and "/de" asap + if license_url_candidate.endswith("/de"): + license_url_candidate = license_url_candidate[: -len("de")] if license_url_candidate.endswith("/de/"): license_url_candidate = license_url_candidate[: -len("de/")] for valid_license_url in Constants.VALID_LICENSE_URLS: diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py index c4ace994..85238007 100644 --- a/converter/util/test_license_mapper.py +++ b/converter/util/test_license_mapper.py @@ -37,6 +37,7 @@ class TestLicenseMapper: # ToDo: Apache / BSD / GNU GPL licenses can't be mapped at the moment ("https://www.gnu.org/licenses/gpl-3.0", None), ("https://opensource.org/licenses/MIT", None), + ("http://creativecommons.org/licenses/by/3.0/de", Constants.LICENSE_CC_BY_30), ], ) def test_get_license_url(self, test_input, expected_result): From 4da69ca471febd1f22aca2a6d118afcf1ac6a61b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sat, 1 Apr 2023 20:16:01 +0200 Subject: [PATCH 284/590] refactor: LicenseMapper 2-char language code detection - by using a more general RegEx pattern, we should be able to detect all (currently available) language codes that might be appended to CreativeCommons license URLs - added 2 more tests to the testsuite - 37/37 tests passed Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/util/license_mapper.py | 16 ++++++++++++---- converter/util/test_license_mapper.py | 2 ++ 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py index b45e60c6..04cfaa35 100644 --- a/converter/util/license_mapper.py +++ b/converter/util/license_mapper.py @@ -158,10 +158,18 @@ def identify_cc_license(self, license_string: str) -> str | None: license_url_candidate = license_url_candidate[: -len(deed_hit)] # ToDo: while it (thankfully) hasn't happened yet, we have to assume that URLs ending in "/fr/" or "/es" # could be problematic as well. Therefore: refactor the if-checks for "/de/" and "/de" asap - if license_url_candidate.endswith("/de"): - license_url_candidate = license_url_candidate[: -len("de")] - if license_url_candidate.endswith("/de/"): - license_url_candidate = license_url_candidate[: -len("de/")] + url_ending_in_two_char_language_code_regex = re.compile(r"/([a-z]{2}/?)$") + # RegEx pattern for handling URLs that end in "/de", "/de/", "/fr", "/es/" etc. 
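# Illustrative check (not part of the patch) of what the pattern above matches; assumes only the
# standard library 're' module and uses URLs from the accompanying testsuite:
import re

lang_code_suffix = re.compile(r"/([a-z]{2}/?)$")
print(lang_code_suffix.search("http://creativecommons.org/licenses/by/3.0/de").group())    # -> "/de"
print(lang_code_suffix.search("https://creativecommons.org/licenses/by/3.0/es/").group())  # -> "/es/"
print(lang_code_suffix.search("https://creativecommons.org/licenses/by/4.0/"))             # -> None (nothing to strip)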
+ two_char_language_code_hit = url_ending_in_two_char_language_code_regex.search(license_url_candidate) + if two_char_language_code_hit: + # checks if the URL pattern ends in "/de", "/de/" or any other type of 2-char language code, e.g.: + # http://creativecommons.org/licenses/by/3.0/de or https://creativecommons.org/licenses/by/3.0/es/ + # and only keeps the part of the string that can be recognized by the pipeline + url_language_code_trail: str = two_char_language_code_hit.group() + if url_language_code_trail: + # the url_language_code_trail will typically look like "/de/" or "/de", but we only want to cut off + # the 2-char language code and its trailing slash, but keep the first slash intact + license_url_candidate = license_url_candidate[: -len(url_language_code_trail) +1] for valid_license_url in Constants.VALID_LICENSE_URLS: if license_url_candidate in valid_license_url: return valid_license_url diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py index 85238007..7de1084c 100644 --- a/converter/util/test_license_mapper.py +++ b/converter/util/test_license_mapper.py @@ -38,6 +38,8 @@ class TestLicenseMapper: ("https://www.gnu.org/licenses/gpl-3.0", None), ("https://opensource.org/licenses/MIT", None), ("http://creativecommons.org/licenses/by/3.0/de", Constants.LICENSE_CC_BY_30), + ("https://creativecommons.org/licenses/by/3.0/es/", Constants.LICENSE_CC_BY_30), + ("https://creativecommons.org/licenses/by/3.0/fr", Constants.LICENSE_CC_BY_30), ], ) def test_get_license_url(self, test_input, expected_result): From 37c6cd7d562f2b17fe92d62018ec731352ed574d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sat, 1 Apr 2023 21:12:08 +0200 Subject: [PATCH 285/590] feat: MediaWikiBase "categories" to "new_lrt"-mapping - during testing for Lisum and the following analysis/evaluation of all ZUM Mediawiki "categories", some mapping possibilities were noticed -- after consulting with Romy on 2023-03-30, we implemented her mapping suggestions in this commit - this should improve the metadata quality for all current "MediaWiki"-based crawlers (ZUM Deutsch Lernen, ZUM Klexikon, ZUM Unterrichten) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .../spiders/base_classes/mediawiki_base.py | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/mediawiki_base.py b/converter/spiders/base_classes/mediawiki_base.py index d60e2f45..65bcbd8a 100644 --- a/converter/spiders/base_classes/mediawiki_base.py +++ b/converter/spiders/base_classes/mediawiki_base.py @@ -248,10 +248,30 @@ def getLOMTechnical(self, response=None) -> LomTechnicalItemLoader: def getValuespaces(self, response): loader = super().getValuespaces(response) data = response.meta['item'] - categories = jmes_categories.search(data) # ['Ethik', 'Sekundarstufe_1'] + categories: list[str] = jmes_categories.search(data) # ['Ethik', 'Sekundarstufe_1'] if categories: loader.add_value("discipline", categories) loader.add_value("educationalContext", categories) loader.add_value("intendedEndUserRole", categories) + for category in categories: + # ZUM MediaWiki "category"-strings can consist of several words. We're looking for individual parts of + # the whole string and use a search-hit as our indicator to set the corresponding "new_lrt"-value. 
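# Purely illustrative, standalone variant (NOT the shipped implementation, which is the if-chain right
# below): the same substring-to-'new_lrt' idea expressed as a small lookup table. The UUIDs are copied
# from the mapping below; the category strings are made up for demonstration.
SUBSTRING_TO_NEW_LRT: dict[str, str] = {
    "arbeitsblatt": "36e68792-6159-481d-a97b-2c00901f4f78",  # "Arbeitsblatt"
    "lernpfad": "ad9b9299-0913-40fb-8ad3-50c5fd367b6a",      # "Lernpfad, Lernobjekt"
    "übung": "a33ef73d-9210-4305-97f9-7357bbf43486",         # "Übungsmaterial"
}
for demo_category in ["Interaktive_Übung", "Lernpfad_Optik"]:
    for substring, new_lrt_uuid in SUBSTRING_TO_NEW_LRT.items():
        if substring in demo_category.lower():
            print(f"{demo_category} -> new_lrt {new_lrt_uuid}")  # the spider would call loader.add_value("new_lrt", ...)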
+ category: str = str(category).lower() + if "arbeitsblatt" in category: + loader.add_value("new_lrt", "36e68792-6159-481d-a97b-2c00901f4f78") # "Arbeitsblatt" + if "erklärvideo" in category: + loader.add_value("new_lrt", "a0218a48-a008-4975-a62a-27b1a83d454f") # "Erklärvideo und gefilmtes Experiment" + if "lernpfad" in category: + loader.add_value("new_lrt", "ad9b9299-0913-40fb-8ad3-50c5fd367b6a") # "Lernpfad, Lernobjekt" + if "methode" in category: + loader.add_value("new_lrt", "0a79a1d0-583b-47ce-86a7-517ab352d796") # "Methode" + if "tool" in category: + loader.add_value("new_lrt", "cefccf75-cba3-427d-9a0f-35b4fedcbba1") # "Tool" + if "unterrichtsidee" in category: + loader.add_value("new_lrt", "94222751-6c90-4623-9c7e-09e21d885599") # Unterrichtsidee + if "video" in category: + loader.add_value("new_lrt", "7a6e9608-2554-4981-95dc-47ab9ba924de") # "Video" + if "übung" in category: + loader.add_value("new_lrt", "a33ef73d-9210-4305-97f9-7357bbf43486") # "Übungsmaterial" loader.add_value("new_lrt", "6b9748e4-fb3b-4082-ae08-c7a11c717256") # "Wiki (dynamisch)" return loader From 15f6430fbb9a84d8d212119d345dfab6f727f297 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Apr 2023 14:35:06 +0200 Subject: [PATCH 286/590] feat: edu_sharing_precheck helper utility - implements a way to check all 'ccm:replicationsourceid's of a given edu-sharing "saved search"-node-ID - can be controlled by the '.env'-variable: -- "CONTINUE_CRAWL": expected to be True or False -- "EDU_SHARING_PRECHECK_SAVED_SEARCH_ID": expects a valid node-ID string value of a "saved search"-filter of the repository that is set within "EDU_SHARING_BASE_URL" --- converter/util/edu_sharing_precheck.py | 164 +++++++++++++++++++++++++ 1 file changed, 164 insertions(+) create mode 100644 converter/util/edu_sharing_precheck.py diff --git a/converter/util/edu_sharing_precheck.py b/converter/util/edu_sharing_precheck.py new file mode 100644 index 00000000..54cceb1d --- /dev/null +++ b/converter/util/edu_sharing_precheck.py @@ -0,0 +1,164 @@ +import logging + +import requests + +from converter import env + + +class EduSharingPreCheck: + """ + Helper class to continue previously aborted crawl processes where they left off (instead of crawling from the start + and checking/updating each item individually during the parse method). Gathers 'ccm:replicationsourceid's from a + pre-defined "saved search"-node-ID. + + Depending on the size of your "saved search", this pre-check might take a while. Each API response typically takes + about ~12s and is NOT controlled by Scrapy's program flow. + + Please make sure that your .env file has (valid) settings for: + EDU_SHARING_BASE -> this is typically the edu-sharing repository that you previously crawled against + EDU_SHARING_PRECHECK_SAVED_SEARCH_ID -> the node-ID of a "saved search" needs to be created within the edu-sharing + web-interface and can be looked up via the "debug"-view. 
+ """ + + edu_sharing_url = "https://localhost:8000/edu-sharing/" + # the edu_sharing_url will typically look like this: + # "https://localhost:8000/edu-sharing/rest/search/v1/queries/load/f702baeb-c0c5-4abc-9171-95f9a5d3fac9" + # "" + + edu_sharing_rest_api_path = "rest/search/v1/queries/load/" + saved_search_node_id = "" + max_item_parameter = 500 # ToDo: keep an eye on connection timeouts depending on the request size + skip_item_parameter = 0 + # ToDo: .env variables -> .env.example documentation + # ToDo: (optional feature) caching to local file, so we don't have to wait every time for the full API Pagination + + querystring = { + "contentType": "FILES", + "propertyFilter": "ccm:replicationsourceid", + "maxItems": f"{max_item_parameter}", + f"skipCount": f"{skip_item_parameter}", + "sortProperties": "cm:created", + "sortAscending": "true", + } + + payload = "" + + logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.DEBUG) + + replication_source_id_list: list[str] = list() + + def __init__(self): + self.set_edu_sharing_url_from_dot_env() + self.build_query_string() + + def set_edu_sharing_url_from_dot_env(self): + """ + Checks the '.env'-file for two required variables: + EDU_SHARING_BASE_URL & EDU_SHARING_PRECHECK_SAVED_SEARCH_ID + and sets the class member variable for API Pagination. + """ + edu_sharing_url: str = env.get("EDU_SHARING_BASE_URL", True, None) + saved_search_node_id: str = env.get("EDU_SHARING_PRECHECK_SAVED_SEARCH_ID", True, None) + logging.info( + f"PreCheck utility warmup: Checking '.env'-file for EDU_SHARING_BASE_URL and " + f"EDU_SHARING_PRECHECK_SAVED_SEARCH_ID ..." + ) + if edu_sharing_url and saved_search_node_id: + url_combined: str = f"{edu_sharing_url}{self.edu_sharing_rest_api_path}{saved_search_node_id}" + logging.info( + f"PreCheck utility: Recognized .env settings for CONTINUED crawl. Assembled URL string: " + f"{url_combined}" + ) + self.edu_sharing_url = url_combined + self.saved_search_node_id = saved_search_node_id + else: + logging.error( + f"PreCheck utility: Could not retrieve valid .env settings for EDU_SHARING_BASE and " + f"EDU_SHARING_PRECHECK_SAVED_SEARCH_ID. Please make sure that both settings are valid if " + f"you want to COMPLETE/COMPLEMENT a previously aborted crawl." + ) + + def build_query_string(self): + self.querystring = { + "contentType": "FILES", + "propertyFilter": "ccm:replicationsourceid", + "maxItems": f"{self.max_item_parameter}", + f"skipCount": f"{self.skip_item_parameter}", + "sortProperties": "cm:created", + "sortAscending": "true", + } + + def collect_replication_source_ids_from_nodes(self, response: requests.Response): + """ + Collects the 'ccm:replicationsourceid'-values from each node of the edu-sharing API response and queries the + next API page. + """ + json_response = response.json() + nodes: list[dict] = json_response["nodes"] + logging.info(f"Collecting 'ccm:replicationsourceid's from: {response.url}") + if nodes: + # as long as there are nodes, we haven't reached the final page of the API yet. + for node in nodes: + if "properties" in node: + id_list = node["properties"]["ccm:replicationsourceid"] + for replication_source_id in id_list: + if replication_source_id not in self.replication_source_id_list: + # since Python sets are more memory-expensive than lists, this basic if-check will do. 
+ self.replication_source_id_list.append(replication_source_id) + self.query_next_page() + else: + logging.info( + f"Reached the last API page: {response.url} // \nTotal amount of ids collected: {len(self.replication_source_id_list)}" + ) + + def query_next_page(self): + """ + Increments the API Pagination offset as specified by the 'max_item_parameter' and queries the next API page. + """ + self.skip_item_parameter += self.max_item_parameter + self.build_query_string() + next_api_page: requests.Response = requests.request( + "GET", self.edu_sharing_url, data=self.payload, params=self.querystring + ) + self.collect_replication_source_ids_from_nodes(next_api_page) + + def try_to_retrieve_replication_source_id_list(self) -> list[str] | None: + """ + If everything went smooth during API pagination, sorts the list of strings and returns it. + If the list is empty for some reason, logs a warning. + + @return: a list of 'ccm:replicationsourceid's or None + + """ + if self.replication_source_id_list: + logging.info( + f"PreCheck utility: Successfully collected {len(self.replication_source_id_list)} " + f"'ccm:replicationsourceid'-strings." + ) + self.replication_source_id_list.sort() + return self.replication_source_id_list + else: + logging.warning( + f"PreCheck utility: The list of 'ccm:replicationsourceid'-strings appears to be empty. " + f"This might happen if the API Pagination is interrupted by connection problems to the " + f"edu-sharing repo." + ) + + def get_replication_source_id_list(self) -> list[str]: + """ + The main loop of the edu-sharing PreCheck helper utility class. Use this method if you just want to grab a list + of 'ccm:replicationsourceid's for a given "saved search"-nodeID. + + @return: a sorted list of 'ccm:replicationsourceid's + + """ + expected_response = requests.request("GET", self.edu_sharing_url, data=self.payload, params=self.querystring) + self.collect_replication_source_ids_from_nodes(expected_response) + sorted_result_list = self.try_to_retrieve_replication_source_id_list() + return sorted_result_list + + +if __name__ == "__main__": + es_id_collector = EduSharingPreCheck() + replication_source_ids: list[str] = es_id_collector.get_replication_source_id_list() + print(replication_source_ids) From a5899697796b9dbe00aa29a8ad76ffb1cf599f41 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 21 Apr 2023 00:01:53 +0200 Subject: [PATCH 287/590] docs: edu-sharing PreCheck helper utility - add: "CONTINUE_CRAWL" and "EDU_SHARING_PRECHECK_SAVED_SEARCH" variables & explanations to .env.example Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/.env.example | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/converter/.env.example b/converter/.env.example index d12ef8ea..acac13e6 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -26,6 +26,12 @@ EDU_SHARING_BASE_URL = "http://localhost:8080/edu-sharing/" EDU_SHARING_USERNAME = "admin" EDU_SHARING_PASSWORD = "admin" +# Continue / complete a previously aborted crawl process by skipping updates of already known items. +CONTINUE_CRAWL=False +EDU_SHARING_PRECHECK_SAVED_SEARCH_ID="" +# Setting CONTINUE_CRAWL to True will skip all updates of previously crawled items and ONLY crawl new ones! +# ONLY use this mode if you wish to debug/complement/complete huge crawl processes which haven't completed on their own! 
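# Example values (hypothetical; the node-ID below is only a placeholder for a real "saved search" node):
# CONTINUE_CRAWL=True
# EDU_SHARING_PRECHECK_SAVED_SEARCH_ID="00000000-0000-0000-0000-000000000000"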
+ # Configure if permissions of edu-sharing nodes are handled by the crawler (default true) # You may want to set this to false if you don't want to apply permissions from crawlers or have a custom implementation in the repository # EDU_SHARING_PERMISSION_CONTROL=true From ab19758ededa2cfdb8ee50d736e218d652c85c07 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 21 Apr 2023 00:21:47 +0200 Subject: [PATCH 288/590] oersi_spider v0.1.0 - feat: implements "CONTINUE_CRAWL"-setting (controlled by '.env'-variables) to skip updates of already crawled items and complete a previously aborted crawl process -- use-case: if a previous crawl process errored out or got somehow stuck, this feature can be used to gather all 'ccm:replicationsourceid's and comparing them to OERSIs identifiers -- the '.env'-file MUST contain a valid string for the "EDU_SHARING_PRECHECK_SAVED_SEARCH_ID"-variable, which needs to be a node-ID of an edu-sharing "saved search"-filter Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 34 +++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 2457ce30..8857b26b 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -5,6 +5,7 @@ import requests import scrapy +from converter import env from converter.constants import Constants from converter.es_connector import EduSharing from converter.items import ( @@ -20,6 +21,7 @@ ResponseItemLoader, ) from converter.spiders.base_classes import LomBase +from converter.util.edu_sharing_precheck import EduSharingPreCheck from converter.util.license_mapper import LicenseMapper from converter.web_tools import WebEngine, WebTools @@ -35,7 +37,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.0.9" # last update: 2023-03-31 + version = "0.1.0" # last update: 2023-04-21 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -125,11 +127,31 @@ def __init__(self, **kwargs): logging.info(f"ElasticSearch API response (upon PIT delete): {json_response}") def start_requests(self): - for elastic_item in self.ELASTIC_ITEMS_ALL: - main_entity_of_page: list[dict] = elastic_item.get("_source").get("mainEntityOfPage") - if main_entity_of_page: - item_url = main_entity_of_page[0].get("id") - yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}) + continue_from_previous_crawl = env.get_bool("CONTINUE_CRAWL", True, False) + # checking if a previously aborted crawl should be completed (by skipping updates of previously collected items) + if continue_from_previous_crawl: + es_id_collector = EduSharingPreCheck() + previously_crawled_replication_source_ids: list[str] = es_id_collector.get_replication_source_id_list() + for elastic_item in self.ELASTIC_ITEMS_ALL: + elastic_item_identifier: str = elastic_item["_id"] + if elastic_item_identifier in previously_crawled_replication_source_ids: + logging.debug(f"Found Elastic item '_id': {elastic_item_identifier} within previously crawled " + f"results in the edu-sharing repository. 
Skipping item because '.env'-setting " + f"'CONTINUE_CRAWL' is enabled.") + continue + else: + yield from self.yield_request_and_parse_item(elastic_item) + else: + for elastic_item in self.ELASTIC_ITEMS_ALL: + yield from self.yield_request_and_parse_item(elastic_item) + + @staticmethod + def yield_request_and_parse_item(elastic_item) -> scrapy.Request: + main_entity_of_page: list[dict] = elastic_item.get("_source").get("mainEntityOfPage") + if main_entity_of_page: + item_url = main_entity_of_page[0].get("id") + # by omitting the callback parameter, individual requests are yielded to the parse-method + yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}) def elastic_pit_create(self) -> dict: """ From cdd174b443ebb76538b6cef0bf5563255c7a5ae8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 25 Apr 2023 22:02:56 +0200 Subject: [PATCH 289/590] oersi_spider v0.1.1 (squashed) - feat: 'sourceOrganization'-fallback -- if a metadata provider doesn't attach "affiliation"-values to its "creator"- or "contributor"-fields (e.g. metadata-provider "HOOU"), we're gathering a list of organizations from the undocumented 'sourceOrganization'-field -- this is a fallback for metadata-providers that don't adhere to the AMB spec of attaching "affiliation"-metadata directly to individuals/organizations -- this API behaviour might change (unannounced) in the future, so keep an eye out! - style: code formatting - docs: 'sourceOrganization'-fallback method --- converter/spiders/oersi_spider.py | 78 +++++++++++++++++++++++++++---- 1 file changed, 70 insertions(+), 8 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 8857b26b..41331b62 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -37,7 +37,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.0" # last update: 2023-04-21 + version = "0.1.1" # last update: 2023-04-27 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 32, @@ -135,9 +135,11 @@ def start_requests(self): for elastic_item in self.ELASTIC_ITEMS_ALL: elastic_item_identifier: str = elastic_item["_id"] if elastic_item_identifier in previously_crawled_replication_source_ids: - logging.debug(f"Found Elastic item '_id': {elastic_item_identifier} within previously crawled " - f"results in the edu-sharing repository. Skipping item because '.env'-setting " - f"'CONTINUE_CRAWL' is enabled.") + logging.debug( + f"Found Elastic item '_id': {elastic_item_identifier} within previously crawled " + f"results in the edu-sharing repository. Skipping item because '.env'-setting " + f"'CONTINUE_CRAWL' is enabled." 
+ ) continue else: yield from self.yield_request_and_parse_item(elastic_item) @@ -151,6 +153,7 @@ def yield_request_and_parse_item(elastic_item) -> scrapy.Request: if main_entity_of_page: item_url = main_entity_of_page[0].get("id") # by omitting the callback parameter, individual requests are yielded to the parse-method + # ToDo: findItem needs to happen here -> replicationsourceuuid yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}) def elastic_pit_create(self) -> dict: @@ -318,6 +321,7 @@ def get_lifecycle_author( self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, + organization_fallback: set[str], date_created: Optional[str] = None, date_published: Optional[str] = None, ): @@ -327,6 +331,7 @@ def get_lifecycle_author( :param lom_base_item_loader: LomBaseItemLoader where the collected metadata should be saved to :param elastic_item_source: the '_source'-field of the currently parsed OERSI elastic item + :param organization_fallback: a temporary set of strings of all affiliation 'name'-values :param date_created: OERSI 'dateCreated' value (if available) :param date_published: OERSI 'datePublished' value (if available) :returns: list[str] - list of authors (names) for later usage in the LicenseItemLoader @@ -349,6 +354,7 @@ def get_lifecycle_author( if "name" in affiliation_item: affiliation_name = affiliation_item.get("name") lifecycle_author.add_value("organization", affiliation_name) + organization_fallback.add(affiliation_name) if "id" in affiliation_item: # according to the AMB spec, the affiliation.id should always be a reference to # GND, Wikidata or ROR @@ -399,8 +405,9 @@ def get_lifecycle_author( creator_organization_name = creator_item.get("name") lifecycle_author.add_value("role", "author") lifecycle_author.add_value("organization", creator_organization_name) - self.lifecycle_determine_type_of_identifier_and_save_uri(item_dictionary=creator_item, - lifecycle_item_loader=lifecycle_author) + self.lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary=creator_item, lifecycle_item_loader=lifecycle_author + ) lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) return authors @@ -427,6 +434,7 @@ def get_lifecycle_contributor( self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, + organization_fallback: set[str], author_list: Optional[list[str]] = None, ): """ @@ -488,7 +496,7 @@ def get_lifecycle_contributor( affiliation_name: str = affiliation_dict["name"] if affiliation_name: lifecycle_contributor.add_value("organization", affiliation_name) - + organization_fallback.add(affiliation_name) lom_base_item_loader.add_value("lifecycle", lifecycle_contributor.load_item()) @staticmethod @@ -561,6 +569,48 @@ def get_lifecycle_publisher( lifecycle_publisher.add_value("date", date_published) lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) + def get_lifecycle_organization_from_source_organization_fallback( + self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] + ): + # ATTENTION: the "sourceOrganization"-field is not part of the AMB draft, therefore this method is currently + # used a fallback, so we don't lose any useful metadata (even if that metadata is not part of the AMB spec). 
+ # see: https://github.com/dini-ag-kim/amb/issues/110 + # 'sourceOrganization' is an OERSI-specific (undocumented) field: it is used by OERSI to express an affiliation + # to an organization (which is normally covered by the AMB 'affiliation'-field). + # it appears to be implemented in two distinct ways: + # 1) For metadata providers which use the "affiliation"-field within "creator" or "contributor", the + # 'sourceOrganization'-field does not contain any useful (additional) data. It's basically a set of all + # "affiliation"-values (without any duplicate entries). -> In this case we SKIP it completely! + # 2) For metadata-providers which DON'T provide any "affiliation"-values, the 'sourceOrganization'-field will + # contain metadata about organizations without being attached to a person. (Therefore it can only be + # interpreted as lifecycle role 'unknown' (= contributor in unknown capacity). + # ToDo: periodically confirm if this fallback is still necessary (check the OERSI API / AMB spec!) + source_organizations: list = elastic_item_source.get("sourceOrganization") + for source_org_item in source_organizations: + if "name" in source_org_item: + source_org_name = source_org_item.get("name") + if source_org_name in organization_fallback: + # if the 'sourceOrganization' name is already in our organization list, skip this loop + continue + lifecycle_org = LomLifecycleItemloader() + lifecycle_org.add_value("role", "unknown") + lifecycle_org.add_value("organization", source_org_name) + if "id" in source_org_item: + # the "id"-field is used completely different between metadata-providers: + # for some providers ("HOOU") it contains just the URL to their website (= not a real identifier), + # but other metadata-providers provide an actual identifier (e.g. to ror.org) within this field. + # Therefore, we're checking which type of URI it is first before saving it to a specific field + self.lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary=source_org_item, lifecycle_item_loader=lifecycle_org + ) + # ToDo: sometimes there are more possible fields within a 'sourceOrganization', e.g.: + # - image (-> ?) + # - logo (-> ?) + if "url" in source_org_item: + org_url: str = source_org_item.get("url") + lifecycle_org.add_value("url", org_url) + lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) + @staticmethod def lifecycle_determine_type_of_identifier_and_save_uri( item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader @@ -600,7 +650,7 @@ def split_names_if_possible_and_add_to_lifecycle(name_string: str, lifecycle_ite """ Splits a string containing a person's name - if there's a whitespace within that string - into two parts: first_name and last_name. - Afterwards saves the split-up values to their respective 'lifecycle'-fields or saves the string as a whole. + Afterward saves the split values to their respective 'lifecycle'-fields or saves the string as a whole. 
""" if " " in name_string: name_parts = name_string.split(maxsplit=1) @@ -718,16 +768,20 @@ def parse(self, response: scrapy.http.Response, **kwargs): technical.add_value("location", response.url) lom.add_value("technical", technical.load_item()) + organizations_from_affiliation_fields: set[str] = set() + authors = self.get_lifecycle_author( lom_base_item_loader=lom, elastic_item_source=elastic_item_source, date_created=date_created, date_published=date_published, + organization_fallback=organizations_from_affiliation_fields, ) self.get_lifecycle_contributor( lom_base_item_loader=lom, elastic_item_source=elastic_item_source, + organization_fallback=organizations_from_affiliation_fields, author_list=authors, ) @@ -735,6 +789,13 @@ def parse(self, response: scrapy.http.Response, **kwargs): lom_base_item_loader=lom, elastic_item_source=elastic_item_source, date_published=date_published ) + if "sourceOrganization" in elastic_item_source: + self.get_lifecycle_organization_from_source_organization_fallback( + elastic_item_source=elastic_item_source, + lom_item_loader=lom, + organization_fallback=organizations_from_affiliation_fields, + ) + educational = LomEducationalItemLoader() if in_languages: for language_value in in_languages: @@ -926,6 +987,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): base.add_value("permissions", permissions.load_item()) response_loader = ResponseItemLoader(response=response) + # ToDo: skip the scrapy.Request altogether? (-> would be a huge time benefit) response_loader.add_value("status", response.status) url_data = WebTools.getUrlData(url=response.url, engine=WebEngine.Playwright) if "html" in url_data: From 429aa8fb0094bef52afe1fd3b28460bf06a92e99 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 27 Apr 2023 00:41:27 +0200 Subject: [PATCH 290/590] fix: vCARD urls split up by newlines - the vCARD 'url'-attribute was getting split up at (seemingly arbitrary) places, causing the URLs to arrive broken in the edu-sharing front-end, even though they were completely fine in the crawler .log file -- (as it turned out during debugging with Torsten, the strings were actually split up by newlines) -- this was caused by the "vobject"-package, which uses a default "lineLength"-parameter of 75 when you call its "serialize"-method -- by setting the lineLength to 10.000, this problem should not happen again in the near future for any of the other vCARD attributes - style: remove whitespace (typo) --- converter/es_connector.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 5c661e05..d69b268a 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -393,7 +393,7 @@ def transformItem(self, uuid, spider, item): date = person["date"] if "date" in person else None id_gnd: str = person["id_gnd"] if "id_gnd" in person else "" id_orcid: str = person["id_orcid"] if "id_orcid" in person else "" - id_ror: str = person["id_ror"] if "id_ror" in person else "" + id_ror: str = person["id_ror"] if "id_ror" in person else "" id_wikidata: str = person["id_wikidata"] if "id_wikidata" in person else "" vcard = vobject.vCard() vcard.add("n").value = vobject.vcard.Name( @@ -423,14 +423,20 @@ def transformItem(self, uuid, spider, item): # fix a bug of split org values vcard.org.behavior = VCardBehavior.defaultBehavior vcard.org.value = organization - vcard.add("url").value = url + if url: + vcard.add("url") + vcard.url.value = url if 
email: vcard.add("EMAIL;TYPE=PREF,INTERNET").value = email if mapping in spaces: # checking if a vcard already exists for this role: if so, extend the list - spaces[mapping].append(vcard.serialize()) + spaces[mapping].append(vcard.serialize(lineLength=10000)) + # default of "lineLength" is 75, which is too short for longer URLs. We're intentionally setting an + # absurdly long lineLength, so we don't run into the problem where vCARD attributes like 'url' would + # get split up with a '\r\n '-string inbetween, which would cause broken URLs in the final vCard + # string and therefore broken links in the edu-sharing front-end else: - spaces[mapping] = [vcard.serialize()] + spaces[mapping] = [vcard.serialize(lineLength=10000)] valuespaceMapping = { "accessibilitySummary": "ccm:accessibilitySummary", From 6e676e050ae0f50bec32480a100467b58ae100c1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 27 Apr 2023 00:48:44 +0200 Subject: [PATCH 291/590] style: add "black"-package to requirements.txt - since the "black" package is helping immensely with code formatting, adding it to the requirements so it doesn't have to be installed manually -- see: https://github.com/psf/black and https://black.readthedocs.io/en/stable/ - oersi_spider was formatted with a line length setting of 120 (since pyCharm is using the same line length by default) -- example usage for an individual crawler: "black -l 120 converter/spiders/oersi_spider.py" --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e536f5ca..3c10cdd5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,4 +24,5 @@ six==1.16.0 certifi==2022.12.7 urllib3~=1.26.09 playwright==1.30.0 -pyOpenSSL==22.1.0 \ No newline at end of file +pyOpenSSL==22.1.0 +black==23.3.0 \ No newline at end of file From 4732e692c8d42c135ddb0cf4e5a8ebd511630eac Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 27 Apr 2023 12:33:28 +0200 Subject: [PATCH 292/590] oersi_spider v0.1.2 (squashed) - add: two new metadata-providers -- "langSci Press" (202 items expected) -- "OEPMS" (75 items expected) - perf: check item if an update is necessary before sending out a scrapy.Request -- feat: "get_uuid()"-method --- since we cannot use the parent's "getUUID()"-method at the time of our check, the item's url is gathered from "_source.id" - perf: distribute scrapy.Requests across several domains by shuffling the list of ElasticSearch Items from OERSI prior to the actual crawl -- previously the crawler would work through all items of before jumping to the list of etc. 
-- since we have no control over the scrapy scheduler, we can at least split the load a little bit between target domains (which also benefits scrapys Autothrottle -> targets don't throttle us as fast) - fix: getHash()-method -- during debugging of v0.1.2 it was observed that hashes used fallbacks more often than expected --- the method looked for date values on the ElasticSearch item, but actually needed to look at item["_source"] instead -- v0.1.2 of the crawler should preferably one runce with the 'forceUpdate'-flag to override previous hash values where a fallback date was used instead of the preferred dates for hash generation - style: docs & code formatting Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 66 ++++++++++++++++++++----------- 1 file changed, 44 insertions(+), 22 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 41331b62..9ea86c52 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -1,5 +1,6 @@ import datetime import logging +import random from typing import Optional import requests @@ -37,13 +38,13 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.1" # last update: 2023-04-27 + version = "0.1.2" # last update: 2023-04-27 allowed_domains = "oersi.org" custom_settings = { - "CONCURRENT_REQUESTS": 32, + "CONCURRENT_REQUESTS": 48, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 3, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 6, "WEB_TOOLS": WebEngine.Playwright, } @@ -73,7 +74,9 @@ class OersiSpider(scrapy.Spider, LomBase): "HOOU", "iMoox", "KI Campus", + "langSci Press", # new provider as of 2023-04-27 "MIT OpenCourseWare", + "OEPMS", # new provider as of 2023-04-27 "OER Portal Uni Graz", "oncampus", "Open Music Academy", @@ -127,6 +130,8 @@ def __init__(self, **kwargs): logging.info(f"ElasticSearch API response (upon PIT delete): {json_response}") def start_requests(self): + random.shuffle(self.ELASTIC_ITEMS_ALL) # shuffling the list of ElasticSearch items to improve concurrency and + # distribute the load between several target domains. continue_from_previous_crawl = env.get_bool("CONTINUE_CRAWL", True, False) # checking if a previously aborted crawl should be completed (by skipping updates of previously collected items) if continue_from_previous_crawl: @@ -142,18 +147,34 @@ def start_requests(self): ) continue else: - yield from self.yield_request_and_parse_item(elastic_item) + yield from self.check_item_and_yield_to_parse_method(elastic_item) else: for elastic_item in self.ELASTIC_ITEMS_ALL: - yield from self.yield_request_and_parse_item(elastic_item) + yield from self.check_item_and_yield_to_parse_method(elastic_item) - @staticmethod - def yield_request_and_parse_item(elastic_item) -> scrapy.Request: - main_entity_of_page: list[dict] = elastic_item.get("_source").get("mainEntityOfPage") - if main_entity_of_page: - item_url = main_entity_of_page[0].get("id") - # by omitting the callback parameter, individual requests are yielded to the parse-method + def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Request | None: + """ + Checks if the item already exists in the edu-sharing repository and yields a Request to the parse()-method. + If the item already exists, it will be updated (if its hash has changed). + Otherwise, creates a new item in the edu-sharing repository. 
+ """ + item_url: str = elastic_item["_source"]["id"] + if item_url: # ToDo: findItem needs to happen here -> replicationsourceuuid + if self.shouldImport(None) is False: + logging.debug( + "Skipping entry {} because shouldImport() returned false".format( + str(self.getId(response=None, elastic_item=elastic_item)) + ) + ) + return None + if ( + self.getId(response=None, elastic_item=elastic_item) is not None + and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None + ): + if not self.hasChanged(None, elastic_item=elastic_item): + return None + # by omitting the callback parameter, individual requests are yielded to the parse-method yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}) def elastic_pit_create(self) -> dict: @@ -297,12 +318,22 @@ def getHash(self, response=None, elastic_item_source: dict = dict) -> str: hash_temp: str = f"{datetime.datetime.now().isoformat()}{self.version}" return hash_temp + @staticmethod + def get_uuid(elastic_item: dict): + """ + Builds a UUID from the to-be-parsed target URL and returns it. + """ + # The "getUUID"-method of LomBase couldn't be cleanly overridden because at the point of time when we do this + # check, there is no response available yet. + item_url: str = elastic_item["_source"]["id"] + return EduSharing.buildUUID(item_url) + def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: elastic_item = elastic_item if self.forceUpdate: return True if self.uuid: - if self.getUUID(response) == self.uuid: + if self.get_uuid(elastic_item=elastic_item) == self.uuid: logging.info(f"matching requested id: {self.uuid}") return True return False @@ -666,15 +697,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): elastic_item_source: dict = elastic_item.get("_source") # _source is the original JSON body passed for the document at index time # see: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html - if self.shouldImport(response) is False: - logging.debug("Skipping entry {} because shouldImport() returned false".format(str(self.getId(response)))) - return None - if ( - self.getId(response=response, elastic_item=elastic_item) is not None - and self.getHash(response=response, elastic_item_source=elastic_item_source) is not None - ): - if not self.hasChanged(response, elastic_item=elastic_item): - return None # ToDo: look at these (sometimes available) properties later: # - encoding (see: https://dini-ag-kim.github.io/amb/draft/#encoding - OPTIONAL field) @@ -720,7 +742,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): date_published: str = elastic_item_source.get("datePublished") base.add_value("sourceId", self.getId(response, elastic_item=elastic_item)) - base.add_value("hash", self.getHash(response, elastic_item_source=elastic_item)) + base.add_value("hash", self.getHash(response, elastic_item_source=elastic_item_source)) thumbnail_url = str() if "image" in elastic_item_source: thumbnail_url = elastic_item_source.get("image") # thumbnail From 158705b34bfcaa7237281c97b2eb95773c0c045e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 27 Apr 2023 19:13:35 +0200 Subject: [PATCH 293/590] docs: fix DocStrings (role: metadata_provider / metadata_creator) - I noticed during debugging of oersi_spider (and giving feedback to Manuel) that the DocString for LomLifecycleItem had two small oversights -- fix: DocStrings for the 'role'-settings of 'metadata_creator' and 'metadata_provider' Signed-off-by: 
criamos <981166+Criamos@users.noreply.github.com> --- converter/items.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/items.py b/converter/items.py index ae0a221a..0fcdb1c3 100644 --- a/converter/items.py +++ b/converter/items.py @@ -66,8 +66,8 @@ class LomLifecycleItem(Item): - 'ccm:lifecyclecontributer_publisher' ('role'-value = 'publisher') - 'ccm:lifecyclecontributer_author' ('role'-value = 'author') - 'ccm:lifecyclecontributer_editor' ('role'-value = 'editor') - - 'ccm:lifecyclecontributer_metadata_creator' ('role'-value = 'metadata_creator') - - 'ccm:lifecyclecontributer_metadata_provider' ('role'-value = 'metadata_provider') + - 'ccm:metadatacontributer_creator' ('role'-value = 'metadata_creator') + - 'ccm:metadatacontributer_provider' ('role'-value = 'metadata_provider') - 'ccm:lifecyclecontributer_unknown' ('role'-value = 'unknown') The role 'unknown' is used for contributors in an unknown capacity ("Mitarbeiter"). From 1b8a35032e5c25e28ff4476d73d2526ab72a021c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 28 Apr 2023 19:25:39 +0200 Subject: [PATCH 294/590] oersi_spider v0.1.3 ("affiliation"-fixes) - fix: "affiliation"-values from "creator"- or "contributor"-fields -- refactor: method for gathering "affiliation"-values creates its own lifecycle items from now on --- since edu-sharing and our crawler framework currently uses vCards v3 spec, "affiliation"-values need to be placed into their own lifecycle fields (e.g. the vCard 'RELATED'-attribute is only available in vCards v4) --- By decoupling the previous "affiliation"-behaviour from both lifecycle methods so the method can be re-used more easily --- this also fixes that "affiliation.id"-values will not bleed over to author or contributor vCards - deactivate: LicenseItem "author" free-text field (until further confirmation/feedback) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 120 +++++++++++++++++++----------- 1 file changed, 75 insertions(+), 45 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 9ea86c52..87c40d3e 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -38,7 +38,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.2" # last update: 2023-04-27 + version = "0.1.3" # last update: 2023-04-28 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 48, @@ -161,7 +161,7 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req item_url: str = elastic_item["_source"]["id"] if item_url: # ToDo: findItem needs to happen here -> replicationsourceuuid - if self.shouldImport(None) is False: + if self.shouldImport(response=None) is False: logging.debug( "Skipping entry {} because shouldImport() returned false".format( str(self.getId(response=None, elastic_item=elastic_item)) @@ -211,6 +211,7 @@ def elastic_pit_delete(self) -> dict: def elastic_query_provider_metadata(self, provider_name, search_after=None): """ Queries OERSI's ElasticSearch API for a metadata from a specific provider. 
+ See: https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#paginate-search-results """ url = "https://oersi.org/resources/api-internal/search/_search" @@ -362,7 +363,7 @@ def get_lifecycle_author( :param lom_base_item_loader: LomBaseItemLoader where the collected metadata should be saved to :param elastic_item_source: the '_source'-field of the currently parsed OERSI elastic item - :param organization_fallback: a temporary set of strings of all affiliation 'name'-values + :param organization_fallback: a temporary set of strings containing all affiliation 'name'-values :param date_created: OERSI 'dateCreated' value (if available) :param date_published: OERSI 'datePublished' value (if available) :returns: list[str] - list of authors (names) for later usage in the LicenseItemLoader @@ -378,20 +379,6 @@ def get_lifecycle_author( lifecycle_author.add_value("date", date_published) elif date_created: lifecycle_author.add_value("date", date_created) - if "affiliation" in creator_item: - affiliation_item = creator_item.get("affiliation") - # affiliation.type is always "Organization" according to - # https://dini-ag-kim.github.io/amb/draft/schemas/affiliation.json - if "name" in affiliation_item: - affiliation_name = affiliation_item.get("name") - lifecycle_author.add_value("organization", affiliation_name) - organization_fallback.add(affiliation_name) - if "id" in affiliation_item: - # according to the AMB spec, the affiliation.id should always be a reference to - # GND, Wikidata or ROR - self.lifecycle_determine_type_of_identifier_and_save_uri( - affiliation_item, lifecycle_item_loader=lifecycle_author - ) if creator_item.get("type") == "Person": lifecycle_author.add_value("role", "author") author_name: str = creator_item.get("name") @@ -417,22 +404,6 @@ def get_lifecycle_author( ) lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) elif creator_item.get("type") == "Organization": - # ToDo: find a solution for edge-case where "creator" is an organization which itself is affiliated - # to an organization, e.g.: - # [ - # { - # "affiliation": { - # "name": "RWTH Aachen", - # "id": "https://ror.org/04xfq0f34", - # "type": "Organization" - # }, - # "name": "OMB+-Konsortium", - # "type": "Organization" - # } - # ], - # the vCard standard 4.0 provides a "RELATED"-property which could be suitable for this edge-case, - # but both edu-sharing and the currently used "vobject"-package only support vCard standard v3.0 - # see: https://www.rfc-editor.org/rfc/rfc6350.html#section-6.6.6 creator_organization_name = creator_item.get("name") lifecycle_author.add_value("role", "author") lifecycle_author.add_value("organization", creator_organization_name) @@ -440,8 +411,66 @@ def get_lifecycle_author( item_dictionary=creator_item, lifecycle_item_loader=lifecycle_author ) lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) + if "affiliation" in creator_item: + affiliation_item = creator_item.get("affiliation") + self.get_affiliation_and_save_to_lifecycle( + affiliation_dict=affiliation_item, + lom_base_item_loader=lom_base_item_loader, + organization_fallback=organization_fallback, + lifecycle_role="author", + ) return authors + def get_affiliation_and_save_to_lifecycle( + self, + affiliation_dict: dict, + lom_base_item_loader: LomBaseItemloader, + organization_fallback: set[str], + lifecycle_role: str, + ): + """ + Retrieves metadata from OERSI's "affiliation"-field (which is typically found within a "creator"- or + 
"contributor"-item) and tries to save it within a new LOM Lifecycle Item. + + See: https://dini-ag-kim.github.io/amb/draft/#affiliation + """ + # affiliation.type is always "Organization" according to + # see: https://dini-ag-kim.github.io/amb/draft/schemas/affiliation.json // example dict: + # [ + # { + # "affiliation": { + # "name": "RWTH Aachen", + # "id": "https://ror.org/04xfq0f34", + # "type": "Organization" + # }, + # "name": "OMB+-Konsortium", + # "type": "Organization" + # } + # ], + # the vCard standard 4.0 provides a "RELATED"-property which could be suitable for this edge-case, + # but both edu-sharing and the currently used "vobject"-package only support vCard standard v3.0 + # (for future reference: + # vCard v3: https://datatracker.ietf.org/doc/html/rfc2426 + # vCard v4: https://www.rfc-editor.org/rfc/rfc6350.html#section-6.6.6 ) + if "name" in affiliation_dict: + affiliation_name = affiliation_dict.get("name") + lifecycle_affiliated_org = LomLifecycleItemloader() + if affiliation_name: + if affiliation_name not in organization_fallback: + # checking to make sure we don't add the same organization several times to the same role + # (e.g. 5 different authors could be affiliated to the same university, but we most definitely don't + # want to have the organization entry 5 times) + lifecycle_affiliated_org.add_value("role", lifecycle_role) + lifecycle_affiliated_org.add_value("organization", affiliation_name) + organization_fallback.add(affiliation_name) + if "id" in affiliation_dict: + # according to the AMB spec, the affiliation.id is OPTIONAL, but should always be a + # reference to GND, Wikidata or ROR + self.lifecycle_determine_type_of_identifier_and_save_uri( + affiliation_dict, lifecycle_item_loader=lifecycle_affiliated_org + ) + lom_base_item_loader.add_value("lifecycle", lifecycle_affiliated_org.load_item()) + @staticmethod def validate_academic_title_string(honorific_prefix: str) -> str: """ @@ -518,16 +547,12 @@ def get_lifecycle_contributor( # if the dictionary exists, it might contain the following fields: # - id (= URL to GND / ROR / Wikidata) # - name (= string containing the name of the affiliated organization) - if affiliation_dict: - if "id" in affiliation_dict: - self.lifecycle_determine_type_of_identifier_and_save_uri( - item_dictionary=affiliation_dict, lifecycle_item_loader=lifecycle_contributor - ) - if "name" in affiliation_dict: - affiliation_name: str = affiliation_dict["name"] - if affiliation_name: - lifecycle_contributor.add_value("organization", affiliation_name) - organization_fallback.add(affiliation_name) + self.get_affiliation_and_save_to_lifecycle( + affiliation_dict=affiliation_dict, + lom_base_item_loader=lom_base_item_loader, + organization_fallback=organization_fallback, + lifecycle_role="unknown", + ) lom_base_item_loader.add_value("lifecycle", lifecycle_contributor.load_item()) @staticmethod @@ -791,6 +816,10 @@ def parse(self, response: scrapy.http.Response, **kwargs): lom.add_value("technical", technical.load_item()) organizations_from_affiliation_fields: set[str] = set() + # this (temporary) set of strings is used to make a decision for OERSI's "sourceOrganization" field: + # we only store metadata about organizations from this field if an organization didn't appear previously in + # an "affiliation" field of a "creator" or "contributor". If we didn't do this check, we would have duplicate + # entries for organizations in our lifecycle items. 
authors = self.get_lifecycle_author( lom_base_item_loader=lom, @@ -1000,8 +1029,9 @@ def parse(self, response: scrapy.http.Response, **kwargs): license_url_mapped = license_mapper.get_license_url(license_string=license_url) if license_url_mapped: license_loader.add_value("url", license_url_mapped) - if authors: - license_loader.add_value("author", authors) + # if authors: + # # ToDo: confirm if this workaround is still necessary/desired for future crawler versions + # license_loader.add_value("author", authors) # noinspection DuplicatedCode base.add_value("license", license_loader.load_item()) From 14e9bb5a304692fa611555ba2db4156db82da435 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 May 2023 11:56:39 +0200 Subject: [PATCH 295/590] oersi_spider v0.1.4 (squashed) - change, refactor: 'get_item_url'-method -- reverted changes made in v0.1.2 in regard to item URLs and made it more consistent / obvious where to-be-parsed URLs are extracted from (and in which priority) - fix: 'init' -- necessary for spider arguments like 'resetVersion=true' - docs: DocStrings and code annotations Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 35 ++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 87c40d3e..40c76a13 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -38,7 +38,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.3" # last update: 2023-04-28 + version = "0.1.4" # last update: 2023-05-02 allowed_domains = "oersi.org" custom_settings = { "CONCURRENT_REQUESTS": 48, @@ -118,7 +118,7 @@ class OersiSpider(scrapy.Spider, LomBase): } def __init__(self, **kwargs): - super().__init__(**kwargs) + LomBase.__init__(self, **kwargs) # Fetching a "point in time"-id for the subsequent ElasticSearch queries self.ELASTIC_PIT_ID = self.elastic_pit_get_id(self.elastic_pit_create()) # querying the ElasticSearch API for metadata-sets of specific providers, this allows us to control which @@ -158,9 +158,8 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req If the item already exists, it will be updated (if its hash has changed). Otherwise, creates a new item in the edu-sharing repository. """ - item_url: str = elastic_item["_source"]["id"] + item_url: str = self.get_item_url(elastic_item) if item_url: - # ToDo: findItem needs to happen here -> replicationsourceuuid if self.shouldImport(response=None) is False: logging.debug( "Skipping entry {} because shouldImport() returned false".format( @@ -322,13 +321,33 @@ def getHash(self, response=None, elastic_item_source: dict = dict) -> str: @staticmethod def get_uuid(elastic_item: dict): """ - Builds a UUID from the to-be-parsed target URL and returns it. + Builds a UUID string from the to-be-parsed target URL and returns it. """ # The "getUUID"-method of LomBase couldn't be cleanly overridden because at the point of time when we do this - # check, there is no response available yet. - item_url: str = elastic_item["_source"]["id"] + # check, there is no "Response"-object available yet. 
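        # For illustration: a deterministic, URL-based UUID can be produced with the standard library, e.g. via
        # uuid.uuid5 over the URL namespace (whether EduSharing.buildUUID works exactly like this is an assumption;
        # the relevant property is that the same URL always maps to the same UUID across crawls):
        #
        #   import uuid
        #   def build_url_based_uuid(item_url: str) -> str:
        #       return str(uuid.uuid5(uuid.NAMESPACE_URL, item_url))
        #
        #   build_url_based_uuid("https://example.org/oer/123") == build_url_based_uuid("https://example.org/oer/123")
        #   # -> True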
+ item_url = OersiSpider.get_item_url(elastic_item=elastic_item) return EduSharing.buildUUID(item_url) + @staticmethod + def get_item_url(elastic_item) -> str: + """ + Tries to gather the to-be-parsed URL from OERSI's 'MainEntityOfPage'-field and if that field is not available, + falls back to the '_source.id'-field. Returns an URL-string. + """ + main_entity_of_page: list[dict] = elastic_item["_source"]["mainEntityOfPage"] + if main_entity_of_page: + item_url: str = main_entity_of_page[0]["id"] + # "id" is a REQUIRED sub-field of MainEntityOfPage and will always contain more stable URLs than + # '_source.id' + return item_url + else: + item_url: str = elastic_item["_source"]["id"] + logging.debug( + f"get_uuid fallback activated: The field 'MainEntityOfPage.id' for '{elastic_item['_id']}' was not " + f"available. Using fallback value '_source.id': {item_url} instead." + ) + return item_url + def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: elastic_item = elastic_item if self.forceUpdate: @@ -808,7 +827,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): general.replace_value("identifier", identifier_url) technical.add_value("location", identifier_url) if identifier_url != response.url: - # the identifier_url should be more stable/robust than the (resolved) response.url in the long term, + # the identifier_url should be more stable/robust than the (resolved) response.url in the long run, # so we will save both URLs in case the resolved URL is different technical.add_value("location", response.url) elif not identifier_url: From 57d43ae48153ebd58754f6ac64210079d6d5e89a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 May 2023 16:03:20 +0200 Subject: [PATCH 296/590] oersi_spider v0.1.5 (squashed) feat: control individual crawls via optional ".env"-variable "OERSI_METADATA_PROVIDER" -- example: by setting OERSI_METADATA_PROVIDER="KI Campus" you can specify which metadata provider should be crawled (this setting takes precedence over the ELASTIC_PROVIDERS_TO_CRAWL list) --- if you want to crawl several providers, controlled via your .env file, you should separate them by semicolons: ---- example: OERSI_METADATA_PROVIDER="eGov-Campus;KI Campus;vhb" -- the string-values should be 1:1 exact matches because they will be used for the corresponding ElasticSearch query parameter - fix: 'sourceOrganization'-fallback -- items from "MIT OpenCourseWare" had an additional "lifecycle"-entry (role: "unknown") even though the organization was already part of the "creator"-list Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 40c76a13..7dfb576a 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -32,19 +32,19 @@ class OersiSpider(scrapy.Spider, LomBase): Crawls OERSI.org for metadata from different OER providers. You can control which metadata provider should be crawled by commenting/uncommenting their name within the - ELASTIC_PROVIDERS_TO_CRAWL list. + ELASTIC_PROVIDERS_TO_CRAWL list. Alternatively, you can set the optional '.env'-variable 'OERSI_METADATA_PROVIDER' + to control which provider should be crawled individually. 
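    A minimal usage sketch (assuming a dotenv-style ".env" file, as used for the other crawler settings; the
    provider names below are examples and must match OERSI's spelling exactly):

        OERSI_METADATA_PROVIDER="KI Campus"                    # crawl a single provider
        OERSI_METADATA_PROVIDER="eGov-Campus;KI Campus;vhb"    # or: several providers, separated by semicolons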
""" name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.4" # last update: 2023-05-02 + version = "0.1.5" # last update: 2023-05-05 allowed_domains = "oersi.org" custom_settings = { - "CONCURRENT_REQUESTS": 48, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 6, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 2, "WEB_TOOLS": WebEngine.Playwright, } @@ -74,7 +74,7 @@ class OersiSpider(scrapy.Spider, LomBase): "HOOU", "iMoox", "KI Campus", - "langSci Press", # new provider as of 2023-04-27 + # "langSci Press", # new provider as of 2023-04-27 - disappeared on 2023-05-04 "MIT OpenCourseWare", "OEPMS", # new provider as of 2023-04-27 "OER Portal Uni Graz", @@ -135,6 +135,8 @@ def start_requests(self): continue_from_previous_crawl = env.get_bool("CONTINUE_CRAWL", True, False) # checking if a previously aborted crawl should be completed (by skipping updates of previously collected items) if continue_from_previous_crawl: + # ToDo: for time-stable results this feature needs to be reworked: uuids need to be used to keep consistent + # results across longer crawling processes es_id_collector = EduSharingPreCheck() previously_crawled_replication_source_ids: list[str] = es_id_collector.get_replication_source_id_list() for elastic_item in self.ELASTIC_ITEMS_ALL: @@ -248,6 +250,21 @@ def elastic_fetch_all_provider_pages(self): https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#search-after """ all_items: list = list() + # the OERSI_METADATA_PROVIDER '.env'-variable controls which metadata-provider should be crawled: + # e.g. set: OERSI_METADATA_PROVIDER="eGov-Campus" within your .env file if you only want to crawl items from + # 'eGov-Campus'. Since this string is used within ElasticSearch queries as a parameter, it needs to be + # 1:1 identical to the metadata-provider string values on OERSI.org. + provider_target_from_env: str = env.get(key="OERSI_METADATA_PROVIDER", allow_null=True, default=None) + if provider_target_from_env: + logging.info(f"Recognized OERSI_METADATA_PROVIDER .env setting. 
Value: {provider_target_from_env}") + self.ELASTIC_PROVIDERS_TO_CRAWL = [provider_target_from_env] + if ";" in provider_target_from_env: + provider_list: list[str] = provider_target_from_env.split(";") + logging.info( + f"Recognized multiple providers within OERSI_METADATA_PROVIDER .env setting:" f"{provider_list}" + ) + self.ELASTIC_PROVIDERS_TO_CRAWL = provider_list + has_next_page = True for provider_name in self.ELASTIC_PROVIDERS_TO_CRAWL: pagination_parameter = None @@ -424,6 +441,7 @@ def get_lifecycle_author( lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) elif creator_item.get("type") == "Organization": creator_organization_name = creator_item.get("name") + organization_fallback.add(creator_organization_name) lifecycle_author.add_value("role", "author") lifecycle_author.add_value("organization", creator_organization_name) self.lifecycle_determine_type_of_identifier_and_save_uri( @@ -551,6 +569,7 @@ def get_lifecycle_contributor( ) elif contributor_item.get("type") == "Organization": lifecycle_contributor.add_value("organization", contributor_name) + organization_fallback.add(contributor_name) if "id" in contributor_item: # id points to a URI reference of ORCID, GND, WikiData or ROR # (while this isn't necessary for OMA items yet (as they have no 'id'-field), it will be necessary From 4847660a54d45615b5a09c60e17b89d84d0e6ca0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 11 May 2023 21:41:09 +0200 Subject: [PATCH 297/590] change: tune AUTOTHROTTLE settings - increase "Autothrottle target concurrency"-setting, but reduce amount of requests per domain down to 4 (Scrapy default: 8) -- this change should spread Requests across several metadata-providers, but lessen the load on the individual target servers - code cleanup (remove no longer desired "license.author"-implementation, which was commented-out anyway since v<0.1.5) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 7dfb576a..dfdfb5b2 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -44,7 +44,8 @@ class OersiSpider(scrapy.Spider, LomBase): custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 2, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 20, + "CONCURRENT_REQUESTS_PER_DOMAIN": 4, "WEB_TOOLS": WebEngine.Playwright, } @@ -402,7 +403,7 @@ def get_lifecycle_author( :param organization_fallback: a temporary set of strings containing all affiliation 'name'-values :param date_created: OERSI 'dateCreated' value (if available) :param date_published: OERSI 'datePublished' value (if available) - :returns: list[str] - list of authors (names) for later usage in the LicenseItemLoader + :returns: list[str] - list of authors (author names will be used for "contributor"-duplicate-mitigation) """ authors: list[str] = list() if "creator" in elastic_item_source: @@ -1067,9 +1068,6 @@ def parse(self, response: scrapy.http.Response, **kwargs): license_url_mapped = license_mapper.get_license_url(license_string=license_url) if license_url_mapped: license_loader.add_value("url", license_url_mapped) - # if authors: - # # ToDo: confirm if this workaround is still necessary/desired for future crawler versions - # license_loader.add_value("author", authors) # noinspection DuplicatedCode 
base.add_value("license", license_loader.load_item()) From 4b86532fe265710af9e7684aeb82bf5e1875864b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 12 May 2023 15:17:25 +0200 Subject: [PATCH 298/590] version bump to v0.1.1 to force metadata refresh - (since no version bump was made in February 2023) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/br_rss_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/br_rss_spider.py b/converter/spiders/br_rss_spider.py index 1b81babf..5de4aa19 100644 --- a/converter/spiders/br_rss_spider.py +++ b/converter/spiders/br_rss_spider.py @@ -6,7 +6,7 @@ class BRRSSSpider(RSSListBase): name = "br_rss_spider" friendlyName = "Bayerischer Rundfunk" url = "https://www.br.de/" - version = "0.1.0" + version = "0.1.1" def __init__(self, **kwargs): RSSListBase.__init__(self, "../csv/br_rss.csv", **kwargs) From d7da3feb5f01c759d0726b25934e1df6268d2e0e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 12 May 2023 20:28:06 +0200 Subject: [PATCH 299/590] serlo_spider v0.2.7 - fix: 'general.title' Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index c703a489..43e657f8 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -26,7 +26,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.6" # last update: 2023-03-14 + version = "0.2.7" # last update: 2023-05-12 custom_settings = { # Using Playwright because of Splash-issues with thumbnails+text for Serlo "WEB_TOOLS": WebEngine.Playwright @@ -157,18 +157,22 @@ def parse(self, response, **kwargs): # # - structure optional # # - aggregationLevel optional general.add_value("identifier", graphql_json["id"]) - title_1st_try: str = graphql_json["headline"] + title_1st_try: str = graphql_json["name"] title_fallback: str = str() # not all materials carry a title in the GraphQL API, therefore we're trying to grab a valid title from # different sources (GraphQL > (DOM) json_ld > (DOM) header > (DOM) last breadcrumb label) if title_1st_try: general.add_value("title", title_1st_try) elif not title_1st_try: - title_2nd_try = json_ld["name"] + title_2nd_try = graphql_json["headline"] + title_3rd_try = json_ld["name"] if title_2nd_try: general.add_value("title", title_2nd_try) title_fallback = title_2nd_try - if not title_1st_try and not title_2nd_try: + elif title_3rd_try: + general.add_value("title", title_3rd_try) + title_fallback = title_3rd_try + if not title_1st_try and not title_2nd_try and not title_3rd_try: title_from_header = response.xpath('//meta[@property="og:title"]/@content').get() if title_from_header: general.add_value("title", title_from_header) From c6590e66349af3f7684e6d2a3c751ea7d99547ab Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 31 May 2023 14:25:57 +0200 Subject: [PATCH 300/590] add: License "CC_BY_NC_ND 2.5" recognition - update: LicenseMapper and Tests for CC-BY-NC-ND 2.5 values - fix: CC-BY-NC 2.0 typo in es_connector - this change was necessary because OERSI's "TIB AV-Portal"-provider used a 
specific url format for some of its materials which wasn't covered before Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/constants.py | 2 ++ converter/es_connector.py | 5 ++++- converter/util/license_mapper.py | 16 +++++++--------- converter/util/test_license_mapper.py | 1 + 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index 38235b0a..ac29156f 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -9,6 +9,7 @@ class Constants: LICENSE_CC_BY_NC_30: Final[str] = "https://creativecommons.org/licenses/by-nc/3.0/" LICENSE_CC_BY_NC_40: Final[str] = "https://creativecommons.org/licenses/by-nc/4.0/" LICENSE_CC_BY_NC_ND_20: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/2.0/" + LICENSE_CC_BY_NC_ND_25: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/2.5/" LICENSE_CC_BY_NC_ND_30: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/3.0/" LICENSE_CC_BY_NC_ND_40: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/4.0/" LICENSE_CC_BY_NC_SA_20: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/2.0/" @@ -33,6 +34,7 @@ class Constants: LICENSE_CC_BY_NC_30, LICENSE_CC_BY_NC_40, LICENSE_CC_BY_NC_ND_20, + LICENSE_CC_BY_NC_ND_25, LICENSE_CC_BY_NC_ND_30, LICENSE_CC_BY_NC_ND_40, LICENSE_CC_BY_NC_SA_20, diff --git a/converter/es_connector.py b/converter/es_connector.py index d69b268a..03367de1 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -242,7 +242,10 @@ def mapLicense(self, spaces, license): spaces["ccm:commonlicense_cc_version"] = "4.0" case Constants.LICENSE_CC_BY_NC_ND_20: spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" - spaces["ccm:commonlicense_cc_version"] = "20" + spaces["ccm:commonlicense_cc_version"] = "2.0" + case Constants.LICENSE_CC_BY_NC_ND_25: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" + spaces["ccm:commonlicense_cc_version"] = "2.5" case Constants.LICENSE_CC_BY_NC_ND_30: spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" spaces["ccm:commonlicense_cc_version"] = "3.0" diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py index 04cfaa35..354e7a00 100644 --- a/converter/util/license_mapper.py +++ b/converter/util/license_mapper.py @@ -156,8 +156,6 @@ def identify_cc_license(self, license_string: str) -> str | None: if regex_deed_hit: deed_hit = regex_deed_hit.group() license_url_candidate = license_url_candidate[: -len(deed_hit)] - # ToDo: while it (thankfully) hasn't happened yet, we have to assume that URLs ending in "/fr/" or "/es" - # could be problematic as well. Therefore: refactor the if-checks for "/de/" and "/de" asap url_ending_in_two_char_language_code_regex = re.compile(r"/([a-z]{2}/?)$") # RegEx pattern for handling URLs that end in "/de", "/de/", "/fr", "/es/" etc. 
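             # Worked examples for the pattern above (shown for clarity only; they mirror the cases covered in
             # test_license_mapper.py):
             #   "https://creativecommons.org/licenses/by/3.0/de/" -> matches "/de/" (language suffix is stripped)
             #   "https://creativecommons.org/licenses/by/3.0/fr"  -> matches "/fr"
             #   "https://creativecommons.org/licenses/by/3.0/"    -> no match (nothing to strip)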
two_char_language_code_hit = url_ending_in_two_char_language_code_regex.search(license_url_candidate) @@ -169,7 +167,7 @@ def identify_cc_license(self, license_string: str) -> str | None: if url_language_code_trail: # the url_language_code_trail will typically look like "/de/" or "/de", but we only want to cut off # the 2-char language code and its trailing slash, but keep the first slash intact - license_url_candidate = license_url_candidate[: -len(url_language_code_trail) +1] + license_url_candidate = license_url_candidate[: -len(url_language_code_trail) + 1] for valid_license_url in Constants.VALID_LICENSE_URLS: if license_url_candidate in valid_license_url: return valid_license_url @@ -213,9 +211,9 @@ def identify_cc_license(self, license_string: str) -> str | None: if __name__ == "__main__": test_mapper = LicenseMapper() # test-cases for debugging purposes - print(test_mapper.get_license_internal_key("CC BY-NC-ND")) - print(test_mapper.get_license_internal_key("zufälliger CC BY lizenzierter Freitext-String")) - print(test_mapper.get_license_url("a random CC-BY 4.0 string")) - print(test_mapper.get_license_url("https://creativecommons.org/licenses/by-nc/3.0/de/")) - print(test_mapper.identify_cc_license("https://creativecommons.org/licenses/by-nc/3.0/deed.de")) - pass + # print(test_mapper.get_license_internal_key("CC BY-NC-ND")) + # print(test_mapper.get_license_internal_key("zufälliger CC BY lizenzierter Freitext-String")) + # print(test_mapper.get_license_url("a random CC-BY 4.0 string")) + # print(test_mapper.get_license_url("https://creativecommons.org/licenses/by-nc/3.0/de/")) + # print(test_mapper.identify_cc_license("https://creativecommons.org/licenses/by-nc/3.0/deed.de")) + print(test_mapper.identify_cc_license("http://creativecommons.org/licenses/by-nc-nd/2.5/ch/deed.en")) diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py index 7de1084c..87adc333 100644 --- a/converter/util/test_license_mapper.py +++ b/converter/util/test_license_mapper.py @@ -40,6 +40,7 @@ class TestLicenseMapper: ("http://creativecommons.org/licenses/by/3.0/de", Constants.LICENSE_CC_BY_30), ("https://creativecommons.org/licenses/by/3.0/es/", Constants.LICENSE_CC_BY_30), ("https://creativecommons.org/licenses/by/3.0/fr", Constants.LICENSE_CC_BY_30), + ("http://creativecommons.org/licenses/by-nc-nd/2.5/ch/deed.en", Constants.LICENSE_CC_BY_NC_ND_25), ], ) def test_get_license_url(self, test_input, expected_result): From 2a623de48cb2edf9aa45e246177687a4e7bca9ab Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 27 Jun 2023 13:46:02 +0200 Subject: [PATCH 301/590] fix: multiple "LomGeneralItem.language" values - a small subset of OERSI Items provide multiple language values as a list (e.g. ["zh", "de", "en", "fr"]), but the items.py only consumed the first value and ignored the rest of the collected language codes -- in the aforementioned example this would lead to an item being classified as "Chinese" in 'cclom:general_language', even though it should carry 4 languages - (Thank you, Frank, for quickly showing/reporting this unexpected behaviour to me!) 
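For context, a minimal stand-alone sketch of why the output processor matters (itemloaders' built-in
Identity() is used below as a stand-in; the project's own JoinMultivalues processor may differ in detail):

    from itemloaders.processors import Identity, TakeFirst
    from scrapy import Field, Item
    from scrapy.loader import ItemLoader

    class DemoItem(Item):
        language_first_only = Field(output_processor=TakeFirst())
        language_all = Field(output_processor=Identity())

    loader = ItemLoader(item=DemoItem())
    loader.add_value("language_first_only", ["zh", "de", "en", "fr"])
    loader.add_value("language_all", ["zh", "de", "en", "fr"])
    item = loader.load_item()
    # item["language_first_only"] -> "zh" (only the first language survives)
    # item["language_all"]        -> ["zh", "de", "en", "fr"] (all collected languages are kept)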
Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/items.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/items.py b/converter/items.py index 0fcdb1c3..e688a3f5 100644 --- a/converter/items.py +++ b/converter/items.py @@ -50,7 +50,7 @@ class LomGeneralItem(Item): """Corresponding edu-sharing property: 'cclom:general_identifier' """ keyword = Field(output_processor=JoinMultivalues()) """Corresponding edu-sharing property: 'cclom:general_keyword'""" - language = Field() + language = Field(output_processor=JoinMultivalues()) """Corresponding edu-sharing property: 'cclom:general_language'""" structure = Field() # ToDo: 'structure' is currently not used; no equivalent edu-sharing property From 317cd7c7983c0b028699894d3dad1048776c3a03 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 11 Jul 2023 19:10:03 +0200 Subject: [PATCH 302/590] serlo_spider v0.2.8 (Serlo GraphQL API v1.0.0) Program flow changes: - Serlo's Metadata API v1.0.0 (see: https://github.com/serlo/documentation/wiki/Metadata-API#changelog-100 ) brought some breaking changes and new metadata fields - change: GraphQL Requests now query for "resources" instead of "entries" - feat: OPTIONAL - crawl only those items that were modified (by Serlo) after a specific date by setting "SERLO_MODIFIED_AFTER" in your .env file -- e.g. set SERLO_MODIFIED_AFTER="2023-07-01" to receive only those Serlo items which have been modified (by Serlo) after that date - feat: skip "Papierkorb"-Items if Robot Meta Tags are detected (enabled by default) -- Learning objects on Serlo.org that are marked for deletion should typically not be indexed by crawlers --- Serlo uses the Google Robot Meta Tags Specification for this purpose, therefore our crawler tries to respect this behaviour - code cleanup: code formatting Metadata improvements: - feat: lifecycle author list, extracted from Serlo's new "creator"-property -- all authors carry an affiliation with Serlo, which is saved as a separate lifecycle publisher item - feat: lifecycle metadata provider - change: "learningResourceType" is using the AMB spec (openeduhub LRT vocabulary) - change: use LicenseMapper utility for license URLs - change: thumbnails are using the OpenGraph thumbnail image first, fallback to screenshots only if necessary Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 281 +++++++++++++++++++++++++----- 1 file changed, 235 insertions(+), 46 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 43e657f8..38ca851d 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -1,9 +1,12 @@ import datetime import json +import logging +import dateparser import requests import scrapy +import env from converter.constants import Constants from converter.items import ( BaseItemLoader, @@ -18,6 +21,7 @@ ) from converter.spiders.base_classes import LomBase from converter.web_tools import WebEngine, WebTools +from ..util.license_mapper import LicenseMapper class SerloSpider(scrapy.Spider, LomBase): @@ -26,11 +30,12 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.7" # last update: 2023-05-12 + version = "0.2.8" # last update: 2023-07-11 custom_settings = { # Using Playwright because of Splash-issues 
with thumbnails+text for Serlo "WEB_TOOLS": WebEngine.Playwright } + GRAPHQL_MODIFIED_AFTER_PARAMETER: str = "" graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: @@ -52,32 +57,80 @@ class SerloSpider(scrapy.Spider, LomBase): def __init__(self, *a, **kw): super().__init__(*a, **kw) + self.decide_crawl_mode() self.graphql_items = self.fetch_all_graphql_pages() + def decide_crawl_mode(self): + """ + Check the '.env'-file for a 'SERLO_MODIFIED_AFTER'-variable and set the GraphQL API parameter 'modifiedAfter' + accordingly. + + * Default behaviour: The Serlo GraphQL API is crawled COMPLETELY. (The 'modifiedAfter'-parameter will be + omitted in this case.) + * Optional behaviour: If the 'SERLO_MODIFIED_AFTER'-variable is set in your .env file (e.g. "2023-07-01"), + Serlo's GraphQL API shall be queried ONLY for items that have been modified (by Serlo) since that date. + + You can use this '.env'-setting to crawl Serlo more efficiently: Specify a date and only receive items that were + modified since . + """ + graphql_modified_after_param: str = env.get(key="SERLO_MODIFIED_AFTER", allow_null=True, default=None) + if graphql_modified_after_param: + logging.info( + f"INIT: '.env'-Setting 'SERLO_MODIFIED_AFTER': '{graphql_modified_after_param}' detected. " + f"Trying to parse the date string..." + ) + # the 'modifiedAfter'-parameter must be an ISO-formatted string WITH timezone information, e.g.: + # "2023-07-01T00:00:00+00:00". To make future crawler maintenance a bit easier, we use scrapy's dateparser + # module, so you can control crawls by setting the '.env'-parameter: + # "SERLO_MODIFIED_AFTER"-Parameter "2023-07-01" and it will convert the string accordingly + date_parsed = dateparser.parse( + date_string=graphql_modified_after_param, + settings={"TIMEZONE": "Europe/Berlin", "RETURN_AS_TIMEZONE_AWARE": True}, + ) + if date_parsed: + date_parsed_iso = date_parsed.isoformat() + logging.info( + f"INIT: SUCCESS - serlo_spider will ONLY request GraphQL items that were modified (by Serlo) after " + f"'{date_parsed_iso}' ." + ) + self.GRAPHQL_MODIFIED_AFTER_PARAMETER = date_parsed_iso + else: + logging.info("INIT: Starting COMPLETE Serlo crawl (WITHOUT any GraphQL API 'modifiedAfter'-parameter).") + def fetch_all_graphql_pages(self): - all_entities = list() + all_resources = list() pagination_string: str = "" has_next_page = True while has_next_page is True: - current_page = self.query_graphql_page(pagination_string=pagination_string)["data"]["metadata"]["entities"] - all_entities += current_page["nodes"] + current_page = self.query_graphql_page(pagination_string=pagination_string)["data"]["metadata"]["resources"] + all_resources += current_page["nodes"] has_next_page = current_page["pageInfo"]["hasNextPage"] if has_next_page: pagination_string = current_page["pageInfo"]["endCursor"] else: break - return all_entities + return all_resources def query_graphql_page(self, amount_of_nodes: int = 500, pagination_string: str = None) -> dict: amount_of_nodes = amount_of_nodes # specifies the amount of nodes that shall be requested (per page) from the GraphQL API # (default: 100 // max: 500) pagination_string = pagination_string + modified_after: str = "" + if self.GRAPHQL_MODIFIED_AFTER_PARAMETER: + # the 'modifiedAfter'-parameter can be used to only crawl items that have been modified since the last time + # the crawler ran. 
+ # see: https://github.com/serlo/documentation/wiki/Metadata-API#tips-for-api-consumer + modified_after: str = self.GRAPHQL_MODIFIED_AFTER_PARAMETER + if modified_after: + # we only add the (optional) 'modifiedAfter'-parameter if the .env-Setting was recognized. By default, + # the string will stay empty. + modified_after: str = f', modifiedAfter: "{modified_after}"' graphql_metadata_query_body = { "query": f""" query {{ metadata {{ - entities(first: {amount_of_nodes}, after: "{pagination_string}"){{ + resources(first: {amount_of_nodes}, after: "{pagination_string}"{modified_after}){{ nodes pageInfo {{ hasNextPage @@ -134,17 +187,32 @@ def parse(self, response, **kwargs): html_body = playwright_dict.get("html") screenshot_bytes = playwright_dict.get("screenshot_bytes") html_text = playwright_dict.get("text") + selector_playwright: scrapy.Selector = scrapy.Selector(text=html_body) + + robot_meta_tags: list[str] = selector_playwright.xpath("//meta[@name='robots']/@content").getall() + if robot_meta_tags: + # Serlo makes use of the Google's Robot Meta Tag Specification + # (see: https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag) + # Serlo Items that are marked for deletion ("Papierkorb"-Items) carry Robot Meta Tags in the HTML Header, + # therefore we need to respect these tags and skip the items! + if "noindex" in robot_meta_tags or "none" in robot_meta_tags: + logging.info( + f"Robot Meta Tag {robot_meta_tags} identified. Robot Meta Tags 'noindex' or 'none' should " + f"be skipped by the crawler. Dropping item {response.url} ." + ) + return None base = BaseItemLoader() - # # ALL possible keys for the different Item and ItemLoader-classes can be found inside converter/items.py - # # TODO: fill "base"-keys with values for - # # - thumbnail recommended - base.add_value("screenshot_bytes", screenshot_bytes) + og_image: str = selector_playwright.xpath('//meta[@property="og:image"]/@content').get() + if og_image: + # if an OpenGraph image property is available, we'll use that as our thumbnail URL, e.g.: + # + base.add_value("thumbnail", og_image) + else: + base.add_value("screenshot_bytes", screenshot_bytes) base.add_value("sourceId", self.getId(response, graphql_json=graphql_json)) base.add_value("hash", self.getHash(response, graphql_json=graphql_json)) base.add_value("lastModified", graphql_json["dateModified"]) - # thumbnail_url: str = "This string should hold the thumbnail URL" - # base.add_value('thumbnail', thumbnail_url) if "publisher" in json_ld: base.add_value("publisher", json_ld["publisher"]) @@ -192,12 +260,16 @@ def parse(self, response, **kwargs): title_breadcrumb_last_label: str = breadcrumbs[-1]["label"] if title_breadcrumb_last_label: general.replace_value("title", title_breadcrumb_last_label) - # not all GraphQL entries have a description either, therefore we try to grab that from different sources + # Not all GraphQL items have a description, but we need one (otherwise the item would get dropped since Serlo + # provides no keywords either). 
That's why we try to grab the description from three different sources:
         # (GraphQL > JSON-LD > DOM header)
         description_1st_try = str()
         description_2nd_try = str()
         if "description" in graphql_json:
             description_1st_try: str = graphql_json["description"]
+            # as of Serlo's Metadata API v1.0.0:
+            # - the "description"-property is only available where a description exists
+            # see: https://github.com/serlo/documentation/wiki/Metadata-API#changes-to-entity-descriptions
             if description_1st_try:
                 general.add_value("description", description_1st_try)
         if not description_1st_try and "description" in json_ld:
@@ -210,10 +282,9 @@ def parse(self, response, **kwargs):
             if description_from_header:
                 general.add_value("description", description_from_header)
         in_language: list = graphql_json["inLanguage"]
+        # Serlo provides a list of 2-char language codes within its "inLanguage"-property
         general.add_value("language", in_language)
-        # ToDo: keywords would be extremely useful, but aren't supplied by neither the API / JSON_LD nor the header
-        # # once we've added all available values to the necessary keys in our LomGeneralItemLoader,
-        # # we call the load_item()-method to return a (now filled) LomGeneralItem to the LomBaseItemLoader
+        # ToDo: keywords would be extremely useful, but aren't supplied by the API, JSON_LD, or the HTML header
         lom.add_value("general", general.load_item())
 
         technical = LomTechnicalItemLoader()
@@ -224,27 +295,22 @@ def parse(self, response, **kwargs):
         # # - otherPlatformRequirements optional
         # # - duration optional (only applies to audiovisual content like videos/podcasts)
         technical.add_value("format", "text/html")  # e.g. if the learning object is a web-page
-        technical.add_value("location", graphql_json["id"])  # we could also use response.url here
+        if "id" in graphql_json:
+            graphql_id: str = graphql_json["id"]  # e.g.: "https://serlo.org/1495"
+            technical.add_value("location", graphql_id)
+        else:
+            # This case should never occur. The resolved URLs will always be longer and less stable than the shortened
+            # URI from the GraphQL 'id'-property above.
+            technical.add_value("location", response.url)
         lom.add_value("technical", technical.load_item())
 
-        lifecycle = LomLifecycleItemloader()
-        # # TODO: fill "lifecycle"-keys with values for
-        # # - role recommended
-        # # - firstName recommended
-        # # - lastName recommended
-        # # - uuid optional
-        if "publisher" in json_ld:
-            lifecycle.add_value("organization", "Serlo Education e. V.")
-            lifecycle.add_value("role", "publisher")  # supported roles: "author" / "editor" / "publisher"
-            # for available roles mapping, please take a look at converter/es_connector.py
-            lifecycle.add_value("url", json_ld["publisher"])
-            lifecycle.add_value("email", "de@serlo.org")
-            for language_item in in_language:
-                if language_item == "en":
-                    lifecycle.replace_value("email", "en@serlo.org")
-            lifecycle.add_value("date", graphql_json["dateCreated"])
-        lom.add_value("lifecycle", lifecycle.load_item())
+        self.get_lifecycle_authors(graphql_json=graphql_json, lom_base_item_loader=lom)
+        # Serlo's new "maintainer"-property holds the same information as the "creator.affiliation"-property. 
+ + self.get_lifecycle_metadata_providers(graphql_json=graphql_json, lom_base_item_loader=lom) + + self.get_lifecycle_publishers(graphql_json=graphql_json, lom_base_item_loader=lom) educational = LomEducationalItemLoader() # # TODO: fill "educational"-keys with values for @@ -297,17 +363,28 @@ def parse(self, response, **kwargs): vs.add_value("intendedEndUserRole", intended_end_user_roles) # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) + # ToDo: the graphql_json["about"] field might carry more precise information, but uses the DINI KIM Schulfaecher + # vocabulary. A mapper/resolver might be necessary. Example: + # { + # "about": [ + # { + # "type": "Concept", + # "id": "http://w3id.org/kim/schulfaecher/s1017", + # "inScheme": { + # "id": "http://w3id.org/kim/schulfaecher/" + # } + # } if "about" in json_ld and len(json_ld["about"]) != 0: # not every json_ld-container has an "about"-key, e.g.: https://de.serlo.org/5343/5343 # we need to make sure that we only try to access "about" if it's actually available # making sure that we only try to look for a discipline if the "about"-list actually has list items disciplines = list() - for list_item in json_ld["about"]: - if "de" in list_item["prefLabel"]: - discipline_de: str = list_item["prefLabel"]["de"] + for about_item in json_ld["about"]: + if "de" in about_item["prefLabel"]: + discipline_de: str = about_item["prefLabel"]["de"] disciplines.append(discipline_de) - elif "en" in list_item["prefLabel"]: - discipline_en: str = list_item["prefLabel"]["en"] + elif "en" in about_item["prefLabel"]: + discipline_en: str = about_item["prefLabel"]["en"] disciplines.append(discipline_en) if len(disciplines) > 0: vs.add_value("discipline", disciplines) @@ -334,18 +411,34 @@ def parse(self, response, **kwargs): # only set the price to "kostenpflichtig" if it's explicitly stated, otherwise we'll leave it empty vs.add_value("price", "yes") if graphql_json["learningResourceType"]: + # Serlo is using the learningResourceType vocabulary (as specified in the AMB standard), see: + # https://github.com/serlo/documentation/wiki/Metadata-API#changes-to-the-learningresourcetype-property # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) - vs.add_value("learningResourceType", graphql_json["learningResourceType"]) + learning_resource_types: list[dict] = graphql_json["learningResourceType"] + for lrt_item in learning_resource_types: + if "id" in lrt_item: + learning_resource_type_url: str = lrt_item["id"] + if "/openeduhub/vocabs/learningResourceType/" in learning_resource_type_url: + lrt_key: str = learning_resource_type_url.split("/")[-1] + if lrt_key: + vs.add_value("learningResourceType", lrt_key) + else: + logging.debug( + f"Serlo 'learningResourceType' {learning_resource_type_url} was not recognized " + f"as part of the OpenEduHub 'learningResourceType' vocabulary. Please check the " + f"crawler or the vocab at oeh-metadata-vocabs/learningResourceType.ttl" + ) base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() - # # TODO: fill "license"-keys with values for - # # - author recommended - # # - expirationDate optional (for content that expires, e.g. 
ÖR-Mediatheken) - license_url = graphql_json["license"]["id"] - if license_url: - lic.add_value("url", license_url) + if "license" in graphql_json: + license_url: str = graphql_json["license"]["id"] + if license_url: + license_mapper = LicenseMapper() + license_url_mapped = license_mapper.get_license_url(license_string=license_url) + if license_url_mapped: + lic.add_value("url", license_url_mapped) base.add_value("license", lic.load_item()) permissions = super().getPermissions(response) @@ -360,3 +453,99 @@ def parse(self, response, **kwargs): base.add_value("response", response_loader.load_item()) yield base.load_item() + + @staticmethod + def get_lifecycle_authors(graphql_json: dict, lom_base_item_loader: LomBaseItemloader): + """Retrieve author metadata from GraphQL 'creator'-items and store it in the provided LomBaseItemLoader.""" + if "creator" in graphql_json: + creators: list[dict] = graphql_json["creator"] + for creator in creators: + # a typical "creator" item currently (2023-07-11) looks like this: + # { + # "type": "Person", + # "id": "https://serlo.org/49129", + # "name": "testaccount", + # "affiliation": { + # "id": "https://serlo.org/organization", + # "name": "Serlo Education e.V.", + # "type": "Organization" + # } + # While the "affiliation" needs to be handled within the lifecycle_publisher item, we can use the 'name' + # and 'id'-field for author information. (the 'id'-field leads to the user-profile on Serlo) + lifecycle_author = LomLifecycleItemloader() + lifecycle_author.add_value("role", "author") + if "name" in creator: + # the "name"-property will hold a Serlo username + lifecycle_author.add_value("firstName", creator["name"]) + if "id" in creator: + # the "id"-property will point towards a serlo profile + lifecycle_author.add_value("url", creator["id"]) + lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) + + @staticmethod + def get_lifecycle_metadata_providers(graphql_json, lom_base_item_loader): + """ + Retrieve metadata-provider metadata from GraphQL 'mainEntityOfPage'-items and store it in the provided + LomBaseItemLoader. 
+ """ + if "mainEntityOfPage" in graphql_json: + maeop_list: list[dict] = graphql_json["mainEntityOfPage"] + for maeop_item in maeop_list: + # for future reference - a single 'mainEntityOfpage'-item might look like this: + # { + # "dateCreated": "2023-07-11T15:24:14.042782898+00:00", + # "dateModified": "2023-07-11T15:24:14.042782898+00:00", + # "id": "https://serlo.org/metadata", + # "provider": { + # "id": "https://serlo.org/organization", + # "name": "Serlo Education e.V.", + # "type": "Organization" + # } + # } + lifecycle_metadata_provider = LomLifecycleItemloader() + lifecycle_metadata_provider.add_value("role", "metadata_provider") + if "dateCreated" in maeop_item: + date_created: str = maeop_item["dateCreated"] + if date_created: + lifecycle_metadata_provider.add_value("date", date_created) + elif "dateModified" in maeop_item: + date_modified: str = maeop_item["dateModified"] + if date_modified: + lifecycle_metadata_provider.add_value("date", date_modified) + if "id" in maeop_item: + maeop_item_url: str = maeop_item["id"] + if maeop_item_url: + lifecycle_metadata_provider.add_value("url", maeop_item_url) + if "provider" in maeop_item: + provider_dict: dict = maeop_item["provider"] + if "id" in provider_dict: + provider_url: str = provider_dict["id"] + if provider_url: + lifecycle_metadata_provider.add_value("url", provider_url) + if "name" in provider_dict: + provider_name: str = provider_dict["name"] + lifecycle_metadata_provider.add_value("organization", provider_name) + lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) + + @staticmethod + def get_lifecycle_publishers(graphql_json, lom_base_item_loader): + """Retrieve publisher metadata from GraphQL 'publisher'-items and store it in the provided LomBaseItemLoader.""" + graphql_publishers: list[dict] = graphql_json["publisher"] + if graphql_publishers: + for publisher_dict in graphql_publishers: + lifecycle_publisher = LomLifecycleItemloader() + lifecycle_publisher.add_value("role", "publisher") + if "name" in graphql_json["publisher"]: + publisher_name: str = publisher_dict["name"] + lifecycle_publisher.add_value("organization", publisher_name) + if "id" in graphql_json["publisher"]: + publisher_url: str = publisher_dict["id"] + lifecycle_publisher.add_value("url", publisher_url) + if "dateCreated" in graphql_json: + date_created: str = graphql_json["dateCreated"] + lifecycle_publisher.add_value("date", date_created) + elif "dateModified" in graphql_json: + date_modified: str = graphql_json["dateModified"] + if date_modified: + lifecycle_publisher.add_value("date", date_modified) + lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) From 0c3b6cd45d2ba5bb9d6b9768e3536b617c03948e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 11 Jul 2023 22:55:14 +0200 Subject: [PATCH 303/590] fix: import for .env settings Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 38ca851d..76ef31f0 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -6,7 +6,7 @@ import requests import scrapy -import env +from converter import env from converter.constants import Constants from converter.items import ( BaseItemLoader, From 2261ae965ea4052435bcc55215164670669893ea Mon Sep 17 00:00:00 2001 From: criamos 
<981166+Criamos@users.noreply.github.com> Date: Fri, 14 Jul 2023 01:33:40 +0200 Subject: [PATCH 304/590] sodix_spider v0.3.0 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - feat: mapping for all SODIX 'cost' values - feat: license mapping for "Schulfunk (§47)" - refactor: use LicenseMapper utility instead of crawler-specific solution - fix: weak warnings (function names should be lowercase) - style: code formatting via black - code cleanup Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/sodix_spider.py | 293 ++++++++++++++---------------- 1 file changed, 140 insertions(+), 153 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 4ea7b947..ffa98b59 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -1,4 +1,5 @@ import json +import logging from typing import Iterator import requests @@ -10,6 +11,7 @@ from .base_classes import LomBase from .. import env from ..items import LomLifecycleItemloader +from ..util.license_mapper import LicenseMapper def extract_eaf_codes_to_set(eaf_code_list: list[str]) -> set: @@ -29,14 +31,16 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): Crawler for learning materials from SODIX GraphQL API. This crawler cannot run without login-data. Please make sure that you have the necessary settings saved to your .env file: - SODIX_SPIDER_USERNAME="your_username" - SODIX_SPIDER_PASSWORD="your_password" - SODIX_SPIDER_OER_FILTER=True/False + + * SODIX_SPIDER_USERNAME="your_username" + * SODIX_SPIDER_PASSWORD="your_password" + * SODIX_SPIDER_OER_FILTER=True/False """ + name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.2.9" # last update: 2022-01-10 + version = "0.3.0" # last update: 2023-07-13 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -81,30 +85,25 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): "SONSTIGES": "other", "TEST": "assessment", "TEXT": "text", - "UBUNG": "drill and practice", + "UBUNG": "drill and practice", # (sic!) 
UBUNG is a typo in the SODIX API "UNTERRICHTSBAUSTEIN": "teaching module", "UNTERRICHTSPLANUNG": "lesson plan", "VERANSCHAULICHUNG": "demonstration", "VIDEO": "video", "WEBSEITE": "web page", "WEBTOOL": ["web page", "tool"], - - } - MAPPING_EDUCONTEXT = { - "Primarbereich": "Primarstufe", - "Fort- und Weiterbildung": "Fortbildung" } + MAPPING_EDUCONTEXT = {"Primarbereich": "Primarstufe", "Fort- und Weiterbildung": "Fortbildung"} MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT = { "Berufsschule": "Berufliche Bildung", "Fachoberschule": "Sekundarstufe II", - # "Förderschule": "Förderschule", "Gesamtschule": "Sekundarstufe I", "Grundschule": "Primarstufe", "Gymnasium": "Sekundarstufe II", "Kindergarten": "Elementarbereich", "Mittel- / Hauptschule": "Sekundarstufe I", - "Realschule": "Sekundarstufe I" + "Realschule": "Sekundarstufe I", } MAPPING_INTENDED_END_USER_ROLE = { @@ -112,22 +111,23 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): } MAPPING_LICENSE_NAMES = { - 'CC BY': Constants.LICENSE_CC_BY_40, - 'CC BY-NC': Constants.LICENSE_CC_BY_NC_40, - 'CC BY-NC-ND': Constants.LICENSE_CC_BY_NC_ND_40, - 'CC BY-NC-SA': Constants.LICENSE_CC_BY_NC_SA_40, - 'CC BY-ND': Constants.LICENSE_CC_BY_ND_40, - 'CC BY-SA': Constants.LICENSE_CC_BY_SA_40, - 'CC0': Constants.LICENSE_CC_ZERO_10, - 'Copyright, freier Zugang': Constants.LICENSE_COPYRIGHT_LAW, - 'Copyright, lizenzpflichtig': Constants.LICENSE_COPYRIGHT_LAW, - 'Gemeinfrei / Public Domain': Constants.LICENSE_PDM, - 'freie Lizenz': Constants.LICENSE_CUSTOM, - 'keine Angaben (gesetzliche Regelung)': Constants.LICENSE_CUSTOM, + "CC BY": Constants.LICENSE_CC_BY_40, + "CC BY-NC": Constants.LICENSE_CC_BY_NC_40, + "CC BY-NC-ND": Constants.LICENSE_CC_BY_NC_ND_40, + "CC BY-NC-SA": Constants.LICENSE_CC_BY_NC_SA_40, + "CC BY-ND": Constants.LICENSE_CC_BY_ND_40, + "CC BY-SA": Constants.LICENSE_CC_BY_SA_40, + "CC0": Constants.LICENSE_CC_ZERO_10, + "Copyright, freier Zugang": Constants.LICENSE_COPYRIGHT_LAW, + "Copyright, lizenzpflichtig": Constants.LICENSE_COPYRIGHT_LAW, + "Gemeinfrei / Public Domain": Constants.LICENSE_PDM, + "freie Lizenz": Constants.LICENSE_CUSTOM, + "keine Angaben (gesetzliche Regelung)": Constants.LICENSE_CUSTOM, + "Schulfunk (§47)": Constants.LICENSE_SCHULFUNK } def __init__(self, oer_filter: str = "False", **kwargs): - if oer_filter.lower() == "true" or env.get_bool(key='SODIX_SPIDER_OER_FILTER', default=False) is True: + if oer_filter.lower() == "true" or env.get_bool(key="SODIX_SPIDER_OER_FILTER", default=False) is True: # Scrapy arguments are always handled as Strings, even if you try to set a boolean # see: https://docs.scrapy.org/en/latest/topics/spiders.html#spider-arguments self.OER_FILTER = True @@ -150,15 +150,15 @@ def getUri(self, response=None) -> str: # or media.originalUrl? 
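        # Note on the dotted-path lookups used throughout this spider: JSONBase.get("media.url", json=...) is
        # expected to walk the nested response dictionary key by key. A stand-alone equivalent (hypothetical
        # helper, not the actual JSONBase implementation) would look roughly like this:
        #
        #   def get_nested(data: dict, dotted_path: str, default=None):
        #       current = data
        #       for key in dotted_path.split("."):
        #           if isinstance(current, dict) and key in current:
        #               current = current[key]
        #           else:
        #               return default
        #       return current
        #
        #   get_nested({"media": {"url": "https://example.org/video.mp4"}}, "media.url")
        #   # -> "https://example.org/video.mp4"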
return self.get("media.url", json=response.meta["item"]) - def startRequest(self, page=0): + def start_request(self, page=0): access_token = requests.post( "https://api.sodix.de/gql/auth/login", None, { "login": env.get("SODIX_SPIDER_USERNAME"), "password": env.get("SODIX_SPIDER_PASSWORD"), - } - ).json()['access_token'] + }, + ).json()["access_token"] if self.OER_FILTER is True: recordstatus_parameter = ", recordStatus: ACTIVATED" # by using the recordStatus parameter during the GraphQL query, only a subset of available items is returned @@ -169,8 +169,9 @@ def startRequest(self, page=0): return scrapy.Request( url=self.apiUrl, callback=self.parse_request, - body=json.dumps({ - "query": f"""{{ + body=json.dumps( + { + "query": f"""{{ findAllMetadata(page: {page}, pageSize: {self.page_size}{recordstatus_parameter}) {{ id identifier @@ -247,24 +248,25 @@ def startRequest(self, page=0): linkedObjects }} }}""", - "operationName": None - }), + "operationName": None, + } + ), method="POST", headers={ "Accept": "application/json", "Content-Type": "application/json", - "Authorization": "Bearer " + access_token + "Authorization": "Bearer " + access_token, }, meta={"page": page}, ) def start_requests(self): - yield self.startRequest() + yield self.start_request() def parse_request(self, response): results = json.loads(response.body) if results: - metadata_items: dict = results['data']['findAllMetadata'] + metadata_items: dict = results["data"]["findAllMetadata"] # if len(metadata_items) == 0: # return if metadata_items: @@ -272,24 +274,26 @@ def parse_request(self, response): for item in metadata_items: response_copy = response.copy() response_copy.meta["item"] = item - if self.OER_FILTER is True or env.get_bool('SODIX_SPIDER_OER_FILTER', default=False): + if self.OER_FILTER is True or env.get_bool("SODIX_SPIDER_OER_FILTER", default=False): # Since DropItem exceptions can only be raised from within the pipeline, the filtering of items # that aren't strictly OER-licenses needs to happen here. # - controlling the OER-Filter via spider arguments is useful for debugging, but we also need # an easy way to control the spider via the .env file (while running it as a Docker container) if self.license_is_oer(response_copy) is False: self.NOT_OER_THROWAWAY_COUNTER += 1 - self.logger.info(f"Item dropped due to OER-incompatibility. \n" - f"Total amount of items dropped so far: " - f"{self.NOT_OER_THROWAWAY_COUNTER}") + self.logger.info( + f"Item dropped due to OER-incompatibility. 
\n" + f"Total amount of items dropped so far: " + f"{self.NOT_OER_THROWAWAY_COUNTER}" + ) continue if self.hasChanged(response_copy): - yield self.handleEntry(response_copy) + yield self.handle_entry(response_copy) # ToDo: links to binary files (.jpeg) cause errors while building the BaseItem, we might have to filter # specific media types / URLs - yield self.startRequest(response.meta["page"] + 1) + yield self.start_request(response.meta["page"] + 1) - def handleEntry(self, response): + def handle_entry(self, response): return self.parse(response=response) def getBase(self, response) -> BaseItemLoader: @@ -305,20 +309,15 @@ def getBase(self, response) -> BaseItemLoader: base.replace_value("thumbnail", media_thumb_preview) elif source_image_url: base.replace_value("thumbnail", source_image_url) - # for publisher in self.get("publishers", json=response.meta["item"]): - # base.add_value( - # "publisher", publisher['title'] - # ) - # ToDo: the 'publisher'-field in BaseItem will be removed in the future base.add_value("status", self.get("recordStatus", json=response.meta["item"])) last_modified = self.get("updated", json=response.meta["item"]) if last_modified: - base.add_value('lastModified', last_modified) + base.add_value("lastModified", last_modified) source_id: str = self.get("source.id", json=response.meta["item"]) # ToDo: the crawler can't write description text to subfolder names yet # 'source.name' or 'source.description' could be used here to make the subfolders more human-readable if source_id: - base.add_value('origin', source_id) + base.add_value("origin", source_id) self.extract_and_save_eaf_codes_to_custom_field(base, response) return base @@ -351,22 +350,16 @@ def extract_and_save_eaf_codes_to_custom_field(self, base: BaseItemLoader, respo eaf_code_subjects.update(eaf_code_competencies) eaf_code_combined = list(eaf_code_subjects) eaf_code_combined.sort() - base.add_value('custom', { - 'ccm:taxonentry': eaf_code_combined - }) + base.add_value("custom", {"ccm:taxonentry": eaf_code_combined}) elif eaf_code_subjects or eaf_code_competencies: if eaf_code_subjects: eaf_code_subjects_list: list = list(eaf_code_subjects) eaf_code_subjects_list.sort() - base.add_value('custom', { - 'ccm:taxonentry': eaf_code_subjects_list - }) + base.add_value("custom", {"ccm:taxonentry": eaf_code_subjects_list}) if eaf_code_competencies: eaf_code_competencies_list: list = list(eaf_code_competencies) eaf_code_competencies_list.sort() - base.add_value('custom', { - 'ccm:taxonentry': eaf_code_competencies_list - }) + base.add_value("custom", {"ccm:taxonentry": eaf_code_competencies_list}) def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader | None: lifecycle = LomBase.getLOMLifecycle(response) @@ -378,9 +371,9 @@ def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader | No # edge-case: Some Sodix Items can have a "authorWebsite", but no valid "author"-value (e.g. null). # saving only the authorWebsite would lead to an empty author-symbol in the edu-sharing workspace view, # which is why the current workaround is to only save this field if BOTH values are available and valid. 
- lifecycle.add_value('role', 'author') - lifecycle.add_value('organization', author) - lifecycle.add_value('url', author_website) + lifecycle.add_value("role", "author") + lifecycle.add_value("organization", author) + lifecycle.add_value("url", author_website) return lifecycle else: return None @@ -391,28 +384,28 @@ def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleIte # Sodix 'publishers'-field is a list of Publishers, therefore we need to iterate through them if publishers: for publisher in publishers: - lifecycle.add_value('role', 'publisher') + lifecycle.add_value("role", "publisher") if "title" in publisher: publisher_name = publisher.get("title") if publisher_name: - lifecycle.add_value('organization', publisher_name) + lifecycle.add_value("organization", publisher_name) if "id" in publisher: publisher_sodix_uuid: str = publisher.get("id") if publisher_sodix_uuid: # this uuid is used by Sodix to differentiate publishers - lifecycle.add_value('uuid', publisher_sodix_uuid) + lifecycle.add_value("uuid", publisher_sodix_uuid) if "officialWebsite" in publisher: publisher_url: str = publisher.get("officialWebsite") if publisher_url: - lifecycle.add_value('url', publisher_url) + lifecycle.add_value("url", publisher_url) published_time = self.get("publishedTime", json=response.meta["item"]) creation_date = self.get("creationDate", json=response.meta["item"]) source: dict = self.get("source", json=response.meta["item"]) if published_time: # the 'publishedTime'-field is 95% null or empty, which is why several fallbacks are needed - lifecycle.add_value('date', published_time) + lifecycle.add_value("date", published_time) elif creation_date: - lifecycle.add_value('date', creation_date) + lifecycle.add_value("date", creation_date) elif source: if "created" in source: # Sodix field 'source.created' is of type LocalDateTime and available most of the time. Its usage @@ -420,7 +413,7 @@ def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleIte # in case the other fields aren't available created_date = source.get("created") if created_date: - lifecycle.add_value('date', created_date) + lifecycle.add_value("date", created_date) yield lifecycle def get_lom_lifecycle_metadata_provider(self, response=None) -> LomLifecycleItemloader: @@ -429,29 +422,26 @@ def get_lom_lifecycle_metadata_provider(self, response=None) -> LomLifecycleItem 'ccm:metadatacontributer_provider'-field. """ lifecycle = LomBase.getLOMLifecycle(response) - source: dict = self.get('source', json=response.meta["item"]) + source: dict = self.get("source", json=response.meta["item"]) if source: - lifecycle.add_value('role', 'metadata_provider') + lifecycle.add_value("role", "metadata_provider") # all 'source'-subfields are of Type: String - if source.get('id'): - lifecycle.add_value('uuid', source.get('id')) - if source.get('name'): - lifecycle.add_value('organization', source.get('name')) - if source.get('created'): + if source.get("id"): + lifecycle.add_value("uuid", source.get("id")) + if source.get("name"): + lifecycle.add_value("organization", source.get("name")) + if source.get("created"): # LocalDateTime within the String, e.g.: "2022-10-17T11:42:49.198" - lifecycle.add_value('date', source.get('created')) + lifecycle.add_value("date", source.get("created")) # ToDo: Sodix 'source.edited'-field also carries a LocalDateTime, but we currently can't make a distinction # between lifecycle metadata_provider dates (e.g. 
between a creationDate <-> lastModified) - if source.get('website'): - lifecycle.add_value('url', source.get('website')) + if source.get("website"): + lifecycle.add_value("url", source.get("website")) return lifecycle def getLOMGeneral(self, response) -> LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) - general.replace_value( - "title", - self.get("title", json=response.meta["item"]) - ) + general.replace_value("title", self.get("title", json=response.meta["item"])) if "keywords" in response.meta["item"]: keywords: list = self.get("keywords", json=response.meta["item"]) keywords_cleaned_up: list = list() @@ -461,19 +451,19 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: if individual_keyword.strip(): # we're only adding valid keywords, none of the empty (whitespace) strings keywords_cleaned_up.append(individual_keyword) - general.add_value('keyword', individual_keyword) + general.add_value("keyword", individual_keyword) subjects = self.get_subject_dictionary(response) if subjects: subject_names = list(subjects.values()) subject_names.sort() keywords_cleaned_up.extend(subject_names) - general.replace_value('keyword', keywords_cleaned_up) + general.replace_value("keyword", keywords_cleaned_up) if "language" in response.meta["item"]: languages: list = self.get("language", json=response.meta["item"]) if languages and isinstance(languages, list): # Sodix returns empty lists and 'null' occasionally for language in languages: - general.add_value('language', language) + general.add_value("language", language) if "description" in response.meta["item"]: description: str = self.get("description", json=response.meta["item"]) if description: @@ -487,30 +477,24 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: # identify duplicates later in edu-sharing) sodix_identifier: str = self.get("identifier", json=response.meta["item"]) if sodix_identifier: - general.add_value('identifier', sodix_identifier) + general.add_value("identifier", sodix_identifier) sodix_id: str = self.get("id", json=response.meta["item"]) if sodix_id: - general.add_value('identifier', sodix_id) + general.add_value("identifier", sodix_id) return general def getLOMTechnical(self, response) -> LomTechnicalItemLoader: technical = LomBase.getLOMTechnical(self, response) technical.replace_value("format", self.get("media.dataType", json=response.meta["item"])) - technical.replace_value( - "location", self.getUri(response) - ) + technical.replace_value("location", self.getUri(response)) original = self.get("media.originalUrl", json=response.meta["item"]) if original and self.getUri(response) != original: - technical.add_value( - "location", original - ) + technical.add_value("location", original) duration: str = self.get("media.duration", json=response.meta["item"]) if duration and duration != 0: # the API response contains "null"-values, we're making sure to only add valid duration values to our item technical.add_value("duration", duration) - technical.add_value( - "size", self.get("media.size", json=response.meta["item"]) - ) + technical.add_value("size", self.get("media.size", json=response.meta["item"])) return technical def license_is_oer(self, response) -> bool: @@ -533,43 +517,44 @@ def license_is_oer(self, response) -> bool: Constants.LICENSE_CC_BY_SA_30, Constants.LICENSE_CC_BY_SA_40, Constants.LICENSE_CC_ZERO_10, - Constants.LICENSE_PDM] + Constants.LICENSE_PDM, + ] def getLicense(self, response) -> LicenseItemLoader: license_loader = LomBase.getLicense(self, response) - author: str = 
self.get('author', json=response.meta['item']) + author: str = self.get("author", json=response.meta["item"]) if author: - license_loader.add_value('author', author) + license_loader.add_value("author", author) license_description: str = self.get("license.text", json=response.meta["item"]) additional_license_information: str = self.get("additionalLicenseInformation") # the Sodix field 'additionalLicenseInformation' is empty 95% of the time, but sometimes it might serve as a # fallback for the license description if license_description: - license_loader.add_value('description', license_description) + license_loader.add_value("description", license_description) elif additional_license_information: - license_loader.add_value('description', additional_license_information) + license_loader.add_value("description", additional_license_information) license_name: str = self.get("license.name", json=response.meta["item"]) if license_name: if license_name in self.MAPPING_LICENSE_NAMES: - license_mapped_url = self.MAPPING_LICENSE_NAMES.get(license_name) - # if mapping was successful, license_mapped_url contains a license URL + license_name_mapped = self.MAPPING_LICENSE_NAMES.get(license_name) + # if mapping was successful, license_name_mapped contains a license URL if license_name.startswith("CC"): # for CC-licenses the actual URL is more precise than our 'internal' license mapping # (you would see differences between the 'internal' value and the actual URL from the API, # e.g. a license pointing to v3.0 and v4.0 at the same time) pass else: - if license_mapped_url in [Constants.LICENSE_COPYRIGHT_LAW]: - license_loader.add_value('internal', license_mapped_url) + if license_name_mapped in [Constants.LICENSE_COPYRIGHT_LAW]: + license_loader.add_value("internal", license_name_mapped) else: - license_loader.add_value('url', license_mapped_url) + license_loader.add_value("url", license_name_mapped) if not license_description: # "name"-fields with the "Copyright, freier Zugang"-value don't have "text"-fields, therefore # we're carrying over the custom description, just in case - license_loader.replace_value('description', license_name) + license_loader.replace_value("description", license_name) - license_url: str = self.get("license.url", json=response.meta["item"]) + license_url_raw: str = self.get("license.url", json=response.meta["item"]) # possible license URL values returned by the Sodix API: # license_urls_sorted = ['https://creativecommons.org/licenses/by-nc-nd/2.0/de/', # 'https://creativecommons.org/licenses/by-nc-nd/3.0/de/', @@ -601,33 +586,23 @@ def getLicense(self, response) -> LicenseItemLoader: # 'https://creativecommons.org/licenses/by/4.0/', # 'https://creativecommons.org/publicdomain/mark/1.0/deed.de', # 'https://creativecommons.org/publicdomain/zero/1.0/deed.de'] - if license_url: + if license_url_raw: # making sure to only handle valid license urls, since the API result can be NoneType or empty string ('') - if license_url.endswith("deed.de"): - license_url = license_url[:-len("deed.de")] - if license_url.endswith("/de/"): - license_url = license_url[:-len("de/")] - # cutting off the "de/"-part of the URL while leaving the rest intact - elif license_url.endswith("/fr/"): - license_url = license_url[:-len("fr/")] - license_loader.replace_value('url', license_url) + license_mapper = LicenseMapper() + license_url_mapped: str = license_mapper.get_license_url(license_url_raw) + if license_url_mapped: + license_loader.replace_value("url", license_url_mapped) return license_loader def 
getLOMEducational(self, response=None) -> LomEducationalItemLoader: educational = LomBase.getLOMEducational(response) - class_level = self.get('classLevel', json=response.meta['item']) + class_level = self.get("classLevel", json=response.meta["item"]) if class_level and len(class_level.split("-")) == 2: split = class_level.split("-") tar = LomAgeRangeItemLoader() # mapping from classLevel to ageRange - tar.add_value( - "fromRange", - int(split[0]) + 5 - ) - tar.add_value( - "toRange", - int(split[1]) + 5 - ) + tar.add_value("fromRange", int(split[0]) + 5) + tar.add_value("toRange", int(split[1]) + 5) educational.add_value("typicalAgeRange", tar.load_item()) return educational @@ -638,14 +613,14 @@ def get_subject_dictionary(self, response) -> dict[str, str] | None: Sodix 'subject.name' as its value. """ subject_dictionary = dict() - if "subject" in response.meta['item'] is not None: + if "subject" in response.meta["item"] is not None: # the "subject"-field does not exist in every item returned by the sodix API - subjects_list: list = self.get('subject', json=response.meta['item']) + subjects_list: list = self.get("subject", json=response.meta["item"]) if subjects_list: # the "subject"-key might exist in the API, but still be of 'None'-value for subject in subjects_list: - subject_name: str = subject['name'] - subject_id: str = subject['id'] + subject_name: str = subject["name"] + subject_id: str = subject["id"] subject_dictionary.update({subject_id: subject_name}) return subject_dictionary else: @@ -658,9 +633,9 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: subject_ids = list(subjects.keys()) if subject_ids: subject_ids.sort() - valuespaces.add_value('discipline', subject_ids) - educational_context_list = self.get('educationalLevels', json=response.meta['item']) - school_types_list = self.get('schoolTypes', json=response.meta['item']) + valuespaces.add_value("discipline", subject_ids) + educational_context_list = self.get("educationalLevels", json=response.meta["item"]) + school_types_list = self.get("schoolTypes", json=response.meta["item"]) educational_context_set = set() if educational_context_list: # the Sodix field 'educationalLevels' is directly mappable to our 'educationalContext' @@ -679,29 +654,42 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: if educational_context_list: valuespaces.add_value("educationalContext", educational_context_list) - target_audience_list = self.get('targetAudience', json=response.meta['item']) + target_audience_list = self.get("targetAudience", json=response.meta["item"]) + # possible 'targetAudience'-values according to the SODIX API Docs: "teacher", "learner", "parent" if target_audience_list: for target_audience_item in target_audience_list: if target_audience_item in self.MAPPING_INTENDED_END_USER_ROLE: target_audience_item = self.MAPPING_INTENDED_END_USER_ROLE.get(target_audience_item) - valuespaces.add_value('intendedEndUserRole', target_audience_item) - - if self.get('cost', json=response.meta['item']) == "FREE": - valuespaces.add_value("price", "no") - potential_lrts = self.get('learnResourceType', json=response.meta['item']) + valuespaces.add_value("intendedEndUserRole", target_audience_item) + + cost: str | None = self.get("cost", json=response.meta["item"]) + if cost: + cost = cost.lower() + match cost: + case "free": + valuespaces.add_value("price", "no") + case "freemium": + valuespaces.add_value("price", "yes_for_additional") + case "fee required": + valuespaces.add_value("price", "yes") + case _: + 
logging.info( + f"SODIX 'cost' value '{cost}' was not recognized. Please check the SODIX API " + f"Documentation if the possible range of values has changed in the meantime. " + f"(In this case: additional metadata values need to be mapped.)" + ) + potential_lrts = self.get("learnResourceType", json=response.meta["item"]) # attention: Sodix calls their LRT "learnResourceType", not "learningResourceType" if potential_lrts: for potential_lrt in potential_lrts: if potential_lrt in self.MAPPING_LRT: potential_lrt = self.MAPPING_LRT.get(potential_lrt) - valuespaces.add_value('learningResourceType', potential_lrt) + valuespaces.add_value("learningResourceType", potential_lrt) return valuespaces def parse(self, response, **kwargs): if LomBase.shouldImport(response) is False: - self.logger.debug( - f"Skipping entry {str(self.getId(response))} because shouldImport() returned false" - ) + self.logger.debug(f"Skipping entry {str(self.getId(response))} because shouldImport() returned false") return None if self.getId(response) is not None and self.getHash(response) is not None: if not self.hasChanged(response): @@ -713,31 +701,30 @@ def parse(self, response, **kwargs): general = self.getLOMGeneral(response) # "UNTERRICHTSBAUSTEIN"-Materials need to handled as aggregationLevel = 2 (according to LOM-DE) - potential_lrts = self.get('learnResourceType', json=response.meta['item']) + potential_lrts = self.get("learnResourceType", json=response.meta["item"]) if potential_lrts: if "UNTERRICHTSBAUSTEIN" in potential_lrts: - general.add_value('aggregationLevel', 2) - + general.add_value("aggregationLevel", 2) technical = self.getLOMTechnical(response) if self.get("author", json=response.meta["item"]): lifecycle_author = self.get_lom_lifecycle_author(response) if lifecycle_author: - lom.add_value('lifecycle', lifecycle_author.load_item()) + lom.add_value("lifecycle", lifecycle_author.load_item()) if self.get("publishers", json=response.meta["item"]): # theoretically, there can be multiple publisher fields per item, but in reality this doesn't occur (yet). 
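            # Shape of a single "publishers" entry, assumed from the accesses in get_lom_lifecycle_publisher()
            # ("title", "id", "officialWebsite") — hypothetical values, for illustration only:
            # publishers_example = [
            #     {"id": "<sodix-publisher-uuid>", "title": "Some Publisher", "officialWebsite": "https://example.org"}
            # ]
            # each entry yields one 'publisher' lifecycle contribution, see the iterator below: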
lifecycle_iterator: Iterator[LomLifecycleItemloader] = self.get_lom_lifecycle_publisher(response) for lifecycle_publisher in lifecycle_iterator: - lom.add_value('lifecycle', lifecycle_publisher.load_item()) + lom.add_value("lifecycle", lifecycle_publisher.load_item()) if self.get("source", json=response.meta["item"]): lifecycle_metadata_provider = self.get_lom_lifecycle_metadata_provider(response) - lom.add_value('lifecycle', lifecycle_metadata_provider.load_item()) + lom.add_value("lifecycle", lifecycle_metadata_provider.load_item()) educational = self.getLOMEducational(response) classification = self.getLOMClassification(response) - lom.add_value('general', general.load_item()) - lom.add_value('technical', technical.load_item()) - lom.add_value('educational', educational.load_item()) - lom.add_value('classification', classification.load_item()) + lom.add_value("general", general.load_item()) + lom.add_value("technical", technical.load_item()) + lom.add_value("educational", educational.load_item()) + lom.add_value("classification", classification.load_item()) base.add_value("lom", lom.load_item()) base.add_value("valuespaces", self.getValuespaces(response).load_item()) From 84b359e87c6743f19bfb7e813a676c4ad1f351d6 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 14 Jul 2023 01:35:22 +0200 Subject: [PATCH 305/590] LicenseMapper: additional tests for Public Domain and CC-0 URLs Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/util/test_license_mapper.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py index 87adc333..54ddefc1 100644 --- a/converter/util/test_license_mapper.py +++ b/converter/util/test_license_mapper.py @@ -10,6 +10,7 @@ class TestLicenseMapper: [ ("a random CC-BY 4.0 string", Constants.LICENSE_CC_BY_40), ("CC-0", Constants.LICENSE_CC_ZERO_10), + ("https://creativecommons.org/publicdomain/zero/1.0/deed.de", Constants.LICENSE_CC_ZERO_10), ("the license CC0 is mentioned somewhere", Constants.LICENSE_CC_ZERO_10), ("CC-Zero", Constants.LICENSE_CC_ZERO_10), ("Creative Commons Zero", Constants.LICENSE_CC_ZERO_10), @@ -31,6 +32,7 @@ class TestLicenseMapper: None, ), ("Public Domain", Constants.LICENSE_PDM), + ("https://creativecommons.org/publicdomain/mark/1.0/deed.de", Constants.LICENSE_PDM), ("https://creativecommons.org/licenses/by-nc-nd/3.0/deed.DE", Constants.LICENSE_CC_BY_NC_ND_30), ("https://creativecommons.org/licenses/by-nc-nd/2.0/deed.CA", Constants.LICENSE_CC_BY_NC_ND_20), ("https://creativecommons.org/licenses/by-sa/4.0/deed.es_ES", Constants.LICENSE_CC_BY_SA_40), @@ -63,6 +65,7 @@ def test_get_license_url(self, test_input, expected_result): (" CC BY SA ", "CC_BY_SA"), ("dieser Text ist public domain", "PDM"), ("Gemeinfrei", "PDM"), + ("Gemeinfrei / public domain", "PDM"), ("Frei nutzbares Material", None), (" ", None), ("", None), From 6ea6f009540437506e7fb5b01c459a123418e3b5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 14 Jul 2023 12:21:34 +0200 Subject: [PATCH 306/590] =?UTF-8?q?feat:=20licenses=20for=20=C2=A747=20and?= =?UTF-8?q?=20=C2=A760b?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - add: Constants for "Schulfunk" (§47 UrhG)" and "§60b Unterrichts- und Lehrmedien" to constants.py -- add: checks for these two licenses to es_connector.py -- change: improve readability of match-case statement for license["internal"] 
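A minimal usage sketch (assuming the LicenseItemLoader that the crawlers above already use; field
names in a concrete crawler may differ):

    # inside a crawler's getLicense():
    license_loader.add_value("internal", Constants.LICENSE_SCHULFUNK)
    # or, for teaching and learning media covered by §60b UrhG:
    license_loader.add_value("internal", Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN)
    # es_connector's match-case then writes license["internal"] to "ccm:commonlicense_key"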
Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/constants.py | 2 ++ converter/es_connector.py | 7 ++++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index ac29156f..b4ec9dce 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -91,6 +91,8 @@ class Constants: LICENSE_COPYRIGHT_LAW: Final[str] = "COPYRIGHT_FREE" LICENSE_CUSTOM: Final[str] = "CUSTOM" # Custom License, use the license description field for arbitrary values LICENSE_NONPUBLIC: Final[str] = "NONPUBLIC" + LICENSE_SCHULFUNK: Final[str] = "SCHULFUNK" # "Schulfunk (§47 UrhG)" + LICENSE_UNTERRICHTS_UND_SCHULMEDIEN = "UNTERRICHTS_UND_LEHRMEDIEN" # "§60b Unterrichts- und Lehrmedien" NEW_LRT_MATERIAL: Final[str] = "1846d876-d8fd-476a-b540-b8ffd713fedb" NEW_LRT_TOOL: Final[str] = "cefccf75-cba3-427d-9a0f-35b4fedcbba1" diff --git a/converter/es_connector.py b/converter/es_connector.py index 03367de1..8bff4838 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -296,9 +296,10 @@ def mapLicense(self, spaces, license): f"please check if the license-mapping within es_connector.py is up-to-date.") if "internal" in license: match license["internal"]: - case Constants.LICENSE_COPYRIGHT_LAW: - spaces["ccm:commonlicense_key"] = "COPYRIGHT_FREE" - case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM": + case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM" \ + | Constants.LICENSE_COPYRIGHT_LAW \ + | Constants.LICENSE_SCHULFUNK \ + | Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN: spaces["ccm:commonlicense_key"] = license["internal"] case Constants.LICENSE_CUSTOM: spaces["ccm:commonlicense_key"] = "CUSTOM" From aa5c242c80a4a4b668115e45858bdcc4cc63ecbd Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 14 Jul 2023 13:54:19 +0200 Subject: [PATCH 307/590] docs: update .env.example - add: docs for 'sodix_spider'- and 'serlo_spider'-specific settings - style: formatting and ordering Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/.env.example | 58 ++++++++++++++++++++++++++---------------- 1 file changed, 36 insertions(+), 22 deletions(-) diff --git a/converter/.env.example b/converter/.env.example index acac13e6..846ef006 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -1,37 +1,33 @@ +# --- LOGGING-specific settings: # Add a url for your log file. If not set, stdoutput will be used #LOG_FILE = "/var/log/scrapy.log" - -# Level for logs, supported DEBUG, INFO, WARNING, ERROR +# Set the level for logs here. Supported values: "DEBUG", "INFO", "WARNING", "ERROR" LOG_LEVEL = "WARNING" -# MODE (edu-sharing, csv, json, or None) +# --- Crawling-modes: control where crawled items should be stored/exported. +# Available modes: 'edu-sharing', 'csv', 'json' or 'None' MODE = "csv" - -# csv rows to export from dataset (comma seperated, only used if mode == "csv") +# ------ CSV Export settings (Only used if MODE == "csv"!): +# csv rows to export from dataset (comma seperated! field-names according to items.py!) 
CSV_ROWS = "lom.general.title,lom.general.description,lom.general.keyword,lom.technical.location,valuespaces.discipline,valuespaces.learningResourceType" -# Splash Integration settings for the local container, +# --- 'Splash'-Integration settings for the local container, # for more information, see https://splash.readthedocs.io/en/stable/ DISABLE_SPLASH = False SPLASH_URL = "http://localhost:8050" +# --- headless-browser settings for the local container: # PYPPETEER Integration settings, as needed for the local container (as used in kmap_spider.py) # for more information, see: https://github.com/pyppeteer/pyppeteer PYPPETEER_WS_ENDPOINT="ws://localhost:3000" # Playwright Integration, as needed for the local container (https://hub.docker.com/r/browserless/chrome#playwright) PLAYWRIGHT_WS_ENDPOINT="ws://localhost:3000" -# Edu-Sharing instance that the crawlers should upload to +# --- Edu-Sharing instance that the crawlers should upload to EDU_SHARING_BASE_URL = "http://localhost:8080/edu-sharing/" EDU_SHARING_USERNAME = "admin" EDU_SHARING_PASSWORD = "admin" -# Continue / complete a previously aborted crawl process by skipping updates of already known items. -CONTINUE_CRAWL=False -EDU_SHARING_PRECHECK_SAVED_SEARCH_ID="" -# Setting CONTINUE_CRAWL to True will skip all updates of previously crawled items and ONLY crawl new ones! -# ONLY use this mode if you wish to debug/complement/complete huge crawl processes which haven't completed on their own! - # Configure if permissions of edu-sharing nodes are handled by the crawler (default true) # You may want to set this to false if you don't want to apply permissions from crawlers or have a custom implementation in the repository # EDU_SHARING_PERMISSION_CONTROL=true @@ -41,23 +37,41 @@ EDU_SHARING_PRECHECK_SAVED_SEARCH_ID="" # If set to true, don't upload to (above mentioned) Edu-Sharing instance DRY_RUN = True -# you can add one or more custom pipelines here to trigger -# the syntax is: pipeline.package.id:PRIORITY[,pipeline.package.id:PRIORITY,...] +# --- OERSI-specific settings (oersi_spider): +# Only crawl a specific metadata provider from OERSI (separate multiple providers by semicolon!): +OERSI_METADATA_PROVIDER="KI-Campus;iMoox" +# Continue / complete a previously aborted crawl process by skipping updates of already known items. +# CONTINUE_CRAWL=False +# EDU_SHARING_PRECHECK_SAVED_SEARCH_ID="" +# Setting CONTINUE_CRAWL to True will skip all updates of previously crawled items and ONLY crawl new ones! +# ONLY use this mode if you wish to debug/complement/complete huge crawl processes which haven't completed on their own! + +# --- Scrapy Pipeline settings: +# You can add one or more custom pipelines here to trigger. +# The syntax is: pipeline.package.id:PRIORITY[,pipeline.package.id:PRIORITY,...] # Use this if you e.g. want to do custom property mapping for any crawler before storing the data # CUSTOM_PIPELINES = "converter.pipelines.ExampleLoggingPipeline:100" -# Your YouTube API key (required for running the youtube crawler 'youtube_spider'): +# --- YouTube-related Settings (REQUIRED for youtube_spider!) +# Set your YouTube API key before trying to run the YouTube-crawler: YOUTUBE_API_KEY="" # If you only want to crawl a single YouTube channel/playlist, activate the LIMITED crawl mode by setting its URL here: -#YOUTUBE_LIMITED_CRAWL_URL="" # (Please make sure that your 'csv/youtube.csv' contains the same URL!) 
+#YOUTUBE_LIMITED_CRAWL_URL="" -# only for oeh spider: select the sources you want to fetch from oeh (comma seperated) +# --- oeh_spider settings: +# Select the sources you want to fetch from OpenEduHub (comma seperated): # OEH_IMPORT_SOURCES = 'oeh,wirlernenonline_spider,serlo_spider,youtube_spider' -# only for spiders based on edu_sharing: Use a saved search (object must be published to everyone in edu-sharing) to query from +# Only for spiders based on edu_sharing: Use a saved search (object must be published to everyone in edu-sharing) to query from # EDU_SHARING_IMPORT_SEARCH_ID = "" -# Sodix Spider login data -# SODIX_SPIDER_USERNAME = "" -# SODIX_SPIDER_PASSWORD = "" \ No newline at end of file +# --- SODIX login data (REQUIRED to run sodix_spider!) +# SODIX_SPIDER_USERNAME = "" +# SODIX_SPIDER_PASSWORD = "" +# --- sodix_spider settings: +# SODIX_SPIDER_OER_FILTER=True # OPTIONAL setting for crawling ONLY OER-compatible materials + +# --- serlo_spider (v0.2.8+) settings: +# SERLO_MODIFIED_AFTER="2023-07-01" # Crawl only Serlo Materials which have been modified (by Serlo authors) after +# . Use this setting to improve the crawling speed of periodic crawls. \ No newline at end of file From f7570b8e01429333e05656d47d60f04886eba4ae Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 20 Jul 2023 09:05:52 +0200 Subject: [PATCH 308/590] improve docker build speed when developing --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 344e6d54..dec4cdb2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,13 +6,13 @@ WORKDIR / COPY entrypoint.sh entrypoint.sh COPY requirements.txt requirements.txt +RUN pip3 install -r requirements.txt COPY scrapy.cfg scrapy.cfg COPY setup.cfg setup.cfg COPY converter/ converter/ COPY csv/ csv/ COPY edu_sharing_client/ edu_sharing_client/ COPY valuespace_converter/ valuespace_converter/ -RUN pip3 install -r requirements.txt ENTRYPOINT ["/entrypoint.sh"] From 9c9bdcf1f436c7fd903e9c405ed5f01fa969201e Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 20 Jul 2023 09:06:18 +0200 Subject: [PATCH 309/590] add EDU_SHARING_COOKIE_REBUILD_THRESHOLD config/env variable --- converter/es_connector.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 8bff4838..97e3320a 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -107,6 +107,10 @@ class CreateGroupType(Enum): enabled: bool def __init__(self): + cookie_threshold = env.get('EDU_SHARING_COOKIE_REBUILD_THRESHOLD', True) + if cookie_threshold: + logging.info('Setting COOKIE_REBUILD_THRESHOLD to ' + str(cookie_threshold) + ' seconds') + self.COOKIE_REBUILD_THRESHOLD = cookie_threshold self.enabled = env.get("MODE", default="edu-sharing") == "edu-sharing" if self.enabled: self.initApiClient() From 18d4106ce2bbb4e04bdaabdca8116d3f4d231213 Mon Sep 17 00:00:00 2001 From: Torsten Simon Date: Thu, 20 Jul 2023 11:55:44 +0200 Subject: [PATCH 310/590] fix:log cookie infos --- converter/es_connector.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 97e3320a..dd4b75f2 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -648,6 +648,7 @@ def updateItem(self, spider, uuid, item): @staticmethod def initCookie(): + logging.debug("Init edu sharing cookie...") settings = get_project_settings() auth = requests.get( settings.get("EDU_SHARING_BASE_URL") @@ -659,6 +660,7 @@ def initCookie(): headers={"Accept": 
"application/json"}, ) isAdmin = json.loads(auth.text)["isAdmin"] + logging.info("Got edu sharing cookie, admin status: " + str(isAdmin)) if isAdmin: cookies = [] for cookie in auth.headers["SET-COOKIE"].split(","): From 4c234edab15c324269132837f4f347402930709d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 12:12:48 +0200 Subject: [PATCH 311/590] fix: re-initialize edu-sharing API client on HTTP-status 401 - edu-sharing sometimes "forgets" the current session cookie of the crawler and throws a 401 error ("Admin rights are required for this endpoint") during a longer crawl process -- when this happens, the session cookie needs to be renegotiated between the crawler and edu-sharing Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index dd4b75f2..d12de135 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -136,6 +136,8 @@ def syncNode(self, spider, type, properties): reset_version=EduSharing.resetVersion, ) except ApiException as e: + # ToDo: + # - error-handling for code 500 ("java.util.concurrent.TimeoutException") jsonError = json.loads(e.body) if jsonError["error"] == "java.lang.IllegalStateException": logging.warning( @@ -749,7 +751,17 @@ def findItem(self, id, spider): properties["ccm:replicationsourcehash"][0], ] except ApiException as e: + if e.status == 401: + # Typically happens when the edu-sharing session cookie is lost and needs to be renegotiated. + # (edu-sharing error-message: "Admin rights are required for this endpoint") + logging.info(f"ES_CONNECTOR - findItem: edu-sharing returned HTTP-statuscode 401.") + logging.debug(f"(HTTP-Body: '{e.body}')") + logging.debug(f"Reason: {e.reason}") + logging.debug(f"HTTP Headers: {e.headers}") + logging.info(f"ES_CONNECTOR: Re-initializing edu-sharing API Client...") + self.initApiClient() if e.status == 404: + logging.debug(f"ES_CONNECTOR - findItem: edu-sharing returned HTTP-statuscode 404.") pass else: raise e From 4f5131e83a8c5f4371fc4572af8c8e67eb8a835e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 12:51:08 +0200 Subject: [PATCH 312/590] fix: weak warnings (PEP8 E712 / E713) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index d12de135..7455a259 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -334,7 +334,8 @@ def transformItem(self, uuid, spider, item): "cclom:location": item["lom"]["technical"]["location"] if "location" in item["lom"]["technical"] else None, "cclom:format": item["lom"]["technical"]["format"] if "format" in item["lom"]["technical"] else None, - "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] if "aggregationLevel" in item["lom"]["general"] else None, + "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] if "aggregationLevel" in item["lom"][ + "general"] else None, "cclom:title": item["lom"]["general"]["title"] } if "identifier" in item["lom"]["general"]: @@ -376,7 +377,7 @@ def transformItem(self, uuid, spider, item): if "lifecycle" in item["lom"]: for person in item["lom"]["lifecycle"]: - if not "role" in person: + if "role" not in person: continue if ( not 
person["role"].lower() @@ -551,7 +552,7 @@ def createGroupsIfNotExists(self, groups, type: CreateGroupType): EduSharing.groupCache.append(result["authorityName"]) def setNodePermissions(self, uuid, item): - if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) == False: + if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is False: logging.debug("Skipping permissions, EDU_SHARING_PERMISSION_CONTROL is set to false") return if "permissions" in item: @@ -560,7 +561,7 @@ def setNodePermissions(self, uuid, item): "permissions": [], } public = item["permissions"]["public"] - if public == True: + if public is True: if ( "groups" in item["permissions"] or "mediacenters" in item["permissions"] @@ -590,7 +591,7 @@ def setNodePermissions(self, uuid, item): if "groups" in item["permissions"]: if ( "autoCreateGroups" in item["permissions"] - and item["permissions"]["autoCreateGroups"] == True + and item["permissions"]["autoCreateGroups"] is True ): self.createGroupsIfNotExists( item["permissions"]["groups"], @@ -605,7 +606,7 @@ def setNodePermissions(self, uuid, item): if "mediacenters" in item["permissions"]: if ( "autoCreateMediacenters" in item["permissions"] - and item["permissions"]["autoCreateMediacenters"] == True + and item["permissions"]["autoCreateMediacenters"] is True ): self.createGroupsIfNotExists( item["permissions"]["mediacenters"], @@ -671,7 +672,7 @@ def initCookie(): return auth def initApiClient(self): - if EduSharing.cookie == None: + if EduSharing.cookie is None: settings = get_project_settings() auth = self.initCookie() isAdmin = json.loads(auth.text)["isAdmin"] @@ -698,15 +699,17 @@ def initApiClient(self): EduSharing.mediacenterApi = MEDIACENTERV1Api(EduSharing.apiClient) EduSharing.nodeApi = NODEV1Api(EduSharing.apiClient) about = EduSharing.aboutApi.about() - EduSharing.version = list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0]["version"] + EduSharing.version = list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0][ + "version"] version_str = str(EduSharing.version["major"]) + "." 
+ str(EduSharing.version["minor"]) - if EduSharing.version["major"] != 1 or EduSharing.version["minor"] < 0 or EduSharing.version["minor"] > 1: + if EduSharing.version["major"] != 1 or EduSharing.version["minor"] < 0 or EduSharing.version[ + "minor"] > 1: raise Exception( f"Given repository api version is unsupported: " + version_str ) else: logging.info("Detected edu-sharing bulk api with version " + version_str) - if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) == True: + if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is True: EduSharing.groupCache = list( map( lambda x: x["authorityName"], From dee035cd90dd4ce393956ed4f0f876ea9c48c3ab Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 13:13:00 +0200 Subject: [PATCH 313/590] style: code formatting (via black) - settings used: "black -l 120 converter/es_connector.py --target-version py310" Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 172 +++++++++++++++----------------------- 1 file changed, 66 insertions(+), 106 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 7455a259..96ef2e34 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -74,7 +74,8 @@ def deserialize(self, response, response_type): def __getattribute__(self, name): attr = object.__getattribute__(self, name) - if hasattr(attr, '__call__'): + if hasattr(attr, "__call__"): + def newfunc(*args, **kwargs): if time.time() - ESApiClient.lastRequestTime > ESApiClient.COOKIE_REBUILD_THRESHOLD: EduSharing.initCookie() @@ -107,9 +108,9 @@ class CreateGroupType(Enum): enabled: bool def __init__(self): - cookie_threshold = env.get('EDU_SHARING_COOKIE_REBUILD_THRESHOLD', True) + cookie_threshold = env.get("EDU_SHARING_COOKIE_REBUILD_THRESHOLD", True) if cookie_threshold: - logging.info('Setting COOKIE_REBUILD_THRESHOLD to ' + str(cookie_threshold) + ' seconds') + logging.info("Setting COOKIE_REBUILD_THRESHOLD to " + str(cookie_threshold) + " seconds") self.COOKIE_REBUILD_THRESHOLD = cookie_threshold self.enabled = env.get("MODE", default="edu-sharing") == "edu-sharing" if self.enabled: @@ -141,7 +142,8 @@ def syncNode(self, spider, type, properties): jsonError = json.loads(e.body) if jsonError["error"] == "java.lang.IllegalStateException": logging.warning( - "Node '" + properties['cm:name'][0] + "' probably blocked for sync: " + jsonError["message"]) + "Node '" + properties["cm:name"][0] + "' probably blocked for sync: " + jsonError["message"] + ) return None raise e return response["node"] @@ -180,12 +182,13 @@ def setPermissions(self, uuid, permissions) -> bool: def setNodeBinaryData(self, uuid, item) -> bool: if "binary" in item: - logging.info(get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/content?mimetype=" - + item["lom"]["technical"]["format"] - ) + logging.info( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/content?mimetype=" + + item["lom"]["technical"]["format"] + ) files = {"file": item["binary"]} response = requests.post( get_project_settings().get("EDU_SHARING_BASE_URL") @@ -202,13 +205,7 @@ def setNodeBinaryData(self, uuid, item) -> bool: def setNodePreview(self, uuid, item) -> bool: if "thumbnail" in item: - key = ( - "large" - if "large" in item["thumbnail"] - else "small" - if "small" in item["thumbnail"] - else None - ) + key = "large" if "large" in item["thumbnail"] else 
"small" if "small" in item["thumbnail"] else None if key: files = {"image": base64.b64decode(item["thumbnail"][key])} response = requests.post( @@ -297,24 +294,25 @@ def mapLicense(self, spaces, license): case Constants.LICENSE_PDM: spaces["ccm:commonlicense_key"] = "PDM" case _: - logging.warning(f"License.url {license['url']} could not be mapped to a license from Constants.\n" - f"If you are sure that you provided a correct URL to a license, " - f"please check if the license-mapping within es_connector.py is up-to-date.") + logging.warning( + f"License.url {license['url']} could not be mapped to a license from Constants.\n" + f"If you are sure that you provided a correct URL to a license, " + f"please check if the license-mapping within es_connector.py is up-to-date." + ) if "internal" in license: match license["internal"]: - case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM" \ - | Constants.LICENSE_COPYRIGHT_LAW \ - | Constants.LICENSE_SCHULFUNK \ - | Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN: + case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM" | Constants.LICENSE_COPYRIGHT_LAW | Constants.LICENSE_SCHULFUNK | Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN: spaces["ccm:commonlicense_key"] = license["internal"] case Constants.LICENSE_CUSTOM: spaces["ccm:commonlicense_key"] = "CUSTOM" if "description" in license: spaces["cclom:rights_description"] = license["description"] case _: - logging.warning(f"Received a value for license['internal'] that is not recognized by es_connector. " - f"Please double-check if the provided value {license['internal']} is correctly " - f"mapped within Constants AND es_connector.") + logging.warning( + f"Received a value for license['internal'] that is not recognized by es_connector. " + f"Please double-check if the provided value {license['internal']} is correctly " + f"mapped within Constants AND es_connector." 
+ ) if "author" in license: spaces["ccm:author_freetext"] = license["author"] @@ -329,14 +327,13 @@ def transformItem(self, uuid, spider, item): "ccm:replicationsourcehash": item["hash"], "ccm:replicationsourceuuid": uuid, "cm:name": item["lom"]["general"]["title"], - "ccm:wwwurl": item["lom"]["technical"]["location"][0] - if "location" in item["lom"]["technical"] else None, - "cclom:location": item["lom"]["technical"]["location"] - if "location" in item["lom"]["technical"] else None, + "ccm:wwwurl": item["lom"]["technical"]["location"][0] if "location" in item["lom"]["technical"] else None, + "cclom:location": item["lom"]["technical"]["location"] if "location" in item["lom"]["technical"] else None, "cclom:format": item["lom"]["technical"]["format"] if "format" in item["lom"]["technical"] else None, - "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] if "aggregationLevel" in item["lom"][ - "general"] else None, - "cclom:title": item["lom"]["general"]["title"] + "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] + if "aggregationLevel" in item["lom"]["general"] + else None, + "cclom:title": item["lom"]["general"]["title"], } if "identifier" in item["lom"]["general"]: spaces["cclom:general_identifier"] = item["lom"]["general"]["identifier"] @@ -345,9 +342,7 @@ def transformItem(self, uuid, spider, item): if "status" in item: spaces["ccm:editorial_state"] = item["status"] if "origin" in item: - spaces["ccm:replicationsourceorigin"] = item[ - "origin" - ] # TODO currently not mapped in edu-sharing + spaces["ccm:replicationsourceorigin"] = item["origin"] # TODO currently not mapped in edu-sharing self.mapLicense(spaces, item["license"]) if "description" in item["lom"]["general"]: @@ -370,8 +365,10 @@ def transformItem(self, uuid, spider, item): # edusharing requires milliseconds duration = int(float(duration) * 1000) except: - logging.debug(f"The supplied 'technical.duration'-value {duration} could not be converted from " - f"seconds to milliseconds. ('cclom:duration' expects ms)") + logging.debug( + f"The supplied 'technical.duration'-value {duration} could not be converted from " + f"seconds to milliseconds. 
('cclom:duration' expects ms)" + ) pass spaces["cclom:duration"] = duration @@ -379,26 +376,19 @@ def transformItem(self, uuid, spider, item): for person in item["lom"]["lifecycle"]: if "role" not in person: continue - if ( - not person["role"].lower() - in EduSharingConstants.LIFECYCLE_ROLES_MAPPING - ): + if not person["role"].lower() in EduSharingConstants.LIFECYCLE_ROLES_MAPPING: logging.warning( "The lifecycle role " + person["role"] + " is currently not supported by the edu-sharing connector" ) continue - mapping = EduSharingConstants.LIFECYCLE_ROLES_MAPPING[ - person["role"].lower() - ] + mapping = EduSharingConstants.LIFECYCLE_ROLES_MAPPING[person["role"].lower()] # convert to a vcard string firstName = person["firstName"] if "firstName" in person else "" lastName = person["lastName"] if "lastName" in person else "" title: str = person["title"] if "title" in person else "" - organization = ( - person["organization"] if "organization" in person else "" - ) + organization = person["organization"] if "organization" in person else "" url = person["url"] if "url" in person else "" email = person["email"] if "email" in person else "" date = person["date"] if "date" in person else None @@ -407,14 +397,8 @@ def transformItem(self, uuid, spider, item): id_ror: str = person["id_ror"] if "id_ror" in person else "" id_wikidata: str = person["id_wikidata"] if "id_wikidata" in person else "" vcard = vobject.vCard() - vcard.add("n").value = vobject.vcard.Name( - family=lastName, given=firstName - ) - vcard.add("fn").value = ( - organization - if organization - else (firstName + " " + lastName).strip() - ) + vcard.add("n").value = vobject.vcard.Name(family=lastName, given=firstName) + vcard.add("fn").value = organization if organization else (firstName + " " + lastName).strip() if id_gnd: vcard.add("X-GND-URI").value = id_gnd if id_orcid: @@ -427,7 +411,7 @@ def transformItem(self, uuid, spider, item): vcard.add("title").value = title if date: vcard.add("X-ES-LOM-CONTRIBUTE-DATE").value = date.isoformat() - if person["role"].lower() == 'publisher': + if person["role"].lower() == "publisher": spaces["ccm:published_date"] = date.isoformat() if organization: vcard.add("org") @@ -510,32 +494,20 @@ def transformItem(self, uuid, spider, item): def createGroupsIfNotExists(self, groups, type: CreateGroupType): for group in groups: if type == EduSharing.CreateGroupType.MediaCenter: - uuid = ( - EduSharingConstants.GROUP_PREFIX - + EduSharingConstants.MEDIACENTER_PREFIX - + group - ) + uuid = EduSharingConstants.GROUP_PREFIX + EduSharingConstants.MEDIACENTER_PREFIX + group else: uuid = EduSharingConstants.GROUP_PREFIX + group if uuid in EduSharing.groupCache: - logging.debug( - "Group " + uuid + " is existing in cache, no need to create" - ) + logging.debug("Group " + uuid + " is existing in cache, no need to create") continue logging.debug("Group " + uuid + " is not in cache, checking consistency...") try: group = EduSharing.iamApi.get_group(EduSharingConstants.HOME, uuid) - logging.info( - "Group " - + uuid - + " was found in edu-sharing (cache inconsistency), no need to create" - ) + logging.info("Group " + uuid + " was found in edu-sharing (cache inconsistency), no need to create") EduSharing.groupCache.append(uuid) continue except ApiException as e: - logging.info( - "Group " + uuid + " was not found in edu-sharing, creating it" - ) + logging.info("Group " + uuid + " was not found in edu-sharing, creating it") pass if type == EduSharing.CreateGroupType.MediaCenter: @@ -546,9 +518,7 @@ def 
createGroupsIfNotExists(self, groups, type: CreateGroupType): ) EduSharing.groupCache.append(result["authorityName"]) else: - result = EduSharing.iamApi.create_group( - repository=EduSharingConstants.HOME, group=group, body={} - ) + result = EduSharing.iamApi.create_group(repository=EduSharingConstants.HOME, group=group, body={}) EduSharing.groupCache.append(result["authorityName"]) def setNodePermissions(self, uuid, item): @@ -562,10 +532,7 @@ def setNodePermissions(self, uuid, item): } public = item["permissions"]["public"] if public is True: - if ( - "groups" in item["permissions"] - or "mediacenters" in item["permissions"] - ): + if "groups" in item["permissions"] or "mediacenters" in item["permissions"]: logging.error( "Invalid state detected: Permissions public is set to true but groups or mediacenters are also set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!" ) @@ -589,15 +556,12 @@ def setNodePermissions(self, uuid, item): # return mergedGroups = [] if "groups" in item["permissions"]: - if ( - "autoCreateGroups" in item["permissions"] - and item["permissions"]["autoCreateGroups"] is True - ): + if "autoCreateGroups" in item["permissions"] and item["permissions"]["autoCreateGroups"] is True: self.createGroupsIfNotExists( item["permissions"]["groups"], EduSharing.CreateGroupType.Regular, ) - mergedGroups = mergedGroups + list( + mergedGroups += list( map( lambda x: EduSharingConstants.GROUP_PREFIX + x, item["permissions"]["groups"], @@ -605,18 +569,18 @@ def setNodePermissions(self, uuid, item): ) if "mediacenters" in item["permissions"]: if ( - "autoCreateMediacenters" in item["permissions"] - and item["permissions"]["autoCreateMediacenters"] is True + "autoCreateMediacenters" in item["permissions"] + and item["permissions"]["autoCreateMediacenters"] is True ): self.createGroupsIfNotExists( item["permissions"]["mediacenters"], EduSharing.CreateGroupType.MediaCenter, ) - mergedGroups = mergedGroups + list( + mergedGroups += list( map( lambda x: EduSharingConstants.GROUP_PREFIX - + EduSharingConstants.MEDIACENTER_PROXY_PREFIX - + x, + + EduSharingConstants.MEDIACENTER_PROXY_PREFIX + + x, item["permissions"]["mediacenters"], ) ) @@ -654,8 +618,7 @@ def initCookie(): logging.debug("Init edu sharing cookie...") settings = get_project_settings() auth = requests.get( - settings.get("EDU_SHARING_BASE_URL") - + "rest/authentication/v1/validateSession", + settings.get("EDU_SHARING_BASE_URL") + "rest/authentication/v1/validateSession", auth=HTTPBasicAuth( settings.get("EDU_SHARING_USERNAME"), settings.get("EDU_SHARING_PASSWORD"), @@ -700,22 +663,22 @@ def initApiClient(self): EduSharing.nodeApi = NODEV1Api(EduSharing.apiClient) about = EduSharing.aboutApi.about() EduSharing.version = list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0][ - "version"] + "version" + ] version_str = str(EduSharing.version["major"]) + "." 
+ str(EduSharing.version["minor"]) - if EduSharing.version["major"] != 1 or EduSharing.version["minor"] < 0 or EduSharing.version[ - "minor"] > 1: - raise Exception( - f"Given repository api version is unsupported: " + version_str - ) + if ( + EduSharing.version["major"] != 1 + or EduSharing.version["minor"] < 0 + or EduSharing.version["minor"] > 1 + ): + raise Exception(f"Given repository api version is unsupported: " + version_str) else: logging.info("Detected edu-sharing bulk api with version " + version_str) if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is True: EduSharing.groupCache = list( map( lambda x: x["authorityName"], - EduSharing.iamApi.search_groups( - EduSharingConstants.HOME, "", max_items=1000000 - )["groups"], + EduSharing.iamApi.search_groups(EduSharingConstants.HOME, "", max_items=1000000)["groups"], ) ) logging.debug("Built up edu-sharing group cache: {}".format(EduSharing.groupCache)) @@ -745,10 +708,7 @@ def findItem(self, id, spider): try: response = EduSharing.bulkApi.find(properties) properties = response["node"]["properties"] - if ( - "ccm:replicationsourcehash" in properties - and "ccm:replicationsourceuuid" in properties - ): + if "ccm:replicationsourcehash" in properties and "ccm:replicationsourceuuid" in properties: return [ properties["ccm:replicationsourceuuid"][0], properties["ccm:replicationsourcehash"][0], From 69adfb770a370254575238dab1de26e9c1088215 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 13:16:07 +0200 Subject: [PATCH 314/590] fix: warnings from getHeaders()-method Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 96ef2e34..624d4424 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -116,11 +116,11 @@ def __init__(self): if self.enabled: self.initApiClient() - def getHeaders(self, contentType="application/json"): + def getHeaders(self, content_type: str | None = "application/json"): return { "COOKIE": EduSharing.cookie, "Accept": "application/json", - "Content-Type": contentType, + "Content-Type": content_type, } def syncNode(self, spider, type, properties): From cb1a4e7fb15268a6ad755419f2b0a675e5739465 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 13:41:17 +0200 Subject: [PATCH 315/590] style/change: follow PEP8 naming guidelines in es_connector.py Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 76 +++++++++++----------- converter/pipelines.py | 10 +-- converter/spiders/base_classes/lom_base.py | 4 +- converter/spiders/oersi_spider.py | 4 +- 4 files changed, 47 insertions(+), 47 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 624d4424..beac128f 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -78,7 +78,7 @@ def __getattribute__(self, name): def newfunc(*args, **kwargs): if time.time() - ESApiClient.lastRequestTime > ESApiClient.COOKIE_REBUILD_THRESHOLD: - EduSharing.initCookie() + EduSharing.init_cookie() self.cookie = EduSharing.cookie # store last request time @@ -114,16 +114,16 @@ def __init__(self): self.COOKIE_REBUILD_THRESHOLD = cookie_threshold self.enabled = env.get("MODE", default="edu-sharing") == "edu-sharing" if self.enabled: - self.initApiClient() + self.init_api_client() - def getHeaders(self, 
content_type: str | None = "application/json"): + def get_headers(self, content_type: str | None = "application/json"): return { "COOKIE": EduSharing.cookie, "Accept": "application/json", "Content-Type": content_type, } - def syncNode(self, spider, type, properties): + def sync_node(self, spider, type, properties): groupBy = [] if "ccm:replicationsourceorigin" in properties: groupBy = ["ccm:replicationsourceorigin"] @@ -148,14 +148,14 @@ def syncNode(self, spider, type, properties): raise e return response["node"] - def setNodeText(self, uuid, item) -> bool: + def set_node_text(self, uuid, item) -> bool: if "fulltext" in item: response = requests.post( get_project_settings().get("EDU_SHARING_BASE_URL") + "rest/node/v1/nodes/-home-/" + uuid + "/textContent?mimetype=text/plain", - headers=self.getHeaders("multipart/form-data"), + headers=self.get_headers("multipart/form-data"), data=item["fulltext"].encode("utf-8"), ) return response.status_code == 200 @@ -167,7 +167,7 @@ def setNodeText(self, uuid, item) -> bool: # print(e) # return False - def setPermissions(self, uuid, permissions) -> bool: + def set_permissions(self, uuid, permissions) -> bool: try: EduSharing.nodeApi.set_permission( repository=EduSharingConstants.HOME, @@ -180,7 +180,7 @@ def setPermissions(self, uuid, permissions) -> bool: except ApiException as e: return False - def setNodeBinaryData(self, uuid, item) -> bool: + def set_node_binary_data(self, uuid, item) -> bool: if "binary" in item: logging.info( get_project_settings().get("EDU_SHARING_BASE_URL") @@ -196,14 +196,14 @@ def setNodeBinaryData(self, uuid, item) -> bool: + uuid + "/content?mimetype=" + item["lom"]["technical"]["format"], - headers=self.getHeaders(None), + headers=self.get_headers(None), files=files, ) return response.status_code == 200 else: return False - def setNodePreview(self, uuid, item) -> bool: + def set_node_preview(self, uuid, item) -> bool: if "thumbnail" in item: key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None if key: @@ -214,14 +214,14 @@ def setNodePreview(self, uuid, item) -> bool: + uuid + "/preview?mimetype=" + item["thumbnail"]["mimetype"], - headers=self.getHeaders(None), + headers=self.get_headers(None), files=files, ) return response.status_code == 200 else: logging.warning("No thumbnail provided for " + uuid) - def mapLicense(self, spaces, license): + def map_license(self, spaces, license): if "url" in license: match license["url"]: # ToDo: refactor this ungodly method asap @@ -320,7 +320,7 @@ def mapLicense(self, spaces, license): if "expirationDate" in license: spaces["ccm:license_to"] = [license["expirationDate"].isoformat()] - def transformItem(self, uuid, spider, item): + def transform_item(self, uuid, spider, item): spaces = { "ccm:replicationsource": spider.name, "ccm:replicationsourceid": item["sourceId"], @@ -344,7 +344,7 @@ def transformItem(self, uuid, spider, item): if "origin" in item: spaces["ccm:replicationsourceorigin"] = item["origin"] # TODO currently not mapped in edu-sharing - self.mapLicense(spaces, item["license"]) + self.map_license(spaces, item["license"]) if "description" in item["lom"]["general"]: spaces["cclom:general_description"] = item["lom"]["general"]["description"] @@ -491,7 +491,7 @@ def transformItem(self, uuid, spider, item): return spaces - def createGroupsIfNotExists(self, groups, type: CreateGroupType): + def create_groups_if_not_exists(self, groups, type: CreateGroupType): for group in groups: if type == EduSharing.CreateGroupType.MediaCenter: 
uuid = EduSharingConstants.GROUP_PREFIX + EduSharingConstants.MEDIACENTER_PREFIX + group @@ -521,7 +521,7 @@ def createGroupsIfNotExists(self, groups, type: CreateGroupType): result = EduSharing.iamApi.create_group(repository=EduSharingConstants.HOME, group=group, body={}) EduSharing.groupCache.append(result["authorityName"]) - def setNodePermissions(self, uuid, item): + def set_node_permissions(self, uuid, item): if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is False: logging.debug("Skipping permissions, EDU_SHARING_PERMISSION_CONTROL is set to false") return @@ -557,7 +557,7 @@ def setNodePermissions(self, uuid, item): mergedGroups = [] if "groups" in item["permissions"]: if "autoCreateGroups" in item["permissions"] and item["permissions"]["autoCreateGroups"] is True: - self.createGroupsIfNotExists( + self.create_groups_if_not_exists( item["permissions"]["groups"], EduSharing.CreateGroupType.Regular, ) @@ -572,7 +572,7 @@ def setNodePermissions(self, uuid, item): "autoCreateMediacenters" in item["permissions"] and item["permissions"]["autoCreateMediacenters"] is True ): - self.createGroupsIfNotExists( + self.create_groups_if_not_exists( item["permissions"]["mediacenters"], EduSharing.CreateGroupType.MediaCenter, ) @@ -597,24 +597,24 @@ def setNodePermissions(self, uuid, item): ], } ) - if not self.setPermissions(uuid, permissions): + if not self.set_permissions(uuid, permissions): logging.error( "Failed to set permissions, please check that the given groups/mediacenters are existing in the repository or set the autoCreate mode to true" ) logging.error(item["permissions"]) - def insertItem(self, spider, uuid, item): - node = self.syncNode(spider, "ccm:io", self.transformItem(uuid, spider, item)) - self.setNodePermissions(node["ref"]["id"], item) - self.setNodePreview(node["ref"]["id"], item) - if not self.setNodeBinaryData(node["ref"]["id"], item): - self.setNodeText(node["ref"]["id"], item) + def insert_item(self, spider, uuid, item): + node = self.sync_node(spider, "ccm:io", self.transform_item(uuid, spider, item)) + self.set_node_permissions(node["ref"]["id"], item) + self.set_node_preview(node["ref"]["id"], item) + if not self.set_node_binary_data(node["ref"]["id"], item): + self.set_node_text(node["ref"]["id"], item) - def updateItem(self, spider, uuid, item): - self.insertItem(spider, uuid, item) + def update_item(self, spider, uuid, item): + self.insert_item(spider, uuid, item) @staticmethod - def initCookie(): + def init_cookie(): logging.debug("Init edu sharing cookie...") settings = get_project_settings() auth = requests.get( @@ -634,10 +634,10 @@ def initCookie(): EduSharing.cookie = ";".join(cookies) return auth - def initApiClient(self): + def init_api_client(self): if EduSharing.cookie is None: settings = get_project_settings() - auth = self.initCookie() + auth = self.init_cookie() isAdmin = json.loads(auth.text)["isAdmin"] if isAdmin: configuration = Configuration() @@ -692,13 +692,13 @@ def initApiClient(self): ) @staticmethod - def buildUUID(url): + def build_uuid(url): return str(uuid.uuid5(uuid.NAMESPACE_URL, url)) - def uuidExists(self, uuid): + def uuid_exists(self, uuid): return False - def findItem(self, id, spider): + def find_item(self, id, spider): if not self.enabled: return None properties = { @@ -717,23 +717,23 @@ def findItem(self, id, spider): if e.status == 401: # Typically happens when the edu-sharing session cookie is lost and needs to be renegotiated. 
# (edu-sharing error-message: "Admin rights are required for this endpoint") - logging.info(f"ES_CONNECTOR - findItem: edu-sharing returned HTTP-statuscode 401.") + logging.info(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode 401.") logging.debug(f"(HTTP-Body: '{e.body}')") logging.debug(f"Reason: {e.reason}") logging.debug(f"HTTP Headers: {e.headers}") logging.info(f"ES_CONNECTOR: Re-initializing edu-sharing API Client...") - self.initApiClient() + self.init_api_client() if e.status == 404: - logging.debug(f"ES_CONNECTOR - findItem: edu-sharing returned HTTP-statuscode 404.") + logging.debug(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode 404.") pass else: raise e return None - def findSource(self, spider): + def find_source(self, spider): return True - def createSource(self, spider): + def create_source(self, spider): # src = self.createNode(EduSharing.etlFolder['ref']['id'], 'ccm:map', {'cm:name' : [spider.name]}) # EduSharing.spiderNodes[spider.name] = src # return src diff --git a/converter/pipelines.py b/converter/pipelines.py index 70ccb593..50f5f120 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -486,11 +486,11 @@ def process_item(self, raw_item, spider): item["hash"] = time.time() # @TODO: May this can be done only once? - if self.findSource(spider) is None: + if self.find_source(spider) is None: log.info("create new source " + spider.name) - self.createSource(spider) + self.create_source(spider) - db_item = self.findItem(item["sourceId"], spider) + db_item = self.find_item(item["sourceId"], spider) if db_item: if item["hash"] != db_item[1]: log.debug("hash has changed, continuing pipelines") @@ -594,8 +594,8 @@ def process_item(self, raw_item, spider): title = "" if "title" in item["lom"]["general"]: title = str(item["lom"]["general"]["title"]) - entryUUID = EduSharing.buildUUID(item["response"]["url"] if "url" in item["response"] else item["hash"]) - self.insertItem(spider, entryUUID, item) + entryUUID = EduSharing.build_uuid(item["response"]["url"] if "url" in item["response"] else item["hash"]) + self.insert_item(spider, entryUUID, item) logging.info("item " + entryUUID + " inserted/updated") # @TODO: We may need to handle Collections diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index cbc15529..545e05a0 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -62,7 +62,7 @@ def getUri(self, response=None) -> str: return response.url def getUUID(self, response=None) -> str: - return EduSharing.buildUUID(self.getUri(response)) + return EduSharing.build_uuid(self.getUri(response)) def hasChanged(self, response=None) -> bool: if self.forceUpdate: @@ -77,7 +77,7 @@ def hasChanged(self, response=None) -> bool: logging.info(f"matching requested id: {self.remoteId}") return True return False - db = EduSharing().findItem(self.getId(response), self) + db = EduSharing().find_item(self.getId(response), self) changed = db is None or db[1] != self.getHash(response) if not changed: logging.info(f"Item {self.getId(response)} (uuid: {db[0]}) has not changed") diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index dfdfb5b2..3f3c5e0a 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -344,7 +344,7 @@ def get_uuid(elastic_item: dict): # The "getUUID"-method of LomBase couldn't be cleanly overridden because at the point of time when we do this # check, there is no "Response"-object 
available yet. item_url = OersiSpider.get_item_url(elastic_item=elastic_item) - return EduSharing.buildUUID(item_url) + return EduSharing.build_uuid(item_url) @staticmethod def get_item_url(elastic_item) -> str: @@ -380,7 +380,7 @@ def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: logging.info(f"matching requested id: {self.remoteId}") return True return False - db = EduSharing().findItem(self.getId(response, elastic_item=elastic_item), self) + db = EduSharing().find_item(self.getId(response, elastic_item=elastic_item), self) changed = db is None or db[1] != self.getHash(response, elastic_item_source=elastic_item["_source"]) if not changed: logging.info(f"Item {self.getId(response, elastic_item=elastic_item)} (uuid: {db[0]}) has not changed") From 7cd2a95d56398df15ca08a5a4334078ce949dafa Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 15:27:34 +0200 Subject: [PATCH 316/590] logging: increase verbosity of debug messages --- converter/es_connector.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index beac128f..9a02f373 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -714,17 +714,21 @@ def find_item(self, id, spider): properties["ccm:replicationsourcehash"][0], ] except ApiException as e: + # ToDo: + # - find a way to handle statuscode 503 ("Service Temporarily Unavailable") gracefully? if e.status == 401: # Typically happens when the edu-sharing session cookie is lost and needs to be renegotiated. # (edu-sharing error-message: "Admin rights are required for this endpoint") - logging.info(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode 401.") - logging.debug(f"(HTTP-Body: '{e.body}')") - logging.debug(f"Reason: {e.reason}") - logging.debug(f"HTTP Headers: {e.headers}") + logging.info(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} for (replicationsourceid '{id}').") + logging.debug(f"(HTTP-Body: '{e.body}\n')" + f"Reason: {e.reason}\n" + f"HTTP Headers: {e.headers}") logging.info(f"ES_CONNECTOR: Re-initializing edu-sharing API Client...") self.init_api_client() if e.status == 404: - logging.debug(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode 404.") + logging.debug(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} (replicationsourceid '{id}') :\n" + f"HTTP Body: {e.body}\n" + f"HTTP Header: {e.headers}") pass else: raise e From fd2e48847a935715619bf2ba654f95511e32adfa Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 18:54:38 +0200 Subject: [PATCH 317/590] logging: dump edu-sharing ApiException response as string if ApiException 'body'-attribute isn't a deserializable JSON String - when encountering a '400'-response from edu-sharing during the sync_node method, the logging module failed to produce helpful logging messages because the API response could not be parsed as a JSON Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 41 ++++++++++++++++++++++++++------------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 9a02f373..32c900c0 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -139,12 +139,20 @@ def sync_node(self, spider, type, properties): except ApiException as e: # ToDo: # - error-handling for code 500 ("java.util.concurrent.TimeoutException") - jsonError = 
json.loads(e.body)
-            if jsonError["error"] == "java.lang.IllegalStateException":
-                logging.warning(
-                    "Node '" + properties["cm:name"][0] + "' probably blocked for sync: " + jsonError["message"]
+            try:
+                json_error: dict = json.loads(e.body)
+                if json_error["error"] == "java.lang.IllegalStateException":
+                    logging.warning(
+                        "Node '" + properties["cm:name"][0] + "' probably blocked for sync: " + json_error["message"]
+                    )
+                    return None
+            except json.JSONDecodeError:
+                logging.error(
+                    f"ES_CONNECTOR: edu-sharing ApiException 'body'-attribute wasn't a deserializable JSON "
+                    f"String for item '{properties['cm:name'][0]}' "
+                    f"(replicationsourceid: '{properties['ccm:replicationsourceid']}'). "
+                    f'edu-sharing returned the following exception:\n"{e.body}"'
                 )
-                return None
             raise e
         return response["node"]
 
@@ -719,17 +727,22 @@ def find_item(self, id, spider):
             if e.status == 401:
                 # Typically happens when the edu-sharing session cookie is lost and needs to be renegotiated.
                 # (edu-sharing error-message: "Admin rights are required for this endpoint")
-                logging.info(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} for (replicationsourceid '{id}').")
-                logging.debug(f"(HTTP-Body: '{e.body}\n')"
-                              f"Reason: {e.reason}\n"
-                              f"HTTP Headers: {e.headers}")
-                logging.info(f"ES_CONNECTOR: Re-initializing edu-sharing API Client...")
+                logging.info(
+                    f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} for (replicationsourceid "
+                    f"'{id}')."
+                )
+                logging.debug(f"(HTTP-Body: '{e.body}\n')" f"Reason: {e.reason}\n" f"HTTP Headers: {e.headers}")
+                logging.info("ES_CONNECTOR: Re-initializing edu-sharing API Client...")
                 self.init_api_client()
+                return None
             if e.status == 404:
-                logging.debug(f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} (replicationsourceid '{id}') :\n"
-                              f"HTTP Body: {e.body}\n"
-                              f"HTTP Header: {e.headers}")
-                pass
+                logging.debug(
+                    f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} (replicationsourceid "
+                    f"'{id}'):\n"
+                    f"HTTP Body: {e.body}\n"
+                    f"HTTP Header: {e.headers}"
+                )
+                return None
             else:
                 raise e
             return None

From 78e36928017da6e28e0a158bf33e87d62fcb5057 Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Thu, 20 Jul 2023 19:26:19 +0200
Subject: [PATCH 318/590] fix: LisumPipeline LRT mapping (nested lists)

- LisumPipeline's LRT_OEH_TO_LISUM Mapping produced nested lists in "oeh_spider"
  'valuespaces.learningResourceType'-field, which could not be properly
  deserialized by edu-sharing
-- added additional checks before appending/extending the temporary LRT lists

Signed-off-by: criamos <981166+Criamos@users.noreply.github.com>
---
 converter/pipelines.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/converter/pipelines.py b/converter/pipelines.py
index 50f5f120..ccab1ee2 100644
--- a/converter/pipelines.py
+++ b/converter/pipelines.py
@@ -902,11 +902,12 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy
                     lrt_w3id: str = lrt_item.split(sep='/')[-1]
                     if lrt_w3id in self.LRT_OEH_TO_LISUM:
                         lrt_w3id = self.LRT_OEH_TO_LISUM.get(lrt_w3id)
-                    if lrt_w3id:
-                        # ToDo: workaround
+                    if lrt_w3id and type(lrt_w3id) is str:
                         # making sure to exclude '' strings from populating the list
                         lrt_temporary_list.append(lrt_w3id)
-                lrt_list = lrt_temporary_list
+                    elif lrt_w3id and type(lrt_w3id) is list:
+                        lrt_temporary_list.extend(lrt_w3id)
+                lrt_list = list(set(lrt_temporary_list))
                 # after everything is mapped, we're saving the (updated) list back to our LRT:
valuespaces["learningResourceType"] = lrt_list From 8f038c4f54c3c43e00e6d8e153054c2783146551 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 20 Jul 2023 19:28:27 +0200 Subject: [PATCH 319/590] fix: edu_sharing_base "ccm:oeh_quality_login"-fallback - fix match-case detection for "ccm:conditionsOfAccess"-fallback (for items which only have "ccm:oeh_quality_login"-values) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/edu_sharing_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 49da62e8..42243b21 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -268,15 +268,15 @@ def getValuespaces(self, response): # this fallback will lose metadata in the long run since the "conditionsOfAccess"-Vocab has 3 values, while # "ccm:oeh_quality_login" returns only binary string values: # - "0": login required - # - "1": no login required + # - "1": no login necessary oeh_quality_login_value: list = self.getProperty("ccm:oeh_quality_login", response) if oeh_quality_login_value: oeh_quality_login_value: str = oeh_quality_login_value[0] match oeh_quality_login_value: + case "0": + valuespaces.add_value("conditionsOfAccess", "login") case "1": valuespaces.add_value("conditionsOfAccess", "no_login") - case "2": - valuespaces.add_value("conditionsOfAccess", "login") case _: logging.warning(f"edu-sharing property 'ccm:oeh_quality_login' returned an unexpected value: " f"{oeh_quality_login_value} for node-ID {response.meta['item']['ref']['id']}") From 4bbf23591aa4daaced0443f0799f6ee46451e4c6 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 1 Aug 2023 12:25:52 +0200 Subject: [PATCH 320/590] chore: update dependencies - "overrides"-package cannot be updated to the latest version without breaking changes -- ToDo: revisit at a later time Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- requirements.txt | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3c10cdd5..925c5f7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,28 +1,28 @@ -wheel==0.38.4 +wheel==0.41.0 image -dateparser==1.1.6 +dateparser==1.1.8 isodate==0.6.1 html2text~=2020.1.16 -scrapy-splash==0.8.0 +scrapy-splash==0.9.0 python-dateutil==2.8.2 -python-dotenv==0.20.0 -Scrapy==2.6.3 -requests==2.28.2 +python-dotenv==1.0.0 +Scrapy==2.9.0 +requests==2.31.0 vobject==0.9.6.1 -xmltodict~=0.12.0 +xmltodict==0.13.0 overrides==3.1.0 -jmespath==1.0.0 -flake8==6.0.0 -pytest==7.2.1 -extruct~=0.13.0 -lxml==4.9.2 -w3lib~=1.22.0 -itemloaders~=1.0.4 -Pillow==9.4.0 -itemadapter==0.5.0 +jmespath==1.0.1 +flake8==6.1.0 +pytest==7.4.0 +extruct==0.16.0 +lxml==4.9.3 +w3lib==2.1.1 +itemloaders==1.1.0 +Pillow==10.0.0 +itemadapter==0.8.0 six==1.16.0 -certifi==2022.12.7 -urllib3~=1.26.09 -playwright==1.30.0 -pyOpenSSL==22.1.0 -black==23.3.0 \ No newline at end of file +certifi==2023.7.22 +urllib3==2.0.4 +playwright==1.36.0 +pyOpenSSL==23.2.0 +black==23.7.0 \ No newline at end of file From b8b5e8ff7bee700023d232a49139533d6c52eac9 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 1 Aug 2023 15:22:17 +0200 Subject: [PATCH 321/590] change: pyproject.toml (replace setuptools 
with poetry) - the old pyproject.toml was neither used nor maintained, therefore implementing poetry to facilitate easier maintenance and project onboarding in the future - remove: tox (will use pytest instead) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- pyproject.toml | 84 ++++++++++++++++++++++++++++++-------------------- 1 file changed, 51 insertions(+), 33 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 74b187e4..b7518c14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,14 +1,13 @@ [project] -description = "crawls educational sites for use in wirlernenonline.de" +description = "Crawls educational sites for use in wirlernenonline.de" authors = [ - "torsten simon " + "Torsten Simon " ] maintainers = [ - + "Andreas Schnäpp <981166+Criamos@users.noreply.github.com>" ] -license = "Proprietary" readme = "README.md" -python = "^3.9" +python = "^3.10" homepage = "https://github.com/openeduhub/oeh-search-etl" repository = "https://github.com/openeduhub/oeh-search-etl" documentation = "https://github.com/openeduhub/oeh-search-etl" @@ -18,28 +17,14 @@ keywords = ["metadata", "oer", "crawl", " wirlernenonline"] classifiers = [ "Framework :: Scrapy", "Development Status :: 4 - Beta", - "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Education :: Testing", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", ] -# Requirements -[dependencies] -Click = "^7.0" - -[dev-dependencies] -black = { version = "^18.3-alpha.0", python = "^3.6" } - -[scripts] -poetry = "infer_pyproject.cli:main" - -[build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta" - [tool.black] line-length = 88 -target-version = ['py39'] +target-version = ['py310'] include = '\.pyi?$' exclude = ''' @@ -61,17 +46,50 @@ exclude = ''' ) ''' -[tool.tox] -legacy_tox_ini = """ -[tox] -envlist = py39 -skipsdist=True +[tool.poetry] +name = "oeh-search-etl" +version = "2023.08.01" +description = "Crawls educational sites for use in WirLernenOnline.de" +authors = ["Torsten Simon "] +maintainers = [ + "Andreas Schnäpp <981166+Criamos@users.noreply.github.com>" +] +readme = "Readme.md" +packages = [{include = "converter"}] -[testenv] -deps = - pytest - flake8 +[tool.poetry.dependencies] +python = "^3.10" +wheel = "0.41.0" +black = "^23.7.0" +certifi="2023.7.22" +dateparser="1.1.8" +extruct="0.16.0" +html2text="2020.1.16" +jmespath="1.0.1" +image = "1.5.33" +itemadapter="0.8.0" +itemloaders="1.1.0" +isodate="0.6.1" +lxml="4.9.3" +overrides="3.1.0" +Pillow="10.0.0" +playwright="1.36.0" +pyOpenSSL="23.2.0" +pytest="7.4.0" +python-dateutil="2.8.2" +python-dotenv="1.0.0" +requests="2.31.0" +six="1.16.0" +Scrapy="2.9.0" +scrapy-splash="0.9.0" +urllib3="2.0.4" +vobject="0.9.6.1" +w3lib="2.1.1" +xmltodict="0.13.0" -commands = - scrapy check -""" \ No newline at end of file +[tool.poetry.group.dev.dependencies] +flake8 = "^6.1.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" From 6d56a0b43102f26e959bbd884f81e6a66e77a525 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 1 Aug 2023 15:59:31 +0200 Subject: [PATCH 322/590] add poetry.lock and update requirements.txt (exported with poetry) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- poetry.lock | 1815 ++++++++++++++++++++++++++++++++++++++++++++++ requirements.txt | 111 ++- 2 files changed, 1898 insertions(+), 28 deletions(-) create mode 100644 poetry.lock diff --git 
a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..c06cd92f --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1815 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "asgiref" +version = "3.7.2" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, + {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "automat" +version = "22.10.0" +description = "Self-service finite-state machines for the programmer on the go." +optional = false +python-versions = "*" +files = [ + {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"}, + {file = "Automat-22.10.0.tar.gz", hash = "sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +six = "*" + +[package.extras] +visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "23.7.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = 
["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = 
"cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal 
Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = 
"charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + +[[package]] +name = "click" +version = "8.1.6" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "constantly" +version = "15.1.0" +description = "Symbolic constants in Python" +optional = false +python-versions = "*" +files = [ + {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, + {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, +] + +[[package]] +name = "cryptography" +version = "41.0.2" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, + {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, + {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, + {file = "cryptography-41.0.2.tar.gz", hash = 
"sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cssselect" +version = "1.2.0" +description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, + {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, +] + +[[package]] +name = "dateparser" +version = "1.1.8" +description = "Date parsing library designed to parse dates from HTML pages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, + {file = "dateparser-1.1.8.tar.gz", hash = "sha256:86b8b7517efcc558f085a142cdb7620f0921543fcabdb538c8a4c4001d8178e3"}, +] + +[package.dependencies] +python-dateutil = "*" +pytz = "*" +regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27" +tzlocal = "*" + +[package.extras] +calendars = ["convertdate", "hijri-converter"] +fasttext = ["fasttext"] +langdetect = ["langdetect"] + +[[package]] +name = "django" +version = "4.2.3" +description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Django-4.2.3-py3-none-any.whl", hash = "sha256:f7c7852a5ac5a3da5a8d5b35cc6168f31b605971441798dac845f17ca8028039"}, + {file = "Django-4.2.3.tar.gz", hash = "sha256:45a747e1c5b3d6df1b141b1481e193b033fd1fdbda3ff52677dc81afdaacbaed"}, +] + +[package.dependencies] +asgiref = ">=3.6.0,<4" +sqlparse = ">=0.3.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +argon2 = ["argon2-cffi (>=19.1.0)"] +bcrypt = ["bcrypt"] + +[[package]] +name = "exceptiongroup" +version = "1.1.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "extruct" +version = "0.16.0" +description = "Extract embedded metadata from HTML markup" +optional = false +python-versions = "*" +files = [ + {file = "extruct-0.16.0-py2.py3-none-any.whl", hash = "sha256:2499ea9e7d22744745ca708acee9542a4aa231871620c4f65f869a1286e64aa8"}, + {file = "extruct-0.16.0.tar.gz", hash = "sha256:d09cb3d86d149a276b277b3bd45b2b867ef3ec78bed9cd58ee0f2ae01ae670c4"}, +] + +[package.dependencies] +html-text = ">=0.5.1" +jstyleson = "*" +lxml = "*" +mf2py = "*" +pyrdfa3 = "*" +rdflib = {version = ">=6.0.0", markers = "python_version >= \"3.7\""} +six = "*" +w3lib = "*" + +[package.extras] +cli = ["requests"] + +[[package]] +name = "filelock" +version = "3.12.2" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.7" +files = [ + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, +] + +[package.extras] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "greenlet" +version = "2.0.2" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = 
"greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = 
"greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] + +[[package]] +name = "html-text" +version = "0.5.2" +description = "Extract text from HTML" +optional = false +python-versions = "*" +files = [ + {file = "html_text-0.5.2-py2.py3-none-any.whl", hash = "sha256:3f1e063f05eddf3e099a88f0440219c55fdc01c44f1291fe59c66e5228d7fc56"}, + {file = "html_text-0.5.2.tar.gz", hash = "sha256:afd61bbb70651d494a8c32670a29b9140492eccc9690109857beae41c3093ded"}, +] + +[package.dependencies] +lxml = "*" + +[[package]] +name = "html2text" +version = "2020.1.16" +description = "Turn HTML into equivalent Markdown-structured text." +optional = false +python-versions = ">=3.5" +files = [ + {file = "html2text-2020.1.16-py3-none-any.whl", hash = "sha256:c7c629882da0cf377d66f073329ccf34a12ed2adf0169b9285ae4e63ef54c82b"}, + {file = "html2text-2020.1.16.tar.gz", hash = "sha256:e296318e16b059ddb97f7a8a1d6a5c1d7af4544049a01e261731d2d5cc277bbb"}, +] + +[[package]] +name = "html5lib" +version = "1.1" +description = "HTML parser based on the WHATWG HTML specification" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] + +[package.dependencies] +six = ">=1.9" +webencodings = "*" + +[package.extras] +all = ["chardet (>=2.2)", "genshi", "lxml"] +chardet = ["chardet (>=2.2)"] +genshi = ["genshi"] +lxml = ["lxml"] + +[[package]] +name = "hyperlink" +version = "21.0.0" +description = "A featureful, immutable, and correct URL for Python." 
+optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, + {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, +] + +[package.dependencies] +idna = ">=2.5" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "image" +version = "1.5.33" +description = "Django application that provides cropping, resizing, thumbnailing, overlays and masking for images and videos with the ability to set the center of attention," +optional = false +python-versions = "*" +files = [ + {file = "image-1.5.33.tar.gz", hash = "sha256:baa2e09178277daa50f22fd6d1d51ec78f19c12688921cb9ab5808743f097126"}, +] + +[package.dependencies] +django = "*" +pillow = "*" +six = "*" + +[[package]] +name = "incremental" +version = "22.10.0" +description = "\"A small library that versions your Python projects.\"" +optional = false +python-versions = "*" +files = [ + {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, + {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, +] + +[package.extras] +mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] +scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "itemadapter" +version = "0.8.0" +description = "Common interface for data container classes" +optional = false +python-versions = ">=3.7" +files = [ + {file = "itemadapter-0.8.0-py3-none-any.whl", hash = "sha256:2ac1fbcc363b789a18639935ca322e50a65a0a7dfdd8d973c34e2c468e6c0f94"}, + {file = "itemadapter-0.8.0.tar.gz", hash = "sha256:77758485fb0ac10730d4b131363e37d65cb8db2450bfec7a57c3f3271f4a48a9"}, +] + +[[package]] +name = "itemloaders" +version = "1.1.0" +description = "Base library for scrapy's ItemLoader" +optional = false +python-versions = ">=3.7" +files = [ + {file = "itemloaders-1.1.0-py3-none-any.whl", hash = "sha256:c8c82fe0c11fc4cdd08ec04df0b3c43f3cb7190002edb517e02d55de8efc2aeb"}, + {file = "itemloaders-1.1.0.tar.gz", hash = 
"sha256:21d81c61da6a08b48e5996288cdf3031c0f92e5d0075920a0242527523e14a48"}, +] + +[package.dependencies] +itemadapter = ">=0.1.0" +jmespath = ">=0.9.5" +parsel = ">=1.5.0" +w3lib = ">=1.17.0" + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jstyleson" +version = "0.0.2" +description = "Library to parse JSON with js-style comments." +optional = false +python-versions = "*" +files = [ + {file = "jstyleson-0.0.2.tar.gz", hash = "sha256:680003f3b15a2959e4e6a351f3b858e3c07dd3e073a0d54954e34d8ea5e1308e"}, +] + +[[package]] +name = "lxml" +version = "4.9.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = 
"lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = 
"sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = 
"lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = 
"lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mf2py" +version = "1.1.3" +description = "Python Microformats2 parser" +optional = false +python-versions = ">=2.7" +files = [ + {file = "mf2py-1.1.3-py3-none-any.whl", hash = "sha256:8f9e2c147beadd56f8839644124c7d141d96e879319b9f50d02826c88766bf4d"}, + {file = "mf2py-1.1.3.tar.gz", hash = "sha256:4241e91ed4b644dd666d9fbd2557ed86e5bb7399c196026f7b0a1f413b33f59f"}, +] + +[package.dependencies] +BeautifulSoup4 = ">=4.6.0" +html5lib = ">=1.0.1" +requests = ">=2.18.4" + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "overrides" +version = "3.1.0" +description = "A decorator to automatically detect mismatch when overriding a method." 
+optional = false +python-versions = "*" +files = [ + {file = "overrides-3.1.0.tar.gz", hash = "sha256:30f761124579e59884b018758c4d7794914ef02a6c038621123fec49ea7599c6"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "parsel" +version = "1.8.1" +description = "Parsel is a library to extract data from HTML and XML using XPath and CSS selectors" +optional = false +python-versions = ">=3.7" +files = [ + {file = "parsel-1.8.1-py2.py3-none-any.whl", hash = "sha256:2708fc74daeeb4ce471e2c2e9089b650ec940c7a218053e57421e69b5b00f82c"}, + {file = "parsel-1.8.1.tar.gz", hash = "sha256:aff28e68c9b3f1a901db2a4e3f158d8480a38724d7328ee751c1a4e1c1801e39"}, +] + +[package.dependencies] +cssselect = ">=0.9" +jmespath = "*" +lxml = "*" +packaging = "*" +w3lib = ">=1.19.0" + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "pillow" +version = "10.0.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Pillow-10.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f62406a884ae75fb2f818694469519fb685cc7eaff05d3451a9ebe55c646891"}, + {file = "Pillow-10.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5db32e2a6ccbb3d34d87c87b432959e0db29755727afb37290e10f6e8e62614"}, + {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf4392b77bdc81f36e92d3a07a5cd072f90253197f4a52a55a8cec48a12483b"}, + {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:520f2a520dc040512699f20fa1c363eed506e94248d71f85412b625026f6142c"}, + {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:8c11160913e3dd06c8ffdb5f233a4f254cb449f4dfc0f8f4549eda9e542c93d1"}, + {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a74ba0c356aaa3bb8e3eb79606a87669e7ec6444be352870623025d75a14a2bf"}, + {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d0dae4cfd56969d23d94dc8e89fb6a217be461c69090768227beb8ed28c0a3"}, + {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22c10cc517668d44b211717fd9775799ccec4124b9a7f7b3635fc5386e584992"}, + {file = "Pillow-10.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:dffe31a7f47b603318c609f378ebcd57f1554a3a6a8effbc59c3c69f804296de"}, + {file = "Pillow-10.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:9fb218c8a12e51d7ead2a7c9e101a04982237d4855716af2e9499306728fb485"}, + {file = "Pillow-10.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d35e3c8d9b1268cbf5d3670285feb3528f6680420eafe35cccc686b73c1e330f"}, + {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3ed64f9ca2f0a95411e88a4efbd7a29e5ce2cea36072c53dd9d26d9c76f753b3"}, + {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6eb5502f45a60a3f411c63187db83a3d3107887ad0d036c13ce836f8a36f1d"}, + {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c1fbe7621c167ecaa38ad29643d77a9ce7311583761abf7836e1510c580bf3dd"}, + {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cd25d2a9d2b36fcb318882481367956d2cf91329f6892fe5d385c346c0649629"}, + {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, + {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, + {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, + {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, + {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, + {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, + {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, + {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce543ed15570eedbb85df19b0a1a7314a9c8141a36ce089c0a894adbfccb4568"}, + {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:685ac03cc4ed5ebc15ad5c23bc555d68a87777586d970c2c3e216619a5476223"}, + {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d72e2ecc68a942e8cf9739619b7f408cc7b272b279b56b2c83c6123fcfa5cdff"}, + {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, + {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, + {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, + {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, + {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, + {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, + {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, + {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f07ea8d2f827d7d2a49ecf1639ec02d75ffd1b88dcc5b3a61bbb37a8759ad8d"}, + {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:040586f7d37b34547153fa383f7f9aed68b738992380ac911447bb78f2abe530"}, + {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f88a0b92277de8e3ca715a0d79d68dc82807457dae3ab8699c758f07c20b3c51"}, + {file = 
"Pillow-10.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c7cf14a27b0d6adfaebb3ae4153f1e516df54e47e42dcc073d7b3d76111a8d86"}, + {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3400aae60685b06bb96f99a21e1ada7bc7a413d5f49bce739828ecd9391bb8f7"}, + {file = "Pillow-10.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbc02381779d412145331789b40cc7b11fdf449e5d94f6bc0b080db0a56ea3f0"}, + {file = "Pillow-10.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9211e7ad69d7c9401cfc0e23d49b69ca65ddd898976d660a2fa5904e3d7a9baa"}, + {file = "Pillow-10.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:faaf07ea35355b01a35cb442dd950d8f1bb5b040a7787791a535de13db15ed90"}, + {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f72a021fbb792ce98306ffb0c348b3c9cb967dce0f12a49aa4c3d3fdefa967"}, + {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f7c16705f44e0504a3a2a14197c1f0b32a95731d251777dcb060aa83022cb2d"}, + {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:76edb0a1fa2b4745fb0c99fb9fb98f8b180a1bbceb8be49b087e0b21867e77d3"}, + {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:368ab3dfb5f49e312231b6f27b8820c823652b7cd29cfbd34090565a015e99ba"}, + {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:608bfdee0d57cf297d32bcbb3c728dc1da0907519d1784962c5f0c68bb93e5a3"}, + {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5c6e3df6bdd396749bafd45314871b3d0af81ff935b2d188385e970052091017"}, + {file = "Pillow-10.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:7be600823e4c8631b74e4a0d38384c73f680e6105a7d3c6824fcf226c178c7e6"}, + {file = "Pillow-10.0.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:92be919bbc9f7d09f7ae343c38f5bb21c973d2576c1d45600fce4b74bafa7ac0"}, + {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8182b523b2289f7c415f589118228d30ac8c355baa2f3194ced084dac2dbba"}, + {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:38250a349b6b390ee6047a62c086d3817ac69022c127f8a5dc058c31ccef17f3"}, + {file = "Pillow-10.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88af2003543cc40c80f6fca01411892ec52b11021b3dc22ec3bc9d5afd1c5334"}, + {file = "Pillow-10.0.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c189af0545965fa8d3b9613cfdb0cd37f9d71349e0f7750e1fd704648d475ed2"}, + {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce7b031a6fc11365970e6a5686d7ba8c63e4c1cf1ea143811acbb524295eabed"}, + {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db24668940f82321e746773a4bc617bfac06ec831e5c88b643f91f122a785684"}, + {file = "Pillow-10.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:efe8c0681042536e0d06c11f48cebe759707c9e9abf880ee213541c5b46c5bf3"}, + {file = "Pillow-10.0.0.tar.gz", hash = "sha256:9c82b5b3e043c7af0d95792d0d20ccf68f61a1fec6b3530e718b688422727396"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate 
platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "playwright" +version = "1.36.0" +description = "A high-level API to automate web browsers" +optional = false +python-versions = ">=3.8" +files = [ + {file = "playwright-1.36.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b7c6ddfca2b141b0385387cc56c125b14ea867902c39e3fc650ddd6c429b17da"}, + {file = "playwright-1.36.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:428a719a6c7e40781c19860ed813840ac2d63678f7587abe12e800ea030d4b7e"}, + {file = "playwright-1.36.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:4e396853034742b76654cdab27422155d238f46e4dc6369ea75854fafb935586"}, + {file = "playwright-1.36.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:72e80076e595f5fcd8ebd89bf6635ad78e4bafa633119faed8b2568d17dbd398"}, + {file = "playwright-1.36.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffbb927679b62fad5071439d5fe0840af46ad1844bc44bf80e1a0ad706140c98"}, + {file = "playwright-1.36.0-py3-none-win32.whl", hash = "sha256:84213339f179fd2a70f77ea7faea0616d74871349d556c53a1ecb7dd5097973c"}, + {file = "playwright-1.36.0-py3-none-win_amd64.whl", hash = "sha256:89ca2261bb00b67d3dff97691cf18f4347ee0529a11e431e47df67b703d4d8fa"}, +] + +[package.dependencies] +greenlet = "2.0.2" +pyee = "9.0.4" + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "protego" +version = "0.2.1" +description = "Pure-Python robots.txt parser with support for modern conventions" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "Protego-0.2.1-py2.py3-none-any.whl", hash = "sha256:04419b18f20e8909f1691c6b678392988271cc2a324a72f9663cb3af838b4bf7"}, + {file = "Protego-0.2.1.tar.gz", hash = "sha256:df666d4304dab774e2dc9feb208bb1ac8d71ea5ceec12f4c99eba30fbd642ff2"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "pyasn1" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false 
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pycodestyle" +version = "2.11.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, + {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydispatcher" +version = "2.0.7" +description = "Multi-producer multi-consumer in-memory signal dispatch system" +optional = false +python-versions = "*" +files = [ + {file = "PyDispatcher-2.0.7-py3-none-any.whl", hash = "sha256:96543bea04115ffde08f851e1d45cacbfd1ee866ac42127d9b476dc5aefa7de0"}, + {file = "PyDispatcher-2.0.7.tar.gz", hash = "sha256:b777c6ad080dc1bad74a4c29d6a46914fa6701ac70f94b0d66fbcfde62f5be31"}, +] + +[package.extras] +dev = ["tox"] + +[[package]] +name = "pyee" +version = "9.0.4" +description = "A port of node.js's EventEmitter to python." 
+optional = false +python-versions = "*" +files = [ + {file = "pyee-9.0.4-py2.py3-none-any.whl", hash = "sha256:9f066570130c554e9cc12de5a9d86f57c7ee47fece163bbdaa3e9c933cfbdfa5"}, + {file = "pyee-9.0.4.tar.gz", hash = "sha256:2770c4928abc721f46b705e6a72b0c59480c4a69c9a83ca0b00bb994f1ea4b32"}, +] + +[package.dependencies] +typing-extensions = "*" + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pyopenssl" +version = "23.2.0" +description = "Python wrapper module around the OpenSSL library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, + {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, +] + +[package.dependencies] +cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" + +[package.extras] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] +test = ["flaky", "pretend", "pytest (>=3.0.1)"] + +[[package]] +name = "pyparsing" +version = "3.1.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pypydispatcher" +version = "2.1.2" +description = "Multi-producer-multi-consumer signal dispatching mechanism" +optional = false +python-versions = "*" +files = [ + {file = "PyPyDispatcher-2.1.2.tar.gz", hash = "sha256:b6bec5dfcff9d2535bca2b23c80eae367b1ac250a645106948d315fcfa9130f2"}, +] + +[[package]] +name = "pyrdfa3" +version = "3.5.3" +description = "pyRdfa Libray" +optional = false +python-versions = "*" +files = [ + {file = "pyRdfa3-3.5.3-py3-none-any.whl", hash = "sha256:4da7ed49e8f524b573ed67e4f7bc7f403bff3be00546d7438fe263c924a91ccf"}, + {file = "pyRdfa3-3.5.3.tar.gz", hash = "sha256:157663a92b87df345b6f69bde235dff5f797891608e12fe1e4fa8dad687131ae"}, +] + +[package.dependencies] +html5lib = "*" +rdflib = "*" + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", 
"setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2023.3" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, +] + +[[package]] +name = "queuelib" +version = "1.6.2" +description = "Collection of persistent (disk-based) and non-persistent (memory-based) queues" +optional = false +python-versions = ">=3.5" +files = [ + {file = "queuelib-1.6.2-py2.py3-none-any.whl", hash = "sha256:4b96d48f650a814c6fb2fd11b968f9c46178b683aad96d68f930fe13a8574d19"}, + {file = "queuelib-1.6.2.tar.gz", hash = "sha256:4b207267f2642a8699a1f806045c56eb7ad1a85a10c0e249884580d139c2fcd2"}, +] + +[[package]] +name = "rdflib" +version = "6.3.2" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, + {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, +] + +[package.dependencies] +isodate = ">=0.6.0,<0.7.0" +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5lib (>=1.0,<2.0)"] +lxml = ["lxml (>=4.3.0,<5.0.0)"] +networkx = ["networkx (>=2.0.0,<3.0.0)"] + +[[package]] +name = "regex" +version = "2023.6.3" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, + {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, + {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, + {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, + {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, + {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, + {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, + {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, + {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, + {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, + {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, + {file = 
"regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, + {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, + {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, + {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, + {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, + {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-file" +version = "1.5.1" +description = "File transport adapter for Requests" +optional = false +python-versions = "*" +files = [ + {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, + {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, +] + +[package.dependencies] +requests = ">=1.0.0" +six = "*" + +[[package]] +name = "scrapy" +version = "2.9.0" +description = "A high-level Web Crawling and Web Scraping framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Scrapy-2.9.0-py2.py3-none-any.whl", hash = "sha256:908fdb7874d235230a16fa288637e3f673813cf27fb177f589b5a22bad00b0f9"}, + {file = "Scrapy-2.9.0.tar.gz", hash = "sha256:564c972b56e54b83141f395ce3f6a25bfe2093d61d13f9b81d05384e19db98da"}, +] + +[package.dependencies] +cryptography = ">=3.4.6" +cssselect = ">=0.9.1" +itemadapter = ">=0.1.0" +itemloaders = ">=1.0.1" +lxml = ">=4.3.0" +packaging = "*" +parsel = ">=1.5.0" +protego = ">=0.1.15" +PyDispatcher = {version = ">=2.0.5", markers = "platform_python_implementation == \"CPython\""} +pyOpenSSL = ">=21.0.0" +PyPyDispatcher = {version = ">=2.1.0", markers = "platform_python_implementation == \"PyPy\""} +queuelib = ">=1.4.2" +service-identity = ">=18.1.0" +setuptools = "*" +tldextract = "*" +Twisted = ">=18.9.0" +w3lib = ">=1.17.0" +"zope.interface" = ">=5.1.0" + +[[package]] +name = "scrapy-splash" +version = "0.9.0" +description = "JavaScript support for Scrapy using Splash" +optional = false +python-versions = "*" +files = [ + {file = "scrapy-splash-0.9.0.tar.gz", hash = "sha256:ecf130264dc08e0c461c8607ecad777468a64ad01dedb2629c0f81bd5fcd7295"}, + {file = "scrapy_splash-0.9.0-py2.py3-none-any.whl", hash = "sha256:1dc8f8a1c4c4e4341b73987d28c17f82f6a5afeaf585f23449c695e5dcfd8b39"}, +] + +[[package]] +name = "service-identity" +version = "23.1.0" +description = "Service identity verification for pyOpenSSL & cryptography." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "service_identity-23.1.0-py3-none-any.whl", hash = "sha256:87415a691d52fcad954a500cb81f424d0273f8e7e3ee7d766128f4575080f383"}, + {file = "service_identity-23.1.0.tar.gz", hash = "sha256:ecb33cd96307755041e978ab14f8b14e13b40f1fbd525a4dc78f46d2b986431d"}, +] + +[package.dependencies] +attrs = ">=19.1.0" +cryptography = "*" +pyasn1 = "*" +pyasn1-modules = "*" + +[package.extras] +dev = ["pyopenssl", "service-identity[docs,idna,mypy,tests]"] +docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] +idna = ["idna"] +mypy = ["idna", "mypy", "types-pyopenssl"] +tests = ["coverage[toml] (>=5.0.2)", "pytest"] + +[[package]] +name = "setuptools" +version = "68.0.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "soupsieve" +version = "2.4.1" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, + {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, +] + +[[package]] +name = "sqlparse" +version = "0.4.4" +description = "A non-validating SQL parser." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, + {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, +] + +[package.extras] +dev = ["build", "flake8"] +doc = ["sphinx"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "tldextract" +version = "3.4.4" +description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." +optional = false +python-versions = ">=3.7" +files = [ + {file = "tldextract-3.4.4-py3-none-any.whl", hash = "sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2"}, + {file = "tldextract-3.4.4.tar.gz", hash = "sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234"}, +] + +[package.dependencies] +filelock = ">=3.0.8" +idna = "*" +requests = ">=2.1.0" +requests-file = ">=1.4" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "twisted" +version = "22.10.0" +description = "An asynchronous networking framework written in Python" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "Twisted-22.10.0-py3-none-any.whl", hash = "sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"}, + {file = "Twisted-22.10.0.tar.gz", hash = "sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +Automat = ">=0.8.0" +constantly = ">=15.1" +hyperlink = ">=17.1.1" +incremental = ">=21.3.0" +twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""} +typing-extensions = ">=3.6.5" +"zope.interface" = ">=4.4.2" + +[package.extras] +all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] +conch-nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] +contextvars = ["contextvars (>=2.4,<3)"] +dev = ["coverage (>=6b1,<7)", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)"] +dev-release = ["pydoctor (>=22.9.0,<22.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)"] +gtk-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", 
"priority (>=1.1.0,<2.0)", "pyasn1", "pygobject", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] +macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] +osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] +test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.0,<7.0)"] +tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] +windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] + +[[package]] +name = "twisted-iocpsupport" +version = "1.0.3" +description = "An extension for use in the twisted I/O Completion Ports reactor." 
+optional = false +python-versions = "*" +files = [ + {file = "twisted-iocpsupport-1.0.3.tar.gz", hash = "sha256:afb00801fdfbaccf0d0173a722626500023d4a19719ac9f129d1347a32e2fc66"}, + {file = "twisted_iocpsupport-1.0.3-cp310-cp310-win32.whl", hash = "sha256:a379ef56a576c8090889f74441bc3822ca31ac82253cc61e8d50631bcb0c26d0"}, + {file = "twisted_iocpsupport-1.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1ea2c3fbdb739c95cc8b3355305cd593d2c9ec56d709207aa1a05d4d98671e85"}, + {file = "twisted_iocpsupport-1.0.3-cp311-cp311-win32.whl", hash = "sha256:7efcdfafb377f32db90f42bd5fc5bb32cd1e3637ee936cdaf3aff4f4786ab3bf"}, + {file = "twisted_iocpsupport-1.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1dbfac706972bf9ec5ce1ddbc735d2ebba406ad363345df8751ffd5252aa1618"}, + {file = "twisted_iocpsupport-1.0.3-cp36-cp36m-win32.whl", hash = "sha256:1ddfc5fa22ec6f913464b736b3f46e642237f17ac41be47eed6fa9bd52f5d0e0"}, + {file = "twisted_iocpsupport-1.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:1bdccbb22199fc69fd7744d6d2dfd22d073c028c8611d994b41d2d2ad0e0f40d"}, + {file = "twisted_iocpsupport-1.0.3-cp37-cp37m-win32.whl", hash = "sha256:db11c80054b52dbdea44d63d5474a44c9a6531882f0e2960268b15123088641a"}, + {file = "twisted_iocpsupport-1.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:67bec1716eb8f466ef366bbf262e1467ecc9e20940111207663ac24049785bad"}, + {file = "twisted_iocpsupport-1.0.3-cp38-cp38-win32.whl", hash = "sha256:98a6f16ab215f8c1446e9fc60aaed0ab7c746d566aa2f3492a23cea334e6bebb"}, + {file = "twisted_iocpsupport-1.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:4f249d0baac836bb431d6fa0178be063a310136bc489465a831e3abd2d7acafd"}, + {file = "twisted_iocpsupport-1.0.3-cp39-cp39-win32.whl", hash = "sha256:aaca8f30c3b7c80d27a33fe9fe0d0bac42b1b012ddc60f677175c30e1becc1f3"}, + {file = "twisted_iocpsupport-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:dff43136c33665c2d117a73706aef6f7d6433e5c4560332a118fe066b16b8695"}, + {file = "twisted_iocpsupport-1.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8faceae553cfadc42ad791b1790e7cdecb7751102608c405217f6a26e877e0c5"}, + {file = "twisted_iocpsupport-1.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6f8c433faaad5d53d30d1da6968d5a3730df415e2efb6864847267a9b51290cd"}, + {file = "twisted_iocpsupport-1.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3f39c41c0213a81a9ce0961e30d0d7650f371ad80f8d261007d15a2deb6d5be3"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "tzlocal" +version = "5.0.1" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tzlocal-5.0.1-py3-none-any.whl", hash = "sha256:f3596e180296aaf2dbd97d124fe76ae3a0e3d32b258447de7b939b3fd4be992f"}, + {file = 
"tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + +[[package]] +name = "urllib3" +version = "2.0.4" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "vobject" +version = "0.9.6.1" +description = "A full-featured Python package for parsing and creating iCalendar and vCard files" +optional = false +python-versions = "*" +files = [ + {file = "vobject-0.9.6.1.tar.gz", hash = "sha256:96512aec74b90abb71f6b53898dd7fe47300cc940104c4f79148f0671f790101"}, +] + +[package.dependencies] +python-dateutil = ">=2.4.0" + +[[package]] +name = "w3lib" +version = "2.1.1" +description = "Library of web-related functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "w3lib-2.1.1-py3-none-any.whl", hash = "sha256:7fd5bd7980a95d1a8185e867d05f68a591aa281a3ded4590d2641d7b09086ed4"}, + {file = "w3lib-2.1.1.tar.gz", hash = "sha256:0e1198f1b745195b6b3dd1a4cd66011fbf82f30a4d9dabaee1f9e5c86f020274"}, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "wheel" +version = "0.41.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.41.0-py3-none-any.whl", hash = "sha256:7e9be3bbd0078f6147d82ed9ed957e323e7708f57e134743d2edef3a7b7972a9"}, + {file = "wheel-0.41.0.tar.gz", hash = "sha256:55a0f0a5a84869bce5ba775abfd9c462e3a6b1b7b7ec69d72c0b83d673a5114d"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + +[[package]] +name = "zope-interface" +version = "6.0" +description = "Interfaces for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, + {file = 
"zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, + {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, + {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, + {file = "zope.interface-6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc"}, + {file = "zope.interface-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373"}, + {file = "zope.interface-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f"}, + {file = "zope.interface-6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446"}, + {file = 
"zope.interface-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f"}, + {file = "zope.interface-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, + {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, + {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx", "repoze.sphinx.autointerface"] +test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "7b092b22bc516d7063cd30f8ccb835c658a38a9e9d4e8eb93296ee40241877bf" diff --git a/requirements.txt b/requirements.txt index 925c5f7c..7513d4b0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,28 +1,83 @@ -wheel==0.41.0 -image -dateparser==1.1.8 -isodate==0.6.1 -html2text~=2020.1.16 -scrapy-splash==0.9.0 -python-dateutil==2.8.2 -python-dotenv==1.0.0 -Scrapy==2.9.0 -requests==2.31.0 -vobject==0.9.6.1 -xmltodict==0.13.0 -overrides==3.1.0 -jmespath==1.0.1 -flake8==6.1.0 -pytest==7.4.0 -extruct==0.16.0 -lxml==4.9.3 -w3lib==2.1.1 -itemloaders==1.1.0 -Pillow==10.0.0 -itemadapter==0.8.0 -six==1.16.0 -certifi==2023.7.22 -urllib3==2.0.4 -playwright==1.36.0 -pyOpenSSL==23.2.0 -black==23.7.0 \ No newline at end of file +asgiref==3.7.2 ; python_version >= "3.10" and python_version < "4.0" +attrs==23.1.0 ; python_version >= "3.10" and python_version < "4.0" +automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" +beautifulsoup4==4.12.2 ; python_version >= "3.10" and python_version < "4.0" +black==23.7.0 ; python_version >= "3.10" and python_version < "4.0" +certifi==2023.7.22 ; python_version >= "3.10" and python_version < "4.0" +cffi==1.15.1 ; python_version >= "3.10" and python_version < "4.0" +charset-normalizer==3.2.0 ; python_version >= "3.10" and python_version < "4.0" +click==8.1.6 ; python_version >= "3.10" and python_version < "4.0" +colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") +constantly==15.1.0 ; python_version >= "3.10" and python_version < "4.0" +cryptography==41.0.2 ; python_version >= "3.10" and python_version < "4.0" +cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" +dateparser==1.1.8 ; 
python_version >= "3.10" and python_version < "4.0" +django==4.2.3 ; python_version >= "3.10" and python_version < "4.0" +exceptiongroup==1.1.2 ; python_version >= "3.10" and python_version < "3.11" +extruct==0.16.0 ; python_version >= "3.10" and python_version < "4.0" +filelock==3.12.2 ; python_version >= "3.10" and python_version < "4.0" +greenlet==2.0.2 ; python_version >= "3.10" and python_version < "4.0" +html-text==0.5.2 ; python_version >= "3.10" and python_version < "4.0" +html2text==2020.1.16 ; python_version >= "3.10" and python_version < "4.0" +html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" +hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" +idna==3.4 ; python_version >= "3.10" and python_version < "4.0" +image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" +incremental==22.10.0 ; python_version >= "3.10" and python_version < "4.0" +iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" +isodate==0.6.1 ; python_version >= "3.10" and python_version < "4.0" +itemadapter==0.8.0 ; python_version >= "3.10" and python_version < "4.0" +itemloaders==1.1.0 ; python_version >= "3.10" and python_version < "4.0" +jmespath==1.0.1 ; python_version >= "3.10" and python_version < "4.0" +jstyleson==0.0.2 ; python_version >= "3.10" and python_version < "4.0" +lxml==4.9.3 ; python_version >= "3.10" and python_version < "4.0" +mf2py==1.1.3 ; python_version >= "3.10" and python_version < "4.0" +mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" +overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" +packaging==23.1 ; python_version >= "3.10" and python_version < "4.0" +parsel==1.8.1 ; python_version >= "3.10" and python_version < "4.0" +pathspec==0.11.2 ; python_version >= "3.10" and python_version < "4.0" +pillow==10.0.0 ; python_version >= "3.10" and python_version < "4.0" +platformdirs==3.10.0 ; python_version >= "3.10" and python_version < "4.0" +playwright==1.36.0 ; python_version >= "3.10" and python_version < "4.0" +pluggy==1.2.0 ; python_version >= "3.10" and python_version < "4.0" +protego==0.2.1 ; python_version >= "3.10" and python_version < "4.0" +pyasn1-modules==0.3.0 ; python_version >= "3.10" and python_version < "4.0" +pyasn1==0.5.0 ; python_version >= "3.10" and python_version < "4.0" +pycparser==2.21 ; python_version >= "3.10" and python_version < "4.0" +pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" +pyee==9.0.4 ; python_version >= "3.10" and python_version < "4.0" +pyopenssl==23.2.0 ; python_version >= "3.10" and python_version < "4.0" +pyparsing==3.1.1 ; python_version >= "3.10" and python_version < "4.0" +pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" +pyrdfa3==3.5.3 ; python_version >= "3.10" and python_version < "4.0" +pytest==7.4.0 ; python_version >= "3.10" and python_version < "4.0" +python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" +python-dotenv==1.0.0 ; python_version >= "3.10" and python_version < "4.0" +pytz==2023.3 ; python_version >= "3.10" and python_version < "4.0" +queuelib==1.6.2 ; python_version >= "3.10" and python_version < "4.0" +rdflib==6.3.2 ; python_version >= "3.10" and python_version < "4.0" +regex==2023.6.3 ; python_version >= "3.10" and python_version < "4.0" +requests-file==1.5.1 ; python_version >= "3.10" and python_version < "4.0" +requests==2.31.0 ; 
python_version >= "3.10" and python_version < "4.0" +scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" +scrapy==2.9.0 ; python_version >= "3.10" and python_version < "4.0" +service-identity==23.1.0 ; python_version >= "3.10" and python_version < "4.0" +setuptools==68.0.0 ; python_version >= "3.10" and python_version < "4.0" +six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" +soupsieve==2.4.1 ; python_version >= "3.10" and python_version < "4.0" +sqlparse==0.4.4 ; python_version >= "3.10" and python_version < "4.0" +tldextract==3.4.4 ; python_version >= "3.10" and python_version < "4.0" +tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" +twisted-iocpsupport==1.0.3 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" +twisted==22.10.0 ; python_version >= "3.10" and python_version < "4.0" +typing-extensions==4.7.1 ; python_version >= "3.10" and python_version < "4.0" +tzdata==2023.3 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") +tzlocal==5.0.1 ; python_version >= "3.10" and python_version < "4.0" +urllib3==2.0.4 ; python_version >= "3.10" and python_version < "4.0" +vobject==0.9.6.1 ; python_version >= "3.10" and python_version < "4.0" +w3lib==2.1.1 ; python_version >= "3.10" and python_version < "4.0" +webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" +wheel==0.41.0 ; python_version >= "3.10" and python_version < "4.0" +xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" +zope-interface==6.0 ; python_version >= "3.10" and python_version < "4.0" From 98c0d5cce2bf75ce2c67451f56b9a22b8c84252d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:36:28 +0200 Subject: [PATCH 323/590] docs: update Readme.md Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- Readme.md | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/Readme.md b/Readme.md index 3f39b1cb..ec54fd58 100644 --- a/Readme.md +++ b/Readme.md @@ -1,9 +1,12 @@ # Open Edu Hub Search ETL +## Step 1: Project Setup - Python (manual approach) + - make sure you have python3 installed () -- (Python 3.9.1 or newer is required) + - (Python 3.10 or newer is required) - go to project root -- Run +- Run the following commands: + ``` sudo apt install python3-dev python3-pip python3-venv libpq-dev -y python3 -m venv .venv @@ -15,17 +18,30 @@ python3 -m venv .venv `pip3 install -r requirements.txt` -If you have Docker installed, use `docker-compose up` to start up the multi-container for `Splash` and `Pyppeteer`-integration. +## Step 1 (alternative): Project Setup - Python (automated, via `poetry`) + +- Step 1: Make sure that you have [Poetry](https://python-poetry.org) v1.5.0+ installed +- Step 2: Open your terminal in the project root directory: + - Step 2.1 (this is an optional, strictly personal preference): If you want to have your `.venv` to be created in the project root directory: + - `poetry config virtualenvs.in-project true` +- Step 3: Install dependencies according to `pyproject.toml`-instructions: `poetry install` + +## Step 2: Project Setup - required Docker Containers +If you have Docker installed, use `docker-compose up` to start up the multi-container for `Splash` and `Playwright`-integration. 
As a last step, set up your config variables by copying the `.env.example`-file and modifying it if necessary: `cp converter/.env.example converter/.env` -- A crawler can be run with `scrapy crawl `. It assumes that you have an edu-sharing 6.0 instance in your `.env` settings configured which can accept the data. +# Running crawlers + +- A crawler can be run with `scrapy crawl `. + - (It assumes that you have an edu-sharing v6.0+ instance in your `.env` settings configured which can accept the data.) - If a crawler has [Scrapy Spider Contracts](https://docs.scrapy.org/en/latest/topics/contracts.html#spiders-contracts) implemented, you can test those by running `scrapy check ` -## Run via Docker +## Running crawlers via Docker + ```bash git clone https://github.com/openeduhub/oeh-search-etl cd oeh-search-etl @@ -36,7 +52,7 @@ export CRAWLER=your_crawler_id_spider # i.e. wirlernenonline_spider docker compose up ``` -## Building a Crawler +# Building a Crawler - We use Scrapy as a framework. Please check out the guides for Scrapy spider (https://docs.scrapy.org/en/latest/intro/tutorial.html) - To create a new spider, create a file inside `converter/spiders/_spider.py` @@ -45,5 +61,5 @@ docker compose up - As a sample/template, please take a look at the `sample_spider.py` - To learn more about the LOM standard we're using, you'll find useful information at https://en.wikipedia.org/wiki/Learning_object_metadata -## Still have questions? Check out our GitHub-Wiki! +# Still have questions? Check out our GitHub-Wiki! If you need help getting started or setting up your work environment, please don't hesitate to visit our GitHub Wiki at https://github.com/openeduhub/oeh-search-etl/wiki From c6bd7acb71204a05eb788c76655504d955d4d56a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:57:08 +0200 Subject: [PATCH 324/590] fix: deprecated 'Pillow' method call during thumbnail resizing - the 'Pillow'-package deprecated the previously used "image.ANTIALIAS"-method in v2.7 (see: https://pillow.readthedocs.io/en/stable/releasenotes/2.7.0.html#antialias-renamed-to-lanczos) -- (for reference: the most-current version of Pillow is v10.0.0 -> the following Constants have changed: https://pillow.readthedocs.io/en/stable/releasenotes/10.0.0.html#constants) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index ccab1ee2..ba335783 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -318,7 +318,7 @@ def scale_image(img, max_size): while w * h > max_size: w *= 0.9 h *= 0.9 - return img.resize((int(w), int(h)), Image.ANTIALIAS).convert("RGB") + return img.resize((int(w), int(h)), Image.Resampling.LANCZOS).convert("RGB") def process_item(self, raw_item, spider): """ From b879d3bada1c197b6363dfacdc721df4383cae62 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 1 Aug 2023 18:43:43 +0200 Subject: [PATCH 325/590] serlo_spider v0.2.9 (language selection via .env setting) - feat: optional '.env'-Setting for limiting GraphQL API Requests to specific Serlo instances (= language-specific servers) -- example: SERLO_INSTANCE="de" --- this setting will only query the German Serlo instance -- all possible values: "de", "en", "es", "ta", "hi" --- see: https://github.com/serlo/documentation/wiki/Metadata-API#understanding-the-request-payload-and-pagination 
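A condensed, illustrative sketch of what the new setting changes in the GraphQL request (not part of the patch itself; the variable names and the compacted query layout below are placeholders — the actual implementation is in the serlo_spider.py diff that follows):

```python
# Illustrative only: the optional SERLO_INSTANCE value is appended as an extra
# 'instance' argument to the 'resources' query; without it, all language
# instances are queried. 500 is the spider's default page size.
instance_value = "de"  # as read from converter/.env via env.get("SERLO_INSTANCE", ...)
instance_parameter = f"instance: {instance_value}" if instance_value else ""

graphql_body = {
    "query": f"""
    query {{
      metadata {{
        resources(first: 500, after: "" {instance_parameter}) {{
          nodes
          pageInfo {{ hasNextPage endCursor }}
        }}
      }}
    }}"""
}
```
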
Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 33 +++++++++++++++++++++++-------- 1 file changed, 25 insertions(+), 8 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 76ef31f0..ef05f758 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -30,12 +30,13 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.8" # last update: 2023-07-11 + version = "0.2.9" # last update: 2023-08-01 custom_settings = { # Using Playwright because of Splash-issues with thumbnails+text for Serlo "WEB_TOOLS": WebEngine.Playwright } GRAPHQL_MODIFIED_AFTER_PARAMETER: str = "" + GRAPHQL_INSTANCE_PARAMETER: str = "" graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: @@ -73,6 +74,11 @@ def decide_crawl_mode(self): You can use this '.env'-setting to crawl Serlo more efficiently: Specify a date and only receive items that were modified since . """ + graphql_instance_param: str = env.get(key="SERLO_INSTANCE", allow_null=True, default=None) + if graphql_instance_param: + logging.info(f"INIT: '.env'-Setting 'SERLO_INSTANCE': {graphql_instance_param} (language) detected. " + f"Limiting query to a single language selection.") + self.GRAPHQL_INSTANCE_PARAMETER = graphql_instance_param graphql_modified_after_param: str = env.get(key="SERLO_MODIFIED_AFTER", allow_null=True, default=None) if graphql_modified_after_param: logging.info( @@ -91,7 +97,7 @@ def decide_crawl_mode(self): date_parsed_iso = date_parsed.isoformat() logging.info( f"INIT: SUCCESS - serlo_spider will ONLY request GraphQL items that were modified (by Serlo) after " - f"'{date_parsed_iso}' ." + f"'{date_parsed_iso}'." ) self.GRAPHQL_MODIFIED_AFTER_PARAMETER = date_parsed_iso else: @@ -126,16 +132,27 @@ def query_graphql_page(self, amount_of_nodes: int = 500, pagination_string: str # we only add the (optional) 'modifiedAfter'-parameter if the .env-Setting was recognized. By default, # the string will stay empty. 
modified_after: str = f', modifiedAfter: "{modified_after}"' + instance_parameter: str = "" + if self.GRAPHQL_INSTANCE_PARAMETER: + # Serlo allows us to limit the query results to a specific serlo instance (the currently 6 possible language + # codes can be seen here: + # https://github.com/serlo/documentation/wiki/Metadata-API#understanding-the-request-payload-and-pagination + instance_value: str = self.GRAPHQL_INSTANCE_PARAMETER + if instance_value and instance_value in ["de", "en", "es", "ta", "hi", "fr"]: + instance_parameter: str = f'instance: {instance_value}' graphql_metadata_query_body = { "query": f""" query {{ metadata {{ - resources(first: {amount_of_nodes}, after: "{pagination_string}"{modified_after}){{ - nodes - pageInfo {{ - hasNextPage - endCursor - }} + resources( + first: {amount_of_nodes} + after: "{pagination_string}"{modified_after}{instance_parameter} + ){{ + nodes + pageInfo {{ + hasNextPage + endCursor + }} }} }} }} From e4a4233acc3d11fdcbf4ac8ce9accbbefaa3637b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:38:56 +0200 Subject: [PATCH 326/590] docs: serlo_spider 'instance'-selection via '.env'-Setting - update .env.example in regard to the optional 'SERLO_INSTANCE' environment variable -- if the SERLO_INSTANCE setting is active, only one language instance is queried via GraphQL (you cannot limit a query to multiple instances at the same time) -- the default case (if SERLO_INSTANCE is empty or not set all) always returns the complete list of results from ALL serlo instances Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/.env.example | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/converter/.env.example b/converter/.env.example index 846ef006..e183014d 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -74,4 +74,6 @@ YOUTUBE_API_KEY="" # --- serlo_spider (v0.2.8+) settings: # SERLO_MODIFIED_AFTER="2023-07-01" # Crawl only Serlo Materials which have been modified (by Serlo authors) after -# . Use this setting to improve the crawling speed of periodic crawls. \ No newline at end of file +# . Use this setting to improve the crawling speed of periodic crawls. 
+# SERLO_INSTANCE="de" +# Available Serlo "instance" values (as of 2023-08-02): "de" | "en" | "es" | "fr" | "hi" | "ta" \ No newline at end of file From 825b67a30fd62b49ecfa3c5c0615c1e8e8208947 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 2 Aug 2023 13:44:32 +0200 Subject: [PATCH 327/590] science_in_school_spider v0.0.5 - feat: split list of authors and save them to individual 'lifecycle'-items - change: replace crawler-specific license mapping by LicenseMapper -- this change fixes a small amount edge-cases were unexpected "CC BY-ND" and copyrighted materials could not be mapped --- the old implementation assumed (according to https://www.scienceinschool.org/copyright/) that there could only be 3 different CC licenses, which, in reality, was not the case - refactor: 'getId'- and 'getHash'-method - code formatting via black Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/science_in_school_spider.py | 265 ++++++++++-------- 1 file changed, 152 insertions(+), 113 deletions(-) diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index 7dbaa1b9..3afa33c3 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -5,26 +5,29 @@ import w3lib.html from converter.constants import Constants -from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, LomClassificationItemLoader, ValuespaceItemLoader, \ - LicenseItemLoader, LomAgeRangeItemLoader +from converter.items import ( + BaseItemLoader, + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + LomClassificationItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, + LomAgeRangeItemLoader, +) from converter.spiders.base_classes import LomBase +from converter.util.license_mapper import LicenseMapper class ScienceInSchoolSpider(scrapy.Spider, LomBase): name = "science_in_school_spider" friendlyName = "Science in School" - start_urls = [ - "https://www.scienceinschool.org/issue/" - ] - version = "0.0.4" # last update: 2022-08-26 - custom_settings = { - "AUTOTHROTTLE_ENABLED": True, - "AUTOTHROTTLE_DEBUG": True - } - allowed_domains = [ - "scienceinschool.org" - ] + start_urls = ["https://www.scienceinschool.org/issue/"] + version = "0.0.5" # last update: 2023-08-02 + custom_settings = {"AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True} + allowed_domains = ["scienceinschool.org"] ALL_ARTICLE_URLS = set() TOPICS_TO_DISCIPLINES_MAPPING = { @@ -34,16 +37,12 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): "Health": "Health education", "Mathematics": "Mathematics", "Physics": "Physics", - "Sustainability": "Sustainability" - } - LICENSE_MAPPING = { - "CC-BY": Constants.LICENSE_CC_BY_40, - "CC-BY-NC-SA": Constants.LICENSE_CC_BY_NC_SA_40, - "CC-BY-NC-ND": Constants.LICENSE_CC_BY_NC_ND_40 + "Sustainability": "Sustainability", } - KEYWORD_EXCLUSION_LIST = [ - "Not applicable", "not applicable" - ] + KEYWORD_EXCLUSION_LIST = ["Not applicable", "not applicable"] + + def __init__(self): + LomBase.__init__(self=self) def start_requests(self): for start_url in self.start_urls: @@ -88,14 +87,31 @@ def parse_article_overview(self, response: scrapy.http.Response) -> scrapy.Reque pass def getId(self, response=None) -> str: - pass + return response.url def getHash(self, response=None) -> 
str: - pass + date_published: str = self.extract_and_parse_date(response) + hash_value: str = f"{date_published}v{self.version}" + return hash_value + + @staticmethod + def extract_and_parse_date(response): + date_published_raw: str = response.xpath('//p[@class="vf-meta__date"]/text()').get() + date_published = str() + if date_published_raw: + # using dateparser to get a reusable ISO-format from strings like 'January 28, 2016' + # dateparser will show warnings in Python 3.10 (we're waiting for a new dateparser version) + date_parsed = dateparser.parse(date_string=date_published_raw) + if date_parsed: + # the dateparser library can't parse all languages reliably, throws errors with serbian articles + date_published = date_parsed.isoformat() + else: + date_published = datetime.datetime.now() + return date_published def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: """ - Crawls an individual article and extracts metadata. Afterwards creates a BaseItem by filling up metadata-fields + Crawls an individual article and extracts metadata. Afterward creates a BaseItem by filling up metadata-fields by calling .load_item() on the respective ItemLoaders. :param response: scrapy.http.Response @@ -106,8 +122,9 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: @url https://www.scienceinschool.org/article/2006/birdflu/ @returns item 1 """ - multilanguage_article_list: list = response.xpath('//ul[@class="vf-links__list vf-links__list--secondary | ' - 'vf-list"]/li/a/@href').getall() + multilanguage_article_list: list = response.xpath( + '//ul[@class="vf-links__list vf-links__list--secondary | ' 'vf-list"]/li/a/@href' + ).getall() # on the left side of each article is a list of "Available languages", which holds URLs to all available # versions of the (currently visited) article, including its own URL. We need to make sure that we're only # gathering URLs that haven't been parsed before: @@ -116,10 +133,10 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: for article_translation_url in multilanguage_article_list: if article_translation_url not in self.ALL_ARTICLE_URLS: # making sure we're not parsing translated articles more than once or causing loops - if article_translation_url.endswith('.pdf'): + if article_translation_url.endswith(".pdf"): # skipping direct-links to .pdf files because scrapy / splash can't handle these continue - elif "/sr/" in article_translation_url or article_translation_url.endswith('-sr/'): + elif "/sr/" in article_translation_url or article_translation_url.endswith("-sr/"): # Articles that are translated to Serbian currently aren't supported by the dateparser. # Since we don't want to deal with ~40 errors from these URLs, we skip them altogether. 
continue @@ -131,22 +148,12 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: title: str = response.xpath('//meta[@property="og:title"]/@content').get() if title is None: - title = response.xpath('//head/title/text()').get() + title = response.xpath("//head/title/text()").get() description: str = response.xpath('//meta[@property="og:description"]/@content').get() thumbnail_url: str = response.xpath('//meta[@property="og:image"]/@content').get() - language: list = response.xpath('//html/@lang').getall() + language: list = response.xpath("//html/@lang").getall() - date_published_raw: str = response.xpath('//p[@class="vf-meta__date"]/text()').get() - date_published = str() - if date_published_raw: - # using dateparser to get a reusable ISO-format from strings like 'January 28, 2016' - # dateparser will show warnings in Python 3.10 (we're waiting for a new dateparser version) - date_parsed = dateparser.parse(date_string=date_published_raw) - if date_parsed: - # the dateparser library can't parse all languages reliably, throws errors with serbian articles - date_published = date_parsed.isoformat() - else: - date_published = datetime.datetime.now() + date_published = self.extract_and_parse_date(response) authors_raw: list = response.xpath('//div[@class="vf-author | vf-article-meta-info__author"]/p/text()').getall() authors_clean = list() @@ -154,7 +161,12 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: for author_raw in authors_raw: possible_authors: str = w3lib.html.strip_html5_whitespace(author_raw) if possible_authors: - authors_clean.append(possible_authors) + if "," in possible_authors: + possible_authors_list: list[str] = possible_authors.split(", ") + for author in possible_authors_list: + authors_clean.append(author) + else: + authors_clean.append(possible_authors) # selector for the whole metadata container, in case you want to try it out with Scrapy Shell: # response.xpath('//aside[@class="vf-article-meta-information"]').getall() @@ -167,15 +179,15 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: if metadata_container_ages_topics_keywords: for metadata_container_item in metadata_container_ages_topics_keywords: current_selector = scrapy.Selector(text=metadata_container_item) - current_selector_description = current_selector.xpath('//span/text()').get() + current_selector_description = current_selector.xpath("//span/text()").get() if current_selector_description: if "Ages:" in current_selector_description: - age_ranges_raw_string: str = current_selector.xpath('//p/text()').get() + age_ranges_raw_string: str = current_selector.xpath("//p/text()").get() # a typical string value can be ' 14-16, 16-19' (including the whitespace around single values) if age_ranges_raw_string: # therefore we're splitting up the string by its commas and removing the whitespace around # each value - potential_age_ranges: list = age_ranges_raw_string.split(',') + potential_age_ranges: list = age_ranges_raw_string.split(",") if potential_age_ranges: for age_range_item in potential_age_ranges: if age_range_item in self.KEYWORD_EXCLUSION_LIST: @@ -186,8 +198,8 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: age_ranges.append(age_range_clean) if "Topics:" in current_selector_description: # there can be several topics per article - topic_description_list_raw = current_selector.xpath('//a/text()').getall() - topic_description_urls = current_selector.xpath('//a/@href').getall() + 
topic_description_list_raw = current_selector.xpath("//a/text()").getall() + topic_description_urls = current_selector.xpath("//a/@href").getall() if topic_description_list_raw and topic_description_urls: # topic_dict = dict(zip(topic_description_list_raw, topic_description_urls)) for potential_topic in topic_description_list_raw.copy(): @@ -199,16 +211,17 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: else: keywords.add(potential_topic) if "Keywords:" in current_selector_description: - keyword_description_list_raw: list = current_selector.xpath('//a/text()').getall() - keyword_description_urls: list = current_selector.xpath('//a/@href').getall() + keyword_description_list_raw: list = current_selector.xpath("//a/text()").getall() + keyword_description_urls: list = current_selector.xpath("//a/@href").getall() if keyword_description_list_raw and keyword_description_urls: # keyword_dict = dict(zip(keyword_description_list_raw, keyword_description_urls)) for potential_keyword in keyword_description_list_raw: keywords.add(potential_keyword) # supporting_materials_selector = response.xpath('//article[@class="sis-materials"]/p/a') - supporting_materials_descriptions: list = \ - response.xpath('//article[@class="sis-materials"]/p/a/text()').getall() + supporting_materials_descriptions: list = response.xpath( + '//article[@class="sis-materials"]/p/a/text()' + ).getall() supporting_materials_urls: list = response.xpath('//article[@class="sis-materials"]/p/a/@href').getall() # on the right-hand side of an article there can (sometimes) be downloadable, additional materials: # - supporting materials (teachers guides etc.) @@ -230,52 +243,63 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: pass base = BaseItemLoader() - base.add_value('sourceId', response.url) - hash_temp: str = f"{date_published}v{self.version}" - base.add_value('hash', hash_temp) + base.add_value("sourceId", self.getId(response)) + base.add_value("hash", self.getHash(response)) if thumbnail_url: - base.add_value('thumbnail', thumbnail_url) + base.add_value("thumbnail", thumbnail_url) lom = LomBaseItemloader() general = LomGeneralItemloader() - general.add_value('identifier', response.url) + general.add_value("identifier", response.url) if title: - general.add_value('title', title) + general.add_value("title", title) if keywords: - general.add_value('keyword', keywords) + general.add_value("keyword", keywords) if description: - general.add_value('description', description) + general.add_value("description", description) if language: for language_item in language: # edu-sharing expects the base.language value to be using underscores - language_underscore: str = language_item.replace('-', '_') - general.add_value('language', language_underscore) + language_underscore: str = language_item.replace("-", "_") + general.add_value("language", language_underscore) # depending on the article language, we're creating sub-folders within edu-sharing: # SYNC_OBJ/science_in_school_spider// - base.add_value('origin', language) + base.add_value("origin", language) else: # if no language code is detected, the main part of the website is always available in English - general.add_value('language', 'en') + general.add_value("language", "en") # noinspection DuplicatedCode - lom.add_value('general', general.load_item()) + lom.add_value("general", general.load_item()) technical = LomTechnicalItemLoader() - technical.add_value('format', 'text/html') - technical.add_value('location', response.url) 
- lom.add_value('technical', technical.load_item()) - - lifecycle = LomLifecycleItemloader() - lifecycle.add_value('role', 'publisher') - lifecycle.add_value('organization', 'EIROforum') # EIROforum is the intergovernmental organization/publisher - # behind scienceinschool.org - lifecycle.add_value('url', 'https://www.scienceinschool.org/about-eiroforum/') - lifecycle.add_value('email', 'info@eiroforum.org') - lifecycle.add_value('date', date_published) - lom.add_value('lifecycle', lifecycle.load_item()) + technical.add_value("format", "text/html") + technical.add_value("location", response.url) + lom.add_value("technical", technical.load_item()) + + if authors_clean: + for author in authors_clean: + author_single_split = author.split(sep=" ", maxsplit=1) + if author_single_split: + lifecycle_author = LomLifecycleItemloader() + lifecycle_author.add_value("role", "author") + if len(author_single_split) >= 1: + lifecycle_author.add_value("firstName", author_single_split[0]) + if len(author_single_split) == 2: + lifecycle_author.add_value("lastName", author_single_split[1]) + lom.add_value("lifecycle", lifecycle_author.load_item()) + + lifecycle_publisher = LomLifecycleItemloader() + lifecycle_publisher.add_value("role", "publisher") + lifecycle_publisher.add_value("organization", "EIROforum") # EIROforum is the intergovernmental + # organization/publisher behind scienceinschool.org + lifecycle_publisher.add_value("url", "https://www.scienceinschool.org/about-eiroforum/") + lifecycle_publisher.add_value("email", "info@eiroforum.org") + lifecycle_publisher.add_value("date", date_published) + lom.add_value("lifecycle", lifecycle_publisher.load_item()) educational = LomEducationalItemLoader() if language: - educational.add_value('language', language) + educational.add_value("language", language) # ToDo: the primary website language is always English, but sometimes additional languages are available as well lom_age_range_loader = LomAgeRangeItemLoader() # since we already prepared age_ranges above to only hold valid, already whitespace-stripped strings, we can use @@ -287,66 +311,81 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: if "<" in age_range_item: # "< 11" from_range = 0 - to_range = age_range_item.replace('<', '') + to_range = age_range_item.replace("<", "") to_range = int(to_range) age_range_total.add(from_range) age_range_total.add(to_range) elif "-" in age_range_item: - from_range = int(min(age_range_item.split('-'))) - to_range = int(max(age_range_item.split('-'))) + from_range = int(min(age_range_item.split("-"))) + to_range = int(max(age_range_item.split("-"))) age_range_total.add(from_range) age_range_total.add(to_range) if age_range_total: - lom_age_range_loader.add_value('fromRange', min(age_range_total)) - lom_age_range_loader.add_value('toRange', max(age_range_total)) - educational.add_value('typicalAgeRange', lom_age_range_loader.load_item()) + lom_age_range_loader.add_value("fromRange", min(age_range_total)) + lom_age_range_loader.add_value("toRange", max(age_range_total)) + educational.add_value("typicalAgeRange", lom_age_range_loader.load_item()) - lom.add_value('educational', educational.load_item()) + lom.add_value("educational", educational.load_item()) classification = LomClassificationItemLoader() - lom.add_value('classification', classification.load_item()) + lom.add_value("classification", classification.load_item()) - base.add_value('lom', lom.load_item()) + base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() - 
vs.add_value('discipline', disciplines) - vs.add_value('intendedEndUserRole', 'teacher') - vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') + vs.add_value("discipline", disciplines) + vs.add_value("intendedEndUserRole", "teacher") + vs.add_value("dataProtectionConformity", "generalDataProtectionRegulation") # see: https://www.embl.de/aboutus/privacy_policy/ - vs.add_value('new_lrt', [Constants.NEW_LRT_MATERIAL, - 'b98c0c8c-5696-4537-82fa-dded7236081e', '0f519bd5-069c-4d32-b6d3-a373ac96724c']) + vs.add_value( + "new_lrt", + [ + Constants.NEW_LRT_MATERIAL, + "b98c0c8c-5696-4537-82fa-dded7236081e", + "0f519bd5-069c-4d32-b6d3-a373ac96724c", + ], + ) # "Artikel und Einzelpublikation", "Fachliche News" - vs.add_value('containsAdvertisement', 'no') - vs.add_value('conditionsOfAccess', 'no_login') - vs.add_value('price', 'no') - base.add_value('valuespaces', vs.load_item()) + vs.add_value("containsAdvertisement", "no") + vs.add_value("conditionsOfAccess", "no_login") + vs.add_value("price", "no") + base.add_value("valuespaces", vs.load_item()) license_loader = LicenseItemLoader() if authors_clean: - license_loader.add_value('author', authors_clean) + license_loader.add_value("author", authors_clean) license_raw: str = response.xpath('//a[@href="/copyright"]/text()').get() # see: https://www.scienceinschool.org/copyright/ # the possible string patterns seem to be either "CC-BY", "CC-BY-NC-SA" or "CC-BY-NC-ND" if license_raw: - if license_raw in self.LICENSE_MAPPING: - license_loader.add_value('url', self.LICENSE_MAPPING.get(license_raw)) - # sometimes there is an additional license description available, which always seems to be in the next - #
-container after the copyright -element: - license_description = response.xpath('//div[child::a[@href="/copyright"]]/following-sibling::div' - '/text()').get() - if license_description: - license_description = w3lib.html.strip_html5_whitespace(license_description) - license_loader.add_value('description', license_description) - else: - # as a fallback, we try to set the raw license string - license_loader.add_value('description', license_raw) + license_mapper = LicenseMapper() + license_internal_mapped: str | None = license_mapper.get_license_internal_key(license_string=license_raw) + if license_internal_mapped: + license_loader.add_value("internal", license_internal_mapped) + # sometimes there is an additional license description available, which always seems to be in the next + #
-container after the copyright -element: + license_description = response.xpath( + '//div[child::a[@href="/copyright"]]/following-sibling::div' "/text()" + ).get() + if license_description: + license_description_stripped = w3lib.html.strip_html5_whitespace(license_description) + if license_description_stripped: + license_description_mapped: str | None = license_mapper.get_license_internal_key( + license_description_stripped + ) + if license_description_mapped and not license_internal_mapped: + license_loader.replace_value("internal", license_description_mapped) + license_loader.add_value("description", license_description) + else: + # as a fallback, we try to set the raw license string + license_loader.add_value("description", license_raw) # noinspection DuplicatedCode - base.add_value('license', license_loader.load_item()) + base.add_value("license", license_loader.load_item()) permissions = super().getPermissions(response) - base.add_value('permissions', permissions.load_item()) + base.add_value("permissions", permissions.load_item()) response_loader = super().mapResponse(response) - base.add_value('response', response_loader.load_item()) + base.add_value("response", response_loader.load_item()) yield base.load_item() From 38bb5f8430a3c34e8c61f1e77a077e2dcc61b060 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 2 Aug 2023 17:37:05 +0200 Subject: [PATCH 328/590] style: code formatting (DocStrings) - fix: weak warning (too many blank lines) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/items.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/converter/items.py b/converter/items.py index e688a3f5..3fa5db47 100644 --- a/converter/items.py +++ b/converter/items.py @@ -19,7 +19,7 @@ def replace_processor(value): class JoinMultivalues(object): - def __init__(self, separator=u" "): + def __init__(self, separator=" "): self.separator = separator def __call__(self, values): @@ -34,12 +34,14 @@ class MutlilangItem(Item): class LomGeneralItem(Item): """ General requirements: + - 'description' - 'keyword' - 'title' (If neither 'description' nor 'keyword' are provided, the whole item gets dropped by the pipeline.) """ + aggregationLevel = Field() """Corresponding edu-sharing property: 'cclom:aggregationlevel'""" coverage = Field() @@ -72,6 +74,7 @@ class LomLifecycleItem(Item): The role 'unknown' is used for contributors in an unknown capacity ("Mitarbeiter"). """ + date = Field() """The (publication) date of a contribution. Date values will be automatically transformed/parsed. Corresponding edu-sharing property: 'ccm:published_date'""" @@ -100,7 +103,6 @@ class LomLifecycleItem(Item): Values will be written into the vCard namespace 'X-Wikidata'.""" - class LomTechnicalItem(Item): duration = Field() """Duration of the element (e.g. for video or audio content). Supported formats for automatic transforming include @@ -138,10 +140,12 @@ class LomEducationalItem(Item): by "ValuespaceItem" instead because of vocabularies which need to be mapped. 
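The `LomGeneralItem` requirement noted above (if neither 'description' nor 'keyword' is provided, the whole item gets dropped by the pipeline) is easiest to see from the loader side. A minimal sketch using the `LomGeneralItemloader` that the spiders in this repository import from `converter.items`; the literal values are placeholders:

```python
from converter.items import LomGeneralItemloader

general = LomGeneralItemloader()
general.add_value("identifier", "https://example.org/material/123")  # placeholder URL
general.add_value("title", "Example material title")
general.add_value("keyword", ["example keyword", "another keyword"])
general.add_value("description", "Short description, so the item is not dropped by the pipeline.")
general_item = general.load_item()
```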
Please DO NOT use/fill the following fields here in "educational", but rather use them in ValuespaceItem: + - intendedEndUserRole (see: 'valuespaces.intendedEndUserRole') - learningResourceType (see: 'valuespaces.learningResourceType') - context (see: 'valuespaces.educationalContext') """ + description = Field() # ToDo: 'description' isn't mapped to any field in edu-sharing difficulty = Field() @@ -177,6 +181,7 @@ class LomClassificationItem(Item): LOM "Classification"-specific metadata. (see: LOM-DE specifications: "Classification"-category) """ + cost = Field() # ToDo: no equivalent property in edu-sharing, might be obsolete (see: 'valuespaces.price') description = Field() @@ -194,6 +199,7 @@ class LomBaseItem(Item): LomBaseItem provides the nested structure for LOM (Sub-)Elements. No metadata is saved here. (Please check the specific class definitions of the nested Items for more information.) """ + classification = Field(serializer=LomClassificationItem) educational = Field(serializer=LomEducationalItem) general = Field(serializer=LomGeneralItem) @@ -206,6 +212,7 @@ class ResponseItem(Item): """ Attributes of ResponseItem are populated by either Playwright or Splash when an item is processed by the pipelines. """ + cookies = Field() headers = Field() har = Field() @@ -220,6 +227,7 @@ class ValuespaceItem(Item): Values provided for attributes of ValuespaceItem are mapped against OEH (SKOS) vocabularies before saving them to edu-sharing. (see: https://github.com/openeduhub/oeh-metadata-vocabs) """ + accessibilitySummary = Field(output_processor=JoinMultivalues()) """Corresponding edu-sharing property: 'ccm:accessibilitysummary'""" conditionsOfAccess = Field(output_processor=JoinMultivalues()) @@ -260,6 +268,7 @@ class LicenseItem(Item): properties. To make sure that licenses are properly recognized by edu-sharing, make sure to provide a valid 'url'-string and if that's not possible, set a correct 'internal'-constant. (see: constants.py) """ + author = Field(output_processor=JoinMultivalues()) """An author freetext string. (Basically, how the author should be named in case this is a 'CC-BY'-license. Corresponding edu-sharing property: 'ccm:author_freetext'""" @@ -285,6 +294,7 @@ class PermissionItem(Item): """ PermissionItem sets the edu-sharing permissions for a crawled item. """ + autoCreateGroups = Field() """Should global groups be created if they don't already exist""" autoCreateMediacenters = Field() @@ -312,6 +322,7 @@ class BaseItem(Item): - ResponseItem - ValuespaceItem """ + binary = Field() """Binary data which should be uploaded to edu-sharing (= raw data, e.g. 
".pdf"-files).""" collection = Field(output_processor=JoinMultivalues()) From 71283b39ba74483a8ce4a095be9aff04b4bab093 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 2 Aug 2023 17:38:55 +0200 Subject: [PATCH 329/590] change: 'black' line length setting to 120 - pyCharm uses a default line-length of 120, so we might as well use the same value with 'black' Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b7518c14..81befe3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ ] [tool.black] -line-length = 88 +line-length = 120 target-version = ['py310'] include = '\.pyi?$' exclude = ''' From 9fc3b5ebeb8dfc5df9a51411cd6699b66f51fa89 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 3 Aug 2023 11:16:35 +0200 Subject: [PATCH 330/590] docs: update Readme.md Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- Readme.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Readme.md b/Readme.md index ec54fd58..0764a635 100644 --- a/Readme.md +++ b/Readme.md @@ -22,9 +22,9 @@ python3 -m venv .venv - Step 1: Make sure that you have [Poetry](https://python-poetry.org) v1.5.0+ installed - Step 2: Open your terminal in the project root directory: - - Step 2.1 (this is an optional, strictly personal preference): If you want to have your `.venv` to be created in the project root directory: + - Step 2.1: (this is an optional, strictly personal preference) If you want to have your `.venv` to be created in the project root directory: - `poetry config virtualenvs.in-project true` -- Step 3: Install dependencies according to `pyproject.toml`-instructions: `poetry install` +- Step 3: Install dependencies (according to `pyproject.toml`) by running: `poetry install` ## Step 2: Project Setup - required Docker Containers If you have Docker installed, use `docker-compose up` to start up the multi-container for `Splash` and `Playwright`-integration. @@ -45,10 +45,10 @@ As a last step, set up your config variables by copying the `.env.example`-file ```bash git clone https://github.com/openeduhub/oeh-search-etl cd oeh-search-etl -cp .env.example .env +cp converter/.env.example .env # modify .env with your edu sharing instance -docker compose build scrapy export CRAWLER=your_crawler_id_spider # i.e. wirlernenonline_spider +docker compose build scrapy docker compose up ``` From fc2db79967beb8e348214c41a73b6f11d1f49383 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 3 Aug 2023 11:45:25 +0200 Subject: [PATCH 331/590] chore: move 'flake8' from dev-dependencies to main dependency list - chore: update dependencies / poetry.lock / pyproject.toml / requirements.txt accordingly Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- poetry.lock | 64 ++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- requirements.txt | 10 +++++--- 3 files changed, 40 insertions(+), 36 deletions(-) diff --git a/poetry.lock b/poetry.lock index c06cd92f..a8805cc7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -325,34 +325,34 @@ files = [ [[package]] name = "cryptography" -version = "41.0.2" +version = "41.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, - {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, - {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, - {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, - {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, - {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, - {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, - {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, - {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, - {file = "cryptography-41.0.2.tar.gz", hash = 
"sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, + {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, + {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, + {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, + {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, + {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, + {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, + {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, + {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, + {file = "cryptography-41.0.3.tar.gz", hash = 
"sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, ] [package.dependencies] @@ -403,13 +403,13 @@ langdetect = ["langdetect"] [[package]] name = "django" -version = "4.2.3" +version = "4.2.4" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.3-py3-none-any.whl", hash = "sha256:f7c7852a5ac5a3da5a8d5b35cc6168f31b605971441798dac845f17ca8028039"}, - {file = "Django-4.2.3.tar.gz", hash = "sha256:45a747e1c5b3d6df1b141b1481e193b033fd1fdbda3ff52677dc81afdaacbaed"}, + {file = "Django-4.2.4-py3-none-any.whl", hash = "sha256:860ae6a138a238fc4f22c99b52f3ead982bb4b1aad8c0122bcd8c8a3a02e409d"}, + {file = "Django-4.2.4.tar.gz", hash = "sha256:7e4225ec065e0f354ccf7349a22d209de09cc1c074832be9eb84c51c1799c432"}, ] [package.dependencies] @@ -1283,13 +1283,13 @@ files = [ [[package]] name = "rdflib" -version = "6.3.2" +version = "7.0.0" description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.8.1,<4.0.0" files = [ - {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, - {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, + {file = "rdflib-7.0.0-py3-none-any.whl", hash = "sha256:0438920912a642c866a513de6fe8a0001bd86ef975057d6962c79ce4771687cd"}, + {file = "rdflib-7.0.0.tar.gz", hash = "sha256:9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae"}, ] [package.dependencies] @@ -1812,4 +1812,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "7b092b22bc516d7063cd30f8ccb835c658a38a9e9d4e8eb93296ee40241877bf" +content-hash = "a1d4fcb83cb185c2ee82aacb0bed152aecbf6c01b2cc1f384dba999876ed5841" diff --git a/pyproject.toml b/pyproject.toml index 81befe3a..44b502c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,7 @@ black = "^23.7.0" certifi="2023.7.22" dateparser="1.1.8" extruct="0.16.0" +flake8 = "^6.1.0" html2text="2020.1.16" jmespath="1.0.1" image = "1.5.33" @@ -88,7 +89,6 @@ w3lib="2.1.1" xmltodict="0.13.0" [tool.poetry.group.dev.dependencies] -flake8 = "^6.1.0" [build-system] requires = ["poetry-core"] diff --git a/requirements.txt b/requirements.txt index 7513d4b0..0ead161b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,13 +9,14 @@ charset-normalizer==3.2.0 ; python_version >= "3.10" and python_version < "4.0" click==8.1.6 ; python_version >= "3.10" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") constantly==15.1.0 ; python_version >= "3.10" and python_version < "4.0" -cryptography==41.0.2 ; python_version >= "3.10" and python_version < "4.0" +cryptography==41.0.3 ; python_version >= "3.10" and python_version < "4.0" cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" dateparser==1.1.8 ; python_version >= "3.10" and python_version < "4.0" -django==4.2.3 ; python_version >= "3.10" and python_version < "4.0" +django==4.2.4 ; python_version >= "3.10" and python_version < "4.0" exceptiongroup==1.1.2 ; python_version >= "3.10" and python_version < "3.11" extruct==0.16.0 ; python_version >= "3.10" and python_version < "4.0" 
filelock==3.12.2 ; python_version >= "3.10" and python_version < "4.0" +flake8==6.1.0 ; python_version >= "3.10" and python_version < "4.0" greenlet==2.0.2 ; python_version >= "3.10" and python_version < "4.0" html-text==0.5.2 ; python_version >= "3.10" and python_version < "4.0" html2text==2020.1.16 ; python_version >= "3.10" and python_version < "4.0" @@ -31,6 +32,7 @@ itemloaders==1.1.0 ; python_version >= "3.10" and python_version < "4.0" jmespath==1.0.1 ; python_version >= "3.10" and python_version < "4.0" jstyleson==0.0.2 ; python_version >= "3.10" and python_version < "4.0" lxml==4.9.3 ; python_version >= "3.10" and python_version < "4.0" +mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" mf2py==1.1.3 ; python_version >= "3.10" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" @@ -44,9 +46,11 @@ pluggy==1.2.0 ; python_version >= "3.10" and python_version < "4.0" protego==0.2.1 ; python_version >= "3.10" and python_version < "4.0" pyasn1-modules==0.3.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1==0.5.0 ; python_version >= "3.10" and python_version < "4.0" +pycodestyle==2.11.0 ; python_version >= "3.10" and python_version < "4.0" pycparser==2.21 ; python_version >= "3.10" and python_version < "4.0" pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" pyee==9.0.4 ; python_version >= "3.10" and python_version < "4.0" +pyflakes==3.1.0 ; python_version >= "3.10" and python_version < "4.0" pyopenssl==23.2.0 ; python_version >= "3.10" and python_version < "4.0" pyparsing==3.1.1 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" @@ -56,7 +60,7 @@ python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" python-dotenv==1.0.0 ; python_version >= "3.10" and python_version < "4.0" pytz==2023.3 ; python_version >= "3.10" and python_version < "4.0" queuelib==1.6.2 ; python_version >= "3.10" and python_version < "4.0" -rdflib==6.3.2 ; python_version >= "3.10" and python_version < "4.0" +rdflib==7.0.0 ; python_version >= "3.10" and python_version < "4.0" regex==2023.6.3 ; python_version >= "3.10" and python_version < "4.0" requests-file==1.5.1 ; python_version >= "3.10" and python_version < "4.0" requests==2.31.0 ; python_version >= "3.10" and python_version < "4.0" From 17bd082700fa515f8e073799906adbb5b2ae6b9f Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 3 Aug 2023 15:04:04 +0200 Subject: [PATCH 332/590] lehreronline_spider v0.0.7 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit performance and maintenance: - performance: improve crawler performance by skipping parse()-method call if items would be dropped (later) anyway -- overwrites getId() and hasChanged() methods - performance: replace unnecessary "dict.keys()"-calls - style: code formatting (via black) - refactor: "getHash"-method - add: drop_item_flag (implements a method that checks if an item should be checked or not) -- (this implementation might be used in the future to refactor the same functionality in LomBase.parse() in a cleaner way) metadata changes: - temporary workaround: hard-coded 'intendedEndUserRole'-value for 'teacher' until ITSJOINTLY-332 is fixed (altLabels aren't complete 
in index.json) - add: mappings according to Romy's suggestions for "Lehrer-Begleitheft", "Schülerheft", "Software", "Webquest" - feat: collect multiple 'technical.location' values if the resolved URL might be different from the one received by the API Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/lehreronline_spider.py | 470 +++++++++++++---------- 1 file changed, 277 insertions(+), 193 deletions(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index fd988aae..c01051f4 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -1,3 +1,4 @@ +import logging from datetime import datetime import scrapy.selector.unified @@ -5,9 +6,17 @@ from scrapy.spiders import XMLFeedSpider from converter.constants import Constants -from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, \ - LicenseItemLoader +from converter.es_connector import EduSharing +from converter.items import ( + BaseItemLoader, + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, +) from converter.spiders.base_classes import LomBase @@ -15,24 +24,24 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): name = "lehreronline_spider" friendlyName = "Lehrer-Online" start_urls = [ - "https://www.lehrer-online.de/?type=3030&limit=10000" + "https://www.lehrer-online.de/?type=3030&limit=10" # the limit parameter controls the amount of results PER CATEGORY (NOT the total amount of results) # API response with a "limit"-value set to 10.000 might take more than 90s (17.7 MB, 5912 URLs to crawl) ] - version = "0.0.6" # last update: 2023-02-03 + version = "0.0.7" # last update: 2023-08-03 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, # "DUPEFILTER_DEBUG": True } - iterator = 'iternodes' - itertag = 'datensatz' + iterator = "iternodes" + itertag = "datensatz" MAPPING_EDU_CONTEXT = { - 'Elementarbildung': 'Elementarbereich', - 'Fort- und Weiterbildung': 'Fortbildung', - 'Spezieller Förderbedarf': 'Förderschule' + "Elementarbildung": "elementarbereich", + "Fort- und Weiterbildung": "fortbildung", + "Spezieller Förderbedarf": "foerderschule", } MAPPING_LO_LRT_TO_NEW_LRT = { @@ -49,10 +58,11 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): "Experiment": "4735c61a-429b-4909-9f3c-cbf975e2aa0e", # "Experiment" "Folien": "92c7a50c-6243-45d9-8b11-e79cbbda6305", # "Präsentation" # "Hausaufgabe": "", - "Interaktives Quiz": "a120ce77-59f5-4564-8d49-73f4a0de1594", "Lernen, Quiz und Spiel" + "Interaktives Quiz": "a120ce77-59f5-4564-8d49-73f4a0de1594", + "Lernen, Quiz und Spiel" # "Internetressource": "", "Kurs": "4e16015a-7862-49ed-9b5e-6c1c6e0ffcd1", # "Kurs" - # "Lehrer-Begleitheft": "", + "Lehrer-Begleitheft": "6a15628c-0e59-43e3-9fc5-9a7f7fa261c4", # "Skript, Handout und Handreichung" "Lehrerhandreichung": "6a15628c-0e59-43e3-9fc5-9a7f7fa261c4", # "Skript, Handout und Handreichung" # "Lehrerheft": "", "Lernkontrolle": "9cf3c183-f37c-4b6b-8beb-65f530595dff", # "Klausur, Klassenarbeit und Test" @@ -67,78 +77,94 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): "Recherche-Auftrag": "1cac68e6-dafe-4ce4-a52f-f33cde26da59", # "Recherche und Lernauftrag" "Rollenspiel": "ac82dc13-3be1-464d-9cdc-88e608d99c39", # "Rollenspiel" 
"Schaubild": "1dc4ed81-718c-4b76-86cb-947a86875973", # "Veranschaulichung, Schaubild und Tafelbild" - # "Schülerheft": "", + "Schülerheft": "a33ef73d-9210-4305-97f9-7357bbf43486", # Übungsmaterial # "Schülermagazin": "", - # "Software": "", + "Software": "cefccf75-cba3-427d-9a0f-35b4fedcbba1", # Tool "Stationenlernen": "ee738203-44af-4150-986f-ef01fb883f00", # "Stationenlernen" "Tondokument": "ec2682af-08a9-4ab1-a324-9dca5151e99f", # "Audio" "Video": "7a6e9608-2554-4981-95dc-47ab9ba924de", # Video - # "Webquest": "", + "Webquest": "1cac68e6-dafe-4ce4-a52f-f33cde26da59", # "Recherche- und Lernauftrag" "entdeckendes Lernen": "9a86beb5-1a65-48ca-99c8-e8c789cfe2f8", # "Entdeckendes Lernen (Lehr- und Lernmaterial)" # "kooperatives Lernen": "", "Übung": "a33ef73d-9210-4305-97f9-7357bbf43486", # Übungsmaterial } MAPPING_MATERIAL_TYPE_TO_NEW_LRT = { - 'Blog': '5204fc81-5dac-4cc4-a28b-aad5c241fa19', # "Webblog (dynamisch)" - 'Cartoon': '667f5063-70b9-400c-b1f7-7702ec9487f1', # "Cartoon, Comic" - 'Dossier': '7381f17f-50a6-4ce1-b3a0-9d85a482eec0', # "Unterrichtsplanung" + "Blog": "5204fc81-5dac-4cc4-a28b-aad5c241fa19", # "Webblog (dynamisch)" + "Cartoon": "667f5063-70b9-400c-b1f7-7702ec9487f1", # "Cartoon, Comic" + "Dossier": "7381f17f-50a6-4ce1-b3a0-9d85a482eec0", # "Unterrichtsplanung" # Dossiers are hard to categorize, they typically consist of several types (news, "Unterrichtseinheit" etc.) # that are put together as a "Fokusthema", similar to how Umwelt-im-Unterricht.de groups together several # articles into a "Thema der Woche" - 'Fachartikel': 'b98c0c8c-5696-4537-82fa-dded7236081e', # "Artikel und Einzelpublikation" - 'Fundstueck': 'dc5763ab-6f47-4aa3-9ff3-1303efbeef6e', # "Nachrichten und Neuigkeiten - 'Interaktives': '4665caac-99d7-4da3-b9fb-498d8ece034f', # "Interaktives Medium" - 'Kopiervorlage': '6a15628c-0e59-43e3-9fc5-9a7f7fa261c4', # "Skript, Handout und Handreichung" - 'News': 'dc5763ab-6f47-4aa3-9ff3-1303efbeef6e', # "Nachrichten und Neuigkeiten" - 'Rechtsfall': 'dc5763ab-6f47-4aa3-9ff3-1303efbeef6e', # "Nachrichten und Neuigkeiten" + "Fachartikel": "b98c0c8c-5696-4537-82fa-dded7236081e", # "Artikel und Einzelpublikation" + "Fundstueck": "dc5763ab-6f47-4aa3-9ff3-1303efbeef6e", # "Nachrichten und Neuigkeiten + "Interaktives": "4665caac-99d7-4da3-b9fb-498d8ece034f", # "Interaktives Medium" + "Kopiervorlage": "6a15628c-0e59-43e3-9fc5-9a7f7fa261c4", # "Skript, Handout und Handreichung" + "News": "dc5763ab-6f47-4aa3-9ff3-1303efbeef6e", # "Nachrichten und Neuigkeiten" + "Rechtsfall": "dc5763ab-6f47-4aa3-9ff3-1303efbeef6e", # "Nachrichten und Neuigkeiten" # ToDo: could this be mapped to either "Fachliche News", "Alltags News" or "Pädagogische News"? 
- 'Unterrichtseinheit': 'ef58097d-c1de-4e6a-b4da-6f10e3716d3d', # "Unterrichtseinheit" - 'Videos': '7a6e9608-2554-4981-95dc-47ab9ba924de' # "Video (Material)" + "Unterrichtseinheit": "ef58097d-c1de-4e6a-b4da-6f10e3716d3d", # "Unterrichtseinheit" + "Videos": "7a6e9608-2554-4981-95dc-47ab9ba924de", # "Video (Material)" } MAPPING_RIGHTS_TO_URLS = { - 'CC-by': Constants.LICENSE_CC_BY_30, - 'CC-by-nc': Constants.LICENSE_CC_BY_NC_30, - 'CC-by-nc-nd': Constants.LICENSE_CC_BY_NC_ND_30, - 'CC-by-nc-nd 4.0': Constants.LICENSE_CC_BY_NC_ND_40, - 'CC-by-nc-sa': Constants.LICENSE_CC_BY_NC_SA_30, - 'CC-by-nc-sa 4.0': Constants.LICENSE_CC_BY_NC_SA_40, - 'CC-by-nd': Constants.LICENSE_CC_BY_ND_30, - 'CC-by-sa': Constants.LICENSE_CC_BY_SA_30, - 'CC-by-sa 4.0': Constants.LICENSE_CC_BY_SA_40, + "CC-by": Constants.LICENSE_CC_BY_30, + "CC-by-nc": Constants.LICENSE_CC_BY_NC_30, + "CC-by-nc-nd": Constants.LICENSE_CC_BY_NC_ND_30, + "CC-by-nc-nd 4.0": Constants.LICENSE_CC_BY_NC_ND_40, + "CC-by-nc-sa": Constants.LICENSE_CC_BY_NC_SA_30, + "CC-by-nc-sa 4.0": Constants.LICENSE_CC_BY_NC_SA_40, + "CC-by-nd": Constants.LICENSE_CC_BY_ND_30, + "CC-by-sa": Constants.LICENSE_CC_BY_SA_30, + "CC-by-sa 4.0": Constants.LICENSE_CC_BY_SA_40, } MAPPING_FACH_TO_DISCIPLINES = { - 'Arbeitsschutz und Arbeitssicherheit': 'Arbeitssicherheit', - 'Berufs- und Arbeitswelt': 'Arbeitslehre', - 'Berufsvorbereitung, Berufsalltag, Arbeitsrecht': 'Arbeitslehre', - 'Ernährung und Gesundheit': ['Ernährung und Hauswirtschaft', 'Gesundheit'], - 'Fächerübergreifender Unterricht': 'Allgemein', - 'Geschichte, Politik und Gesellschaftswissenschaften': ['Geschichte', 'Politik', 'Gesellschaftskunde'], - 'Gesundheit und Gesundheitsschutz': 'Gesundheit', - 'Informationstechnik': 'Informatik', - 'Klima, Umwelt, Nachhaltigkeit': 'Nachhaltigkeit', - 'MINT: Mathematik, Informatik, Naturwissenschaften und Technik': 'MINT', - 'Natur und Umwelt': ['Environmental education', 'Homeland lessons'], # Umwelterziehung, Sachunterricht - 'Religion und Ethik': ['Religion', 'Ethik'], - 'Sport und Bewegung': 'Sport', - 'SoWi': ['Social education', 'Economics'], - 'WiSo': ['Economics', 'Social education'], - 'Wirtschaftslehre': 'Economics' + "Arbeitsschutz und Arbeitssicherheit": "Arbeitssicherheit", + "Berufs- und Arbeitswelt": "Arbeitslehre", + "Berufsvorbereitung, Berufsalltag, Arbeitsrecht": "Arbeitslehre", + "Ernährung und Gesundheit": ["Ernährung und Hauswirtschaft", "Gesundheit"], + "Fächerübergreifender Unterricht": "Allgemein", + "Geschichte, Politik und Gesellschaftswissenschaften": ["Geschichte", "Politik", "Gesellschaftskunde"], + "Gesundheit und Gesundheitsschutz": "Gesundheit", + "Informationstechnik": "Informatik", + "Klima, Umwelt, Nachhaltigkeit": "Nachhaltigkeit", + "MINT: Mathematik, Informatik, Naturwissenschaften und Technik": "MINT", + "Natur und Umwelt": ["Environmental education", "Homeland lessons"], # Umwelterziehung, Sachunterricht + "Religion und Ethik": ["Religion", "Ethik"], + "Sport und Bewegung": "Sport", + "SoWi": ["Social education", "Economics"], + "WiSo": ["Economics", "Social education"], + "Wirtschaftslehre": "Economics", } - def getId(self, response=None) -> str: - return response.url - - def getHash(self, response=None) -> str: - pass + def getId(self, response=None, **kwargs) -> str: + # By the time we call this method, there is no usable 'response.url' available (the URL would point to the API + # for each item), which is why we need to use the metadata dictionary from the API Response here. 
+ try: + material_url = kwargs["kwargs"]["metadata_dict"]["url"] + return material_url + except KeyError: + logging.error(f"'getId'-method could not retrieve metadata_dict['url']. Falling back to 'response.url'") + + def getHash(self, response=None, **kwargs) -> str: + if "kwargs" in kwargs: + if "metadata_dict" in kwargs["kwargs"]: + metadata_dict: dict = kwargs["kwargs"]["metadata_dict"] + hash_value: str = f"{metadata_dict.get('date_published')}v{self.version}" + return hash_value + else: + logging.error( + f"Could not create 'hash' for item. (Failed to retrieve 'metadata_dict' in kwargs of " + f"getHash()-method.)" + ) def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> scrapy.Request: """ Parses the Lehrer-Online API for individual -nodes and yields URLs found within -tags - to the parse()-method. Additionally this method builds a "cleaned up" metadata_dict that gets handed over within - cb_kwargs. + to the parse()-method. Additionally, this method builds a "cleaned up" metadata_dict that gets handed over + within cb_kwargs. + :param response: :param selector: scrapy.selector.unified.Selector :return: scrapy.Request @@ -182,87 +208,87 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc metadata_dict = dict() new_lrts = set() - title_raw: str = selector.xpath('titel/text()').get() + title_raw: str = selector.xpath("titel/text()").get() # self.logger.info(f"the title is: {title_raw}") if title_raw: - metadata_dict.update({'title': title_raw}) + metadata_dict.update({"title": title_raw}) - in_language: str = selector.xpath('sprache/text()').get() + in_language: str = selector.xpath("sprache/text()").get() if in_language: if in_language == "Deutsch": - metadata_dict.update({'language': 'de'}) + metadata_dict.update({"language": "de"}) - description_short: str = selector.xpath('beschreibung/text()').get() + description_short: str = selector.xpath("beschreibung/text()").get() if description_short: - metadata_dict.update({'description_short': description_short}) + metadata_dict.update({"description_short": description_short}) - description_long: str = selector.xpath('beschreibung_lang/text()').get() + description_long: str = selector.xpath("beschreibung_lang/text()").get() if description_long: description_long = w3lib.html.replace_tags(description_long) description_long = w3lib.html.replace_entities(description_long) - metadata_dict.update({'description_long': description_long}) + metadata_dict.update({"description_long": description_long}) - thumbnail_url: str = selector.xpath('bild_url/text()').get() + thumbnail_url: str = selector.xpath("bild_url/text()").get() # ToDo: the "bild_url"-field is rarely useful and only appears in <5% of items, revisit this later if thumbnail_url: - metadata_dict.update({'thumbnail_url': thumbnail_url}) + metadata_dict.update({"thumbnail_url": thumbnail_url}) - keyword_list: list = selector.xpath('schlagwort/text()').getall() + keyword_list: list = selector.xpath("schlagwort/text()").getall() if keyword_list: - metadata_dict.update({'keywords': keyword_list}) + metadata_dict.update({"keywords": keyword_list}) # self.logger.info(f"the keywords are: {keyword_list}") - with_costs_string: str = selector.xpath('kostenpflichtig/text()').get() + with_costs_string: str = selector.xpath("kostenpflichtig/text()").get() # with_costs_string can be either "ja" or "nein" if with_costs_string == "ja": - metadata_dict.update({'price': 'yes'}) + metadata_dict.update({"price": "yes"}) elif with_costs_string == "nein": - 
metadata_dict.update({'price': 'no'}) + metadata_dict.update({"price": "no"}) - author_raw: str = selector.xpath('autor/text()').get() + author_raw: str = selector.xpath("autor/text()").get() if author_raw: - metadata_dict.update({'author': author_raw}) + metadata_dict.update({"author": author_raw}) - author_email: str = selector.xpath('autor_email/text()').get() + author_email: str = selector.xpath("autor_email/text()").get() if author_email: - metadata_dict.update({'author_email': author_email}) + metadata_dict.update({"author_email": author_email}) - provider_address: str = selector.xpath('anbieter_herkunft/text()').get() + provider_address: str = selector.xpath("anbieter_herkunft/text()").get() # provider_address is (currently?) always the address found in the Impressum if provider_address: - metadata_dict.update({'provider_address': provider_address}) - provider_name: str = selector.xpath('einsteller/text()').get() + metadata_dict.update({"provider_address": provider_address}) + provider_name: str = selector.xpath("einsteller/text()").get() # the value for "einsteller" is currently "Redaktion Lehrer-Online" in 100% of cases if provider_name: - metadata_dict.update({'provider_name': provider_name}) - provider_email: str = selector.xpath('einsteller_email/text()').get() + metadata_dict.update({"provider_name": provider_name}) + provider_email: str = selector.xpath("einsteller_email/text()").get() # the value for "einsteller_email" is currently "redaktion@lehrer-online.de" in 100% of cases if provider_email: - metadata_dict.update({'provider_email': provider_email}) + metadata_dict.update({"provider_email": provider_email}) # both last_modified and date_published will be surrounded by lots of whitespace, tabs and newlines # therefore we need to clean up the string before saving it into our dictionary - last_modified: str = selector.xpath('letzte_aenderung/text()').get() + last_modified: str = selector.xpath("letzte_aenderung/text()").get() if last_modified is not None: last_modified = w3lib.html.strip_html5_whitespace(last_modified) if last_modified: # last_modified is not always available, sometimes it's an empty string - last_modified_datetime: datetime = datetime.strptime(last_modified, '%Y-%m-%d') + last_modified_datetime: datetime = datetime.strptime(last_modified, "%Y-%m-%d") last_modified = last_modified_datetime.isoformat() - metadata_dict.update({'last_modified': last_modified}) + metadata_dict.update({"last_modified": last_modified}) - date_published: str = selector.xpath('publikationsdatum/text()').get() + date_published: str = selector.xpath("publikationsdatum/text()").get() if date_published is not None: date_published = w3lib.html.strip_html5_whitespace(date_published) if date_published: # date_published is not always available in the API, but when it is, it follows a strict syntax date_published: str = w3lib.html.strip_html5_whitespace(date_published) - date_published_datetime: datetime = datetime.strptime(date_published, '%Y-%m-%d') + date_published_datetime: datetime = datetime.strptime(date_published, "%Y-%m-%d") date_published = date_published_datetime.isoformat() - metadata_dict.update({'date_published': date_published}) + metadata_dict.update({"date_published": date_published}) else: # since date_published is used for our hash, we need this fallback in case it isn't available in the API - metadata_dict.update({'date_published': datetime.now().isoformat()}) + metadata_dict.update({"date_published": datetime.now().isoformat()}) # ToDo: there is a -Element, that is (in 
the API) currently empty 100% of the time, check again # during the next crawler-update if this data is available in the API by then @@ -271,7 +297,7 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # metadata_dict.update({'expiration_date': expiration_date}) # can either be completely empty or there can be several -elements within a - disciplines_or_additional_keywords: list = selector.xpath('fach/text()').getall() + disciplines_or_additional_keywords: list = selector.xpath("fach/text()").getall() individual_disciplines_or_keywords = set() for potential_discipline_or_keyword in disciplines_or_additional_keywords: # to make mapping more precise, we're separating strings like "Politik / WiSo / SoWi / Wirtschaft" into its @@ -304,14 +330,14 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc # values that don't need to be mapped (or can't be mapped) end up in the additional keywords list # once we iterated through all -elements, we can set/update the actual fields in metadata_dict if disciplines_mapped: - metadata_dict.update({'discipline': list(disciplines_mapped)}) + metadata_dict.update({"discipline": list(disciplines_mapped)}) if additional_keywords_from_disciplines: keyword_set = set(keyword_list) keyword_set.update(additional_keywords_from_disciplines) keyword_list = list(keyword_set) - metadata_dict.update({'keywords': keyword_list}) + metadata_dict.update({"keywords": keyword_list}) - educational_context_raw: str = selector.xpath('bildungsebene/text()').get() + educational_context_raw: str = selector.xpath("bildungsebene/text()").get() educational_context_cleaned_up = set() if educational_context_raw is not None: # if this metadata-field is left empty by Lehrer-Online, it will hold a string full of whitespaces, e.g. 
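The 'fach'-handling above follows a pattern that recurs throughout this crawler: split combined subject strings on their separator, look each part up in a mapping table, and keep anything unmapped as an additional keyword. A condensed sketch of that pattern (not the spider's literal implementation), with `MAPPING_FACH_TO_DISCIPLINES` from above as the mapping:

```python
def map_disciplines(raw_values: list[str], mapping: dict) -> tuple[set, set]:
    """Split combined subject strings, map known parts to disciplines, keep the rest as keywords."""
    disciplines: set = set()
    extra_keywords: set = set()
    for raw_value in raw_values:
        parts = [part.strip() for part in raw_value.split("/")] if "/" in raw_value else [raw_value]
        for part in parts:
            mapped = mapping.get(part)
            if mapped is None:
                extra_keywords.add(part)
            elif isinstance(mapped, list):
                disciplines.update(mapped)
            else:
                disciplines.add(mapped)
    return disciplines, extra_keywords


# map_disciplines(["Geschichte, Politik und Gesellschaftswissenschaften", "Astronomie"], MAPPING_FACH_TO_DISCIPLINES)
# -> ({'Geschichte', 'Politik', 'Gesellschaftskunde'}, {'Astronomie'})
```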
@@ -337,30 +363,30 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc educational_context.append(edu_context_temp) else: educational_context.append(edu_context_item) - metadata_dict.update({'educational_context': educational_context}) + metadata_dict.update({"educational_context": educational_context}) - material_type_raw: str = selector.xpath('material_type/text()').get() + material_type_raw: str = selector.xpath("material_type/text()").get() if material_type_raw: if material_type_raw in self.MAPPING_MATERIAL_TYPE_TO_NEW_LRT: new_lrt = self.MAPPING_MATERIAL_TYPE_TO_NEW_LRT.get(material_type_raw) new_lrts.add(new_lrt) - metadata_dict.update({'new_lrt': new_lrt}) - metadata_dict.update({'material_type_raw': material_type_raw}) + metadata_dict.update({"new_lrt": new_lrt}) + metadata_dict.update({"material_type_raw": material_type_raw}) - material_id_local: str = selector.xpath('material_id_local/text()').get() + material_id_local: str = selector.xpath("material_id_local/text()").get() if material_id_local: # the material_id_local seems to be a stable string (including an uuid) that is suitable for our sourceId - metadata_dict.update({'source_id': material_id_local}) + metadata_dict.update({"source_id": material_id_local}) - material_url: str = selector.xpath('url_ressource/text()').get() + material_url: str = selector.xpath("url_ressource/text()").get() if material_url is not None: material_url = w3lib.html.strip_html5_whitespace(material_url) if material_url: # checking explicitly for an empty URL-string (2 out of 5688 -tags were empty) # see: https://docs.python.org/3/library/stdtypes.html#truth-value-testing - metadata_dict.update({'url': material_url}) + metadata_dict.update({"url": material_url}) - lrt_raw = selector.xpath('lernressourcentyp/text()').getall() + lrt_raw = selector.xpath("lernressourcentyp/text()").getall() # there can be SEVERAL "lernressourcentyp"-elements per item if lrt_raw: additional_keywords_from_lo_lrt = set() @@ -370,17 +396,17 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc new_lrts.add(new_lrt) else: additional_keywords_from_lo_lrt.add(lrt_possible_value) - metadata_dict.update({'new_lrt': list(new_lrts)}) + metadata_dict.update({"new_lrt": list(new_lrts)}) keyword_set = set(keyword_list) keyword_set.update(additional_keywords_from_lo_lrt) keyword_list = list(keyword_set) - metadata_dict.update({'keywords': keyword_list}) + metadata_dict.update({"keywords": keyword_list}) - intended_end_user_role: str = selector.xpath('zielgruppe/text()').get() + intended_end_user_role: str = selector.xpath("zielgruppe/text()").get() if intended_end_user_role: - metadata_dict.update({'intended_end_user': intended_end_user_role}) + metadata_dict.update({"intended_end_user": intended_end_user_role}) - rights_raw: str = selector.xpath('rechte/text()').get() + rights_raw: str = selector.xpath("rechte/text()").get() if rights_raw: rights_raw: str = w3lib.html.strip_html5_whitespace(rights_raw) if rights_raw: @@ -388,31 +414,31 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc if rights_raw in self.MAPPING_RIGHTS_TO_URLS: license_url = self.MAPPING_RIGHTS_TO_URLS.get(rights_raw) if license_url: - metadata_dict.update({'license_url': license_url}) + metadata_dict.update({"license_url": license_url}) else: - metadata_dict.update({'license_description': rights_raw}) + metadata_dict.update({"license_description": rights_raw}) - free_to_access: str = 
selector.xpath('frei_zugaenglich/text()').get() + free_to_access: str = selector.xpath("frei_zugaenglich/text()").get() # can be either 'ja' or 'nein', but it has a different meaning when "kostenpflichtig"-element is set to "ja": # frei_zugaenglich (ja) & kostenpflichtig (nein) = truly free to access, no log-in required # frei_zugaenglich (nein) & kostenpflichtig (nein) = available for free, but log-in required (free) # frei_zugaenglich (nein) & kostenpflichtig (ja) = login required, paywalled (premium) content if free_to_access == "ja": if metadata_dict.get("price") == "no": - metadata_dict.update({'conditions_of_access': 'no_login'}) - metadata_dict.update({'origin_folder_name': 'free'}) + metadata_dict.update({"conditions_of_access": "no_login"}) + metadata_dict.update({"origin_folder_name": "free"}) elif free_to_access == "nein": if metadata_dict.get("price") == "yes": - metadata_dict.update({'conditions_of_access': 'login'}) - metadata_dict.update({'origin_folder_name': 'premium_only'}) + metadata_dict.update({"conditions_of_access": "login"}) + metadata_dict.update({"origin_folder_name": "premium_only"}) elif metadata_dict.get("price") == "no": - metadata_dict.update({'conditions_of_access': 'login_for_additional_features'}) - metadata_dict.update({'origin_folder_name': 'free_account_required'}) + metadata_dict.update({"conditions_of_access": "login_for_additional_features"}) + metadata_dict.update({"origin_folder_name": "free_account_required"}) # quelle_id currently holds just the abbreviation "LO" for all elements, check again later # quelle_logo_url is different from bild_url, always holds (the same) URL to the Lehrer-Online logo - source_homepage_url: str = selector.xpath('quelle_homepage_url/text()').get() + source_homepage_url: str = selector.xpath("quelle_homepage_url/text()").get() # Lehrer-Online offers several sub-portals to topic-specific materials. Distinction is possible by using the # quelle_homepage_url field in the API. 
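The 'frei_zugaenglich'/'kostenpflichtig' branches above encode a small decision table. A compact restatement as a standalone helper, purely for readability (a sketch; the flag values and returned strings are the ones used by the spider):

```python
def classify_access(free_to_access: str, price: str) -> tuple[str, str]:
    """Map the raw 'frei_zugaenglich' flag ('ja'/'nein') and the derived price flag ('yes'/'no')
    onto (conditions_of_access, origin folder name)."""
    if free_to_access == "ja" and price == "no":
        return "no_login", "free"                                      # truly free, no log-in
    if free_to_access == "nein" and price == "yes":
        return "login", "premium_only"                                 # paywalled (premium) content
    if free_to_access == "nein" and price == "no":
        return "login_for_additional_features", "free_account_required"  # free, but log-in required
    return "", ""  # combinations the crawler does not map
```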
Possible values: # "https://www.lehrer-online.de" (main website) @@ -423,19 +449,73 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc match source_homepage_url: case "https://www.handwerk-macht-schule.de": origin_prefixed = f"Themenportal_Handwerk_-_{metadata_dict.get('origin_folder_name')}" - metadata_dict.update({'origin_folder_name': origin_prefixed}) + metadata_dict.update({"origin_folder_name": origin_prefixed}) case "https://pubertaet.lehrer-online.de": origin_prefixed = f"Themenportal_Pubertaet_-_{metadata_dict.get('origin_folder_name')}" - metadata_dict.update({'origin_folder_name': origin_prefixed}) + metadata_dict.update({"origin_folder_name": origin_prefixed}) # self.logger.info(f"metadata_dict = {metadata_dict}") if material_url: # not every -element actually holds a valid URL to parse for us - we need to skip those empty # strings otherwise the parse_node() method throws an error on that entry (and skips the rest) - yield scrapy.Request(url=material_url, callback=self.parse, cb_kwargs={'metadata_dict': metadata_dict}) + drop_item_flag: bool = self.check_if_item_should_be_dropped(response, metadata_dict=metadata_dict) + if drop_item_flag is True: + # if the flag is set to True, the item will be dropped and no 'scrapy.Request' shall be yielded + # (this reduces the amount of unnecessary HTTP requests) + pass + else: + yield scrapy.Request(url=material_url, callback=self.parse, cb_kwargs={"metadata_dict": metadata_dict}) else: pass + def hasChanged(self, response=None, **kwargs) -> bool: + """Re-implements LomBase's hasChanged()-method for Lehrer-Online.""" + try: + metadata_dict: dict = kwargs["kwargs"]["metadata_dict"] + identifier_url: str = self.getId(response, kwargs={"metadata_dict": metadata_dict}) + hash_str: str = self.getHash(response, kwargs={"metadata_dict": metadata_dict}) + except KeyError as ke: + logging.error("hasChanged()-method could not access 'metadata_dict'.") + raise ke + if self.forceUpdate: + return True + if self.uuid: + if self.getUUID(response) == self.uuid: + logging.info(f"matching requested id: {self.uuid}") + return True + return False + if self.remoteId: + if identifier_url == self.remoteId: + logging.info(f"matching requested id: {self.remoteId}") + return True + return False + db = EduSharing().find_item(identifier_url, self) + changed = db is None or db[1] != hash_str + if not changed: + logging.info(f"Item {identifier_url} (uuid: {db[0]}) has not changed") + return changed + + def check_if_item_should_be_dropped(self, response, metadata_dict: dict) -> bool: + """ + Re-implements the check at the beginning of LomBase parse()-method to determine if an item needs to be dropped. + This could happen for reasons like "the hash has not changed" (= the object has not changed since the last + crawl) or if the 'shouldImport'-attribute was set to False. + + :param response: scrapy Response + :param metadata_dict: metadata dictionary from the Lehrer-Online API + :return: True if item needs to be dropped. 
Defaults to: False + """ + drop_item_flag: bool = False # by default, we assume that all items should be crawled + identifier_url: str = self.getId(response, kwargs={"metadata_dict": metadata_dict}) + hash_str: str = self.getHash(response, kwargs={"metadata_dict": metadata_dict}) + if self.shouldImport(response) is False: + logging.debug(f"Skipping entry {identifier_url} because shouldImport() returned false") + drop_item_flag = True + if identifier_url is not None and hash_str is not None: + if not self.hasChanged(response, kwargs={"metadata_dict": metadata_dict}): + drop_item_flag = True + return drop_item_flag + def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: """ Uses the metadata_dict that was built in parse_node() and extracts additional metadata from the DOM itself to @@ -445,128 +525,132 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: :return: BaseItemLoader """ metadata_dict: dict = kwargs.get("metadata_dict") - # self.logger.info(f"Metadata inside PARSE-METHOD for {response.url}: {metadata_dict.keys()}") base = BaseItemLoader() - base.add_value('sourceId', metadata_dict.get("source_id")) - hash_temp: str = metadata_dict.get("date_published") + self.version - base.add_value('hash', hash_temp) - if "last_modified" in metadata_dict.keys(): + base.add_value("sourceId", self.getId(response, kwargs={"metadata_dict": metadata_dict})) + base.add_value("hash", self.getHash(response, kwargs={"metadata_dict": metadata_dict})) + if "last_modified" in metadata_dict: last_modified = metadata_dict.get("last_modified") - base.add_value('lastModified', last_modified) + base.add_value("lastModified", last_modified) else: # if last_modified is not available in the API, we use the publication date instead as a workaround - base.add_value('lastModified', metadata_dict.get("date_published")) - if "provider_address" in metadata_dict.keys(): - base.add_value('publisher', metadata_dict.get("provider_address")) - if "thumbnail_url" in metadata_dict.keys(): + base.add_value("lastModified", metadata_dict.get("date_published")) + if "provider_address" in metadata_dict: + base.add_value("publisher", metadata_dict.get("provider_address")) + if "thumbnail_url" in metadata_dict: thumbnail_url: str = metadata_dict.get("thumbnail_url") if thumbnail_url: - base.add_value('thumbnail', thumbnail_url) - if "origin_folder_name" in metadata_dict.keys(): - base.replace_value('origin', metadata_dict.get("origin_folder_name")) + base.add_value("thumbnail", thumbnail_url) + if "origin_folder_name" in metadata_dict: + base.replace_value("origin", metadata_dict.get("origin_folder_name")) lom = LomBaseItemloader() general = LomGeneralItemloader() - general.add_value('identifier', response.url) - general.add_value('title', metadata_dict.get("title")) - if "keywords" in metadata_dict.keys(): - general.add_value('keyword', metadata_dict.get("keywords")) - if "description_long" in metadata_dict.keys(): - general.add_value('description', metadata_dict.get("description_long")) - elif "description_short" in metadata_dict.keys(): - general.add_value('description', metadata_dict.get("description_short")) - if "language" in metadata_dict.keys(): - general.add_value('language', metadata_dict.get("language")) + general.add_value("identifier", response.url) + general.add_value("title", metadata_dict.get("title")) + if "keywords" in metadata_dict: + general.add_value("keyword", metadata_dict.get("keywords")) + if "description_long" in metadata_dict: + 
general.add_value("description", metadata_dict.get("description_long")) + elif "description_short" in metadata_dict: + general.add_value("description", metadata_dict.get("description_short")) + if "language" in metadata_dict: + general.add_value("language", metadata_dict.get("language")) # noinspection DuplicatedCode - lom.add_value('general', general.load_item()) + lom.add_value("general", general.load_item()) technical = LomTechnicalItemLoader() - technical.add_value('format', 'text/html') - technical.add_value('location', response.url) - lom.add_value('technical', technical.load_item()) - - lifecycle = LomLifecycleItemloader() - lifecycle.add_value('role', 'publisher') # supported roles: "author" / "editor" / "publisher" - lifecycle.add_value('date', metadata_dict.get("date_published")) - if "provider_name" in metadata_dict.keys(): - lifecycle.add_value('organization', metadata_dict.get("provider_name")) - if "provider_email" in metadata_dict.keys(): - lifecycle.add_value('email', metadata_dict.get("provider_email")) - lom.add_value('lifecycle', lifecycle.load_item()) + technical.add_value("format", "text/html") + if "url" in metadata_dict and metadata_dict["url"] != response.url: + # in case the resolved URL might be different from the URL that we received by the API: save both + material_url: str = metadata_dict["url"] + technical.add_value("location", material_url) + technical.add_value("location", response.url) + lom.add_value("technical", technical.load_item()) + + lifecycle_publisher = LomLifecycleItemloader() + lifecycle_publisher.add_value("role", "publisher") + lifecycle_publisher.add_value("date", metadata_dict.get("date_published")) + if "provider_name" in metadata_dict: + lifecycle_publisher.add_value("organization", metadata_dict.get("provider_name")) + if "provider_email" in metadata_dict: + lifecycle_publisher.add_value("email", metadata_dict.get("provider_email")) + lom.add_value("lifecycle", lifecycle_publisher.load_item()) educational = LomEducationalItemLoader() - if "description_short" in metadata_dict.keys(): - educational.add_value('description', metadata_dict.get("description_short")) + if "description_short" in metadata_dict: + educational.add_value("description", metadata_dict.get("description_short")) # - typicalLearningTime optional - if "language" in metadata_dict.keys(): - educational.add_value('language', metadata_dict.get("language")) + if "language" in metadata_dict: + educational.add_value("language", metadata_dict.get("language")) # ToDo: RegEx-extract typicalLearningTime? 
(needs to be a duration; LO serves this metadata as a string) # the time-format on the DOM is a wildly irregular String (from "3 Unterrichtsstunden" to "3x90 Minuten", # "mindestens 12 Unterrichtsstunden plus Lektüre" etc.); maybe consider this for later crawler-versions # learning_time_string = response.xpath('//li[@class="icon-count-hours"]/span/text()').get() - lom.add_value('educational', educational.load_item()) + lom.add_value("educational", educational.load_item()) # classification = super().getLOMClassification() # lom.add_value('classification', classification.load_item()) - base.add_value('lom', lom.load_item()) + base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() - vs.add_value('containsAdvertisement', 'yes') - vs.add_value('dataProtectionConformity', 'generalDataProtectionRegulation') + vs.add_value("containsAdvertisement", "yes") + vs.add_value("dataProtectionConformity", "generalDataProtectionRegulation") # see: https://www.eduversum.de/datenschutz/ - if "conditions_of_access" in metadata_dict.keys(): - vs.add_value('conditionsOfAccess', metadata_dict.get("conditions_of_access")) - if "discipline" in metadata_dict.keys(): - vs.add_value('discipline', metadata_dict.get("discipline")) - if "educational_context" in metadata_dict.keys(): - vs.add_value('educationalContext', metadata_dict.get("educational_context")) - if "intended_end_user" in metadata_dict.keys(): - vs.add_value('intendedEndUserRole', metadata_dict.get("intended_end_user")) - if "new_lrt" in metadata_dict.keys(): - vs.add_value('new_lrt', metadata_dict.get("new_lrt")) + if "conditions_of_access" in metadata_dict: + vs.add_value("conditionsOfAccess", metadata_dict.get("conditions_of_access")) + if "discipline" in metadata_dict: + vs.add_value("discipline", metadata_dict.get("discipline")) + if "educational_context" in metadata_dict: + vs.add_value("educationalContext", metadata_dict.get("educational_context")) + if "intended_end_user" in metadata_dict: + vs.add_value("intendedEndUserRole", metadata_dict.get("intended_end_user")) + vs.add_value("intendedEndUserRole", "teacher") # ToDo: remove this hard-coded value as soon + # as the SKOS vocabs altLabel generation is fixed (see: ITSJOINTLY-332) + if "new_lrt" in metadata_dict: + vs.add_value("new_lrt", metadata_dict.get("new_lrt")) else: - vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) - if "price" in metadata_dict.keys(): - vs.add_value('price', metadata_dict.get("price")) - base.add_value('valuespaces', vs.load_item()) + vs.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) + if "price" in metadata_dict: + vs.add_value("price", metadata_dict.get("price")) + base.add_value("valuespaces", vs.load_item()) license_loader = LicenseItemLoader() - if "license_url" in metadata_dict.keys(): + if "license_url" in metadata_dict: license_url = metadata_dict.get("license_url") - license_loader.add_value('url', license_url) - elif "license_description" in metadata_dict.keys(): + license_loader.add_value("url", license_url) + elif "license_description" in metadata_dict: license_description = metadata_dict.get("license_description") - if license_description == 'Frei nutzbares Material': - license_loader.add_value('internal', Constants.LICENSE_CUSTOM) + if license_description == "Frei nutzbares Material": + license_loader.add_value("internal", Constants.LICENSE_CUSTOM) # just in case the license-description changes over time, we're gathering the description from the DOM license_title: str = response.xpath('//div[@class="license-title"]/text()').get() 
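Picking up the typicalLearningTime ToDo a few lines above: a rough sketch of how those irregular German duration strings might be normalised to minutes. The helper name, the regex patterns and the 45-minutes-per-lesson assumption are illustrative guesses, not taken from the crawler:

import re


def learning_time_to_minutes(raw: str, minutes_per_lesson: int = 45) -> int | None:
    """Rough normalisation of Lehrer-Online's free-text learning time to minutes.
    Handles patterns like '3 Unterrichtsstunden', '3x90 Minuten' or
    'mindestens 12 Unterrichtsstunden plus Lektüre'."""
    raw = raw.lower()
    multiplied = re.search(r"(\d+)\s*x\s*(\d+)\s*min", raw)  # e.g. "3x90 Minuten"
    if multiplied:
        return int(multiplied.group(1)) * int(multiplied.group(2))
    minutes = re.search(r"(\d+)\s*min", raw)  # e.g. "90 Minuten"
    if minutes:
        return int(minutes.group(1))
    lessons = re.search(r"(\d+)\s*unterrichtsstunde", raw)  # e.g. "3 Unterrichtsstunden"
    if lessons:
        return int(lessons.group(1)) * minutes_per_lesson
    return None  # give up on strings that match no known pattern


# e.g. learning_time_to_minutes("3x90 Minuten") == 270
# and  learning_time_to_minutes("mindestens 12 Unterrichtsstunden plus Lektüre") == 540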
license_text: str = response.xpath('//div[@class="license-text"]/text()').get() if license_text and license_title: license_full_desc: str = license_text.join(license_title) - license_loader.add_value('description', license_full_desc) + license_loader.add_value("description", license_full_desc) else: - license_loader.add_value('description', license_description) + license_loader.add_value("description", license_description) else: - license_loader.add_value('internal', Constants.LICENSE_COPYRIGHT_LAW) + license_loader.add_value("internal", Constants.LICENSE_COPYRIGHT_LAW) # noinspection DuplicatedCode - if "author" in metadata_dict.keys(): - license_loader.add_value('author', metadata_dict.get("author")) - # if "expiration_date" in metadata_dict.keys(): + if "author" in metadata_dict: + license_loader.add_value("author", metadata_dict.get("author")) + # if "expiration_date" in metadata_dict: # # ToDo: activate gathering of expiration_date once the data is available in the API # # - make sure that the dateparser correctly recognizes the date # expiration_date = metadata_dict.get("expiration_date") # license_loader.add_value('expirationDate', expiration_date) - base.add_value('license', license_loader.load_item()) + base.add_value("license", license_loader.load_item()) permissions = super().getPermissions(response) - base.add_value('permissions', permissions.load_item()) + base.add_value("permissions", permissions.load_item()) response_loader = super().mapResponse(response) - base.add_value('response', response_loader.load_item()) + base.add_value("response", response_loader.load_item()) yield base.load_item() From 3facd7f94b4fbfd78b535c4037d170d9a339bde0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 3 Aug 2023 21:01:22 +0200 Subject: [PATCH 333/590] change: decrease logging verbosity for specific '404'-response from edu-sharing - fix: log spam caused by one specific '404'-response from edu-sharing while still being verbose when encountering other 404 error-codes -- background-information: edu-sharing throws an 'org.edu_sharing.restservices.DAOMissingException' when a node couldn't be found (in the event that it simply didn't exist before) --- this error message would occur for each item that didn't previously exist within the repository, blowing up the logfile Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 32c900c0..ee9ae059 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -736,12 +736,32 @@ def find_item(self, id, spider): self.init_api_client() return None if e.status == 404: - logging.debug( - f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} (replicationsourceid " - f"'{id}'):\n" - f"HTTP Body: {e.body}\n" - f"HTTP Header: {e.headers}" - ) + try: + error_dict: dict = json.loads(e.body) + error_name: str = error_dict["error"] + if error_name and error_name == 'org.edu_sharing.restservices.DAOMissingException': + # when there is no already existing node in the edu-sharing repository, edu-sharing returns + # a "DAOMissingException". The following debug message is commented out to reduce log-spam: + # error_message: str = error_dict["message"] + # logging.debug(f"ES_CONNECTOR 'find_item': edu-sharing returned HTTP-statuscode 404 " + # f"('{error_message}') for\n '{id}'. 
\n(This typically means that there was no " + # f"existing node in the edu-sharing repository. Continuing...)") + return None + else: + logging.debug( + f"ES_CONNECTOR 'find_item': edu-sharing returned HTTP-statuscode {e.status} " + f"(replicationsourceid '{id}'):\n" + f"HTTP Body: {e.body}\n" + f"HTTP Header: {e.headers}" + ) + return None + except json.JSONDecodeError: + logging.debug( + f"ES_CONNECTOR 'find_item': edu-sharing returned HTTP-statuscode {e.status} " + f"(replicationsourceid '{id}'):\n" + f"HTTP Body: {e.body}\n" + f"HTTP Header: {e.headers}" + ) return None else: raise e From 8bbea9ced1eaf299e38c57bd3fdfd5304d4e9358 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 3 Aug 2023 21:27:14 +0200 Subject: [PATCH 334/590] fix: revert API limit back to 10.000 - I accidentally left the 'limit'-parameter on 10 after debugging v0.0.7 Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/lehreronline_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index c01051f4..4518b38f 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -24,7 +24,7 @@ class LehrerOnlineSpider(XMLFeedSpider, LomBase): name = "lehreronline_spider" friendlyName = "Lehrer-Online" start_urls = [ - "https://www.lehrer-online.de/?type=3030&limit=10" + "https://www.lehrer-online.de/?type=3030&limit=10000" # the limit parameter controls the amount of results PER CATEGORY (NOT the total amount of results) # API response with a "limit"-value set to 10.000 might take more than 90s (17.7 MB, 5912 URLs to crawl) ] From fe4b47edff01057b9f46f519b8eecccd267aafbe Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 4 Aug 2023 10:56:55 +0200 Subject: [PATCH 335/590] implement getUri, getUUID - improve readability of hasChanged() - fix: ResponseItemLoader Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/lehreronline_spider.py | 32 +++++++++++++++++++----- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/converter/spiders/lehreronline_spider.py b/converter/spiders/lehreronline_spider.py index 4518b38f..2058d4fe 100644 --- a/converter/spiders/lehreronline_spider.py +++ b/converter/spiders/lehreronline_spider.py @@ -16,6 +16,7 @@ LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, + ResponseItemLoader, ) from converter.spiders.base_classes import LomBase @@ -468,31 +469,48 @@ def parse_node(self, response, selector: scrapy.selector.unified.Selector) -> sc else: pass + def getUri(self, response=None, **kwargs) -> str: + try: + metadata_dict: dict = kwargs["kwargs"]["metadata_dict"] + except KeyError as ke: + logging.error("getUri()-method could not access 'metadata_dict'.") + raise ke + return metadata_dict["url"] + + def getUUID(self, response=None, **kwargs) -> str: + try: + metadata_dict: dict = kwargs["kwargs"]["metadata_dict"] + except KeyError as ke: + logging.error("getUUID()-method could not access 'metadata_dict'.") + raise ke + return EduSharing.build_uuid(self.getUri(response, kwargs={"metadata_dict": metadata_dict})) + def hasChanged(self, response=None, **kwargs) -> bool: """Re-implements LomBase's hasChanged()-method for Lehrer-Online.""" try: metadata_dict: dict = kwargs["kwargs"]["metadata_dict"] - identifier_url: str = self.getId(response, kwargs={"metadata_dict": metadata_dict}) + 
identifier: str = self.getId(response, kwargs={"metadata_dict": metadata_dict}) hash_str: str = self.getHash(response, kwargs={"metadata_dict": metadata_dict}) + uuid_str: str = self.getUUID(response, kwargs={"metadata_dict": metadata_dict}) except KeyError as ke: logging.error("hasChanged()-method could not access 'metadata_dict'.") raise ke if self.forceUpdate: return True if self.uuid: - if self.getUUID(response) == self.uuid: + if uuid_str == self.uuid: logging.info(f"matching requested id: {self.uuid}") return True return False if self.remoteId: - if identifier_url == self.remoteId: + if identifier == self.remoteId: logging.info(f"matching requested id: {self.remoteId}") return True return False - db = EduSharing().find_item(identifier_url, self) + db = EduSharing().find_item(identifier, self) changed = db is None or db[1] != hash_str if not changed: - logging.info(f"Item {identifier_url} (uuid: {db[0]}) has not changed") + logging.info(f"Item {identifier} (uuid: {db[0]}) has not changed") return changed def check_if_item_should_be_dropped(self, response, metadata_dict: dict) -> bool: @@ -650,7 +668,9 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) - response_loader = super().mapResponse(response) + response_loader = ResponseItemLoader() + response_loader.add_value("headers", response.headers) + response_loader.add_value("url", self.getUri(response, kwargs={"metadata_dict": metadata_dict})) base.add_value("response", response_loader.load_item()) yield base.load_item() From c11973151c3e65a8c552720e513c90782b69693d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 4 Aug 2023 14:13:06 +0200 Subject: [PATCH 336/590] performance optimization, fix 'init'-call - fix: 'init' by calling LomBase explicitly instead of using super() - perf: first optimization pass to increase crawler performance by overwriting hasChanged() and checking earlier in the program flow if an item needs to be dropped -- there might be more room for performance optimization by moving the drop_item check into the start_requests() method, but this approach needs more work --- docs: (I left ToDo-comments for this optimization approach in the code for a future revisit) - change: reworked getId() and getHash() to use try-except statements Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 101 +++++++++++++++++++++++++----- 1 file changed, 84 insertions(+), 17 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index ef05f758..2c0f4fac 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -21,6 +21,7 @@ ) from converter.spiders.base_classes import LomBase from converter.web_tools import WebEngine, WebTools +from ..es_connector import EduSharing from ..util.license_mapper import LicenseMapper @@ -30,7 +31,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.2.9" # last update: 2023-08-01 + version = "0.2.9" # last update: 2023-08-04 custom_settings = { # Using Playwright because of Splash-issues with thumbnails+text for Serlo "WEB_TOOLS": WebEngine.Playwright @@ -56,8 +57,8 @@ class SerloSpider(scrapy.Spider, LomBase): "student": "learner", } - def 
__init__(self, *a, **kw): - super().__init__(*a, **kw) + def __init__(self, **kw): + LomBase.__init__(self, **kw) self.decide_crawl_mode() self.graphql_items = self.fetch_all_graphql_pages() @@ -76,8 +77,10 @@ def decide_crawl_mode(self): """ graphql_instance_param: str = env.get(key="SERLO_INSTANCE", allow_null=True, default=None) if graphql_instance_param: - logging.info(f"INIT: '.env'-Setting 'SERLO_INSTANCE': {graphql_instance_param} (language) detected. " - f"Limiting query to a single language selection.") + logging.info( + f"INIT: '.env'-Setting 'SERLO_INSTANCE': {graphql_instance_param} (language) detected. " + f"Limiting query to a single language selection." + ) self.GRAPHQL_INSTANCE_PARAMETER = graphql_instance_param graphql_modified_after_param: str = env.get(key="SERLO_MODIFIED_AFTER", allow_null=True, default=None) if graphql_modified_after_param: @@ -139,7 +142,7 @@ def query_graphql_page(self, amount_of_nodes: int = 500, pagination_string: str # https://github.com/serlo/documentation/wiki/Metadata-API#understanding-the-request-payload-and-pagination instance_value: str = self.GRAPHQL_INSTANCE_PARAMETER if instance_value and instance_value in ["de", "en", "es", "ta", "hi", "fr"]: - instance_parameter: str = f'instance: {instance_value}' + instance_parameter: str = f"instance: {instance_value}" graphql_metadata_query_body = { "query": f""" query {{ @@ -165,8 +168,8 @@ def query_graphql_page(self, amount_of_nodes: int = 500, pagination_string: str def start_requests(self): for graphql_item in self.graphql_items: - # logging.debug(f"{graphql_item}") item_url = graphql_item["id"] + # ToDo: there is room for further optimization if we do the drop_item check here yield scrapy.Request(url=item_url, callback=self.parse, cb_kwargs={"graphql_item": graphql_item}) def getId(self, response=None, graphql_json=None) -> str: @@ -176,27 +179,91 @@ def getId(self, response=None, graphql_json=None) -> str: # e.g.: "id": "https://serlo.org/2097" # "value": "2097" graphql_json: dict = graphql_json - if "identifier" in graphql_json: - if "value" in graphql_json["identifier"]: - identifier_value = graphql_json["identifier"]["value"] - if identifier_value: - return identifier_value - else: - return response.url + try: + identifier_value: str = graphql_json["identifier"]["value"] + if identifier_value: + return identifier_value + else: + return response.url + except KeyError: + logging.debug( + f"getId: Could not retrieve Serlo identifier from 'graphql_json'-dict. Falling back to 'response.url'" + ) def getHash(self, response=None, graphql_json=None) -> str: graphql_json: dict = graphql_json - if "dateModified" in graphql_json: + try: date_modified: str = graphql_json["dateModified"] if date_modified: hash_combined = f"{date_modified}{self.version}" return hash_combined - else: - return f"{datetime.datetime.now().isoformat()}{self.version}" + else: + return f"{datetime.datetime.now().isoformat()}{self.version}" + except KeyError: + logging.debug( + f"getHash: Could not retrieve Serlo 'dateModified' from 'graphql_json'-dict. Falling back to " + f"'datetime.now()'-value for 'hash'." 
+ ) + + def hasChanged(self, response=None, **kwargs) -> bool: + try: + graphql_json: dict = kwargs["kwargs"]["graphql_json"] + identifier: str = self.getId(response, graphql_json) + hash_str: str = self.getHash(response, graphql_json) + uuid_str: str = self.getUUID(response) + # ToDo - further optimization: if we want to save even more time, we could use graphql_json as a parameter + # in the 'getUUID'-method (needs to be overwritten) and check if the item should be dropped in the + # start_requests()-method before yielding the scrapy.Request + except KeyError as ke: + logging.debug(f"hasChanged(): Could not retrieve 'graphql_json' from kwargs.") + raise ke + if self.forceUpdate: + return True + if self.uuid: + if uuid_str == self.uuid: + logging.info(f"matching requested id: {self.uuid}") + return True + return False + if self.remoteId: + if identifier == self.remoteId: + logging.info(f"matching requested id: {self.remoteId}") + return True + return False + db = EduSharing().find_item(identifier, self) + changed = db is None or db[1] != hash_str + if not changed: + logging.info(f"Item {identifier} (uuid: {db[0]}) has not changed") + return changed + + def check_if_item_should_be_dropped(self, response, graphql_json: dict): + """ + Check if item needs to be dropped (before making any further HTTP Requests). + This could happen for reasons like "the hash has not changed" (= the object has not changed since the last + crawl) or if the 'shouldImport'-attribute was set to False. + + :param response: scrapy.http.Response + :param graphql_json: metadata dictionary of an item (from Serlo's GraphQL API) + :return: True if item needs to be dropped. Defaults to: False + """ + drop_item_flag: bool = False # by default, we assume that all items should be crawled + identifier: str = self.getId(response, graphql_json) + hash_str: str = self.getHash(response, graphql_json) + if self.shouldImport(response) is False: + logging.debug(f"Skipping entry {identifier} because shouldImport() returned false") + drop_item_flag = True + return drop_item_flag + if identifier is not None and hash_str is not None: + if not self.hasChanged(response, kwargs={"graphql_json": graphql_json}): + drop_item_flag = True + return drop_item_flag def parse(self, response, **kwargs): graphql_json: dict = kwargs.get("graphql_item") + drop_item_flag = self.check_if_item_should_be_dropped(response, graphql_json) + if drop_item_flag is True: + return None + json_ld = response.xpath('//*[@type="application/ld+json"]/text()').get() json_ld = json.loads(json_ld) From da2c7b863688e0ec4254430a6f15d5b7bec9d54f Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 4 Aug 2023 14:14:00 +0200 Subject: [PATCH 337/590] add: pyCharm runConfiguration for serlo_spider Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .run/serlo_spider.run.xml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 .run/serlo_spider.run.xml diff --git a/.run/serlo_spider.run.xml b/.run/serlo_spider.run.xml new file mode 100644 index 00000000..a5980a78 --- /dev/null +++ b/.run/serlo_spider.run.xml @@ -0,0 +1,25 @@ + + + + + + \ No newline at end of file From cd56590b82fb0e741abfe8ae59208ccb84cf9890 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 10 Aug 2023 11:35:31 +0200 Subject: [PATCH 338/590] fix: init kwargs Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/science_in_school_spider.py | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index 3afa33c3..95749fe8 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -41,8 +41,8 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): } KEYWORD_EXCLUSION_LIST = ["Not applicable", "not applicable"] - def __init__(self): - LomBase.__init__(self=self) + def __init__(self, **kwargs): + LomBase.__init__(self=self, **kwargs) def start_requests(self): for start_url in self.start_urls: From d74abc8f94e31ef54a513549d8d15956d6896a6b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 10 Aug 2023 12:05:20 +0200 Subject: [PATCH 339/590] fix: pyCharm runConfigurations (profiler settings) - change: Scrapy crawlers need specific settings so they can be started by the profiler -- the previous approach was by calling the 'scrapy.cmdline' module directly -- restriction: the profiler needs to use scrapy's 'bin'-directory within our .venv folder, so these profiler settings will only work if you adhere to the project's readme.md and have your .venv directory set up in the project root! Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .run/biologie_lernprogramme_spider.run.xml | 6 +++--- .run/br_rss_spider.run.xml | 6 +++--- .run/chemie_lernprogramme_spider.run.xml | 6 +++--- .run/digitallearninglab_spider.run.xml | 6 +++--- .run/dilertube_spider.run.xml | 6 +++--- .run/dwu_spider.run.xml | 6 +++--- .run/edulabs_spider.run.xml | 6 +++--- .run/fobizz_spider.run.xml | 6 +++--- .run/ginkgomaps_spider.run.xml | 6 +++--- .run/grundschulkoenig_spider.run.xml | 6 +++--- .run/kmap_spider.run.xml | 6 +++--- .run/learning_apps_spider.run.xml | 6 +++--- .run/lehreronline_spider.run.xml | 6 +++--- .run/materialnetzwerk_spider.run.xml | 6 +++--- .run/mediothek_pixiothek_spider.run.xml | 6 +++--- .run/memucho_spider.run.xml | 6 +++--- .run/oeh_spider.run.xml | 8 ++++---- .run/oersi_spider.run.xml | 6 +++--- .run/planet_schule_spider.run.xml | 6 +++--- .run/rpi_virtuell_spider.run.xml | 6 +++--- .run/science_in_school_spider.run.xml | 6 +++--- .run/segu_spider.run.xml | 6 +++--- .run/serlo_spider.run.xml | 6 +++--- .run/sodix_spider.run.xml | 6 +++--- .run/tutory_spider.run.xml | 6 +++--- .run/youtube_spider.run.xml | 6 +++--- .run/zdf_rss_spider.run.xml | 6 +++--- .run/zum_deutschlernen_spider.run.xml | 6 +++--- .run/zum_klexikon_spider.run.xml | 6 +++--- .run/zum_mathe_apps_spider.run.xml | 6 +++--- .run/zum_physik_apps_spider.run.xml | 6 +++--- .run/zum_spider.run.xml | 6 +++--- 32 files changed, 97 insertions(+), 97 deletions(-) diff --git a/.run/biologie_lernprogramme_spider.run.xml b/.run/biologie_lernprogramme_spider.run.xml index 278383ef..0fa52fed 100644 --- a/.run/biologie_lernprogramme_spider.run.xml +++ b/.run/biologie_lernprogramme_spider.run.xml @@ -8,16 +8,16 @@ or if that doesn't exist: if description: - general.add_value('description', description) + general.add_value("description", description) elif summary: - general.add_value('description', summary) + general.add_value("description", summary) elif itunes_summary: - general.add_value('description', itunes_summary) - rss_category_channel: list = response.xpath('//rss/channel/category/text()').getall() - rss_category_item: list = response.meta["item"].xpath('category/text()').getall() + general.add_value("description", itunes_summary) + rss_category_channel: list = 
response.xpath("//rss/channel/category/text()").getall() + rss_category_item: list = response.meta["item"].xpath("category/text()").getall() # see: https://www.rssboard.org/rss-profile#element-channel-item-category itunes_category: list = response.xpath('//*[name()="itunes:category"]/@text').getall() keyword_set = set() @@ -103,7 +93,7 @@ def getLOMGeneral(self, response): keyword_list: list = list(keyword_set) if keyword_list: keyword_list.sort() - general.add_value('keyword', keyword_list) + general.add_value("keyword", keyword_list) return general def getLOMTechnical(self, response): @@ -152,13 +142,13 @@ def getLOMTechnical(self, response): def getLOMLifecycle(self, response): lifecycle = LomBase.getLOMLifecycle(self, response) - lifecycle.add_value('role', 'publisher') + lifecycle.add_value("role", "publisher") channel_author: str = response.xpath("//rss/channel/*[name()='itunes:author']/text()").get() # if appears in /rss/channel, it will carry publisher/organizational information if "publisher" in self.commonProperties: - lifecycle.add_value('organization', self.commonProperties["publisher"]) + lifecycle.add_value("organization", self.commonProperties["publisher"]) elif channel_author: - lifecycle.add_value('organization', channel_author) + lifecycle.add_value("organization", channel_author) # ToDo: optional -element in , as soon as we actually encounter a RSS feed to test it against # see: https://www.rssboard.org/rss-profile#namespace-elements-dublin-creator pub_date = response.meta["item"].xpath("pubDate//text()").get() # according to the RSS 2.0 specs @@ -170,24 +160,31 @@ def getLOMLifecycle(self, response): # according to Apple's RSS Guidelines, some (Atom-inspired) feeds might use instead if pub_date: # is an OPTIONAL sub-element of - lifecycle.add_value('date', pub_date) + lifecycle.add_value("date", pub_date) elif pub_date_variation2: # if isn't available, might be - lifecycle.add_value('date', pub_date_variation2) + lifecycle.add_value("date", pub_date_variation2) elif pub_date_variation3: # if the RSS feed differs from the RSS 2.0 specs, might be available - lifecycle.add_value('date', pub_date_variation3) + lifecycle.add_value("date", pub_date_variation3) return lifecycle def getLicense(self, response=None) -> LicenseItemLoader: license_item_loader = LomBase.getLicense(self, response) - copyright_description: str = response.xpath('//rss/channel/copyright/text()').get() + copyright_description: str = response.xpath("//rss/channel/copyright/text()").get() if copyright_description: - license_item_loader.add_value('internal', Constants.LICENSE_CUSTOM) + license_item_loader.add_value("internal", Constants.LICENSE_CUSTOM) # 'internal' needs to be set to CUSTOM for 'description' to be read - license_item_loader.add_value('description', copyright_description) + license_item_loader.add_value("description", copyright_description) item_author: str = response.meta["item"].xpath("*[name()='itunes:author']/text()").get() if item_author: # if the optional field is nested in /rss/channel/item, it will contain author information - license_item_loader.add_value('author', item_author) + license_item_loader.add_value("author", item_author) return license_item_loader + + def getValuespaces(self, response): + vs_loader = LomBase.getValuespaces(self, response) + # as per team4 request on 2023-08-11 the values for 'conditionsOfAccess' and 'price' are hard-coded: + vs_loader.add_value("conditionsOfAccess", "no login") + vs_loader.add_value("price", "no") + return vs_loader From 
39d9698aec305d0ceb0a42041755d4c1dabf2394 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 17 Aug 2023 15:58:42 +0200 Subject: [PATCH 360/590] tutory_spider v0.1.6 - fix/feat: more precise workaround for 'discipline'-vocab-matching (see: ITSJOINTLY-332 ticket) -- now collects all available 'metaValues.subject'-strings to increase chances of actually hitting a prefLabel/altLabel - style: code formatting (via black) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/tutory_spider.py | 48 +++++++++++++++++++++++------- 1 file changed, 38 insertions(+), 10 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 50bbd3bf..43ce6aab 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -17,14 +17,14 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase): url = "https://www.tutory.de/" objectUrl = "https://www.tutory.de/bereitstellung/dokument/" baseUrl = "https://www.tutory.de/api/v1/share/" - version = "0.1.5" # last update: 2023-08-15 + version = "0.1.6" # last update: 2023-08-17 custom_settings = { # "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, "WEB_TOOLS": WebEngine.Playwright, } - api_pagesize_limit = 250 + api_pagesize_limit = 5 # the old API pageSize of 999999 (which was used in 2021) doesn't work anymore and throws a 502 Error (Bad Gateway). # 2023-03: setting pageSize to 5000 appeared to be a reasonable value with an API response time of 12-15s # 2023-08-15: every setting above 500 appears to always return a '502'-Error now. Current response times during api @@ -73,9 +73,7 @@ def parse_api_page(self, response: scrapy.http.TextResponse): item_url = response_copy.url response_copy.meta["item"] = j if self.hasChanged(response_copy): - yield scrapy.Request(url=item_url, callback=self.parse, cb_kwargs={ - "item_dict": j - }) + yield scrapy.Request(url=item_url, callback=self.parse, cb_kwargs={"item_dict": j}) def assemble_tutory_api_url(self, api_page: int): url_current_page = ( @@ -130,10 +128,10 @@ def parse(self, response, **kwargs): base_loader: BaseItemLoader = self.getBase(response) lom_loader: LomBaseItemloader = self.getLOM(response) - lom_loader.add_value('general', self.getLOMGeneral(response)) - lom_loader.add_value('technical', self.getLOMTechnical(response)) + lom_loader.add_value("general", self.getLOMGeneral(response)) + lom_loader.add_value("technical", self.getLOMTechnical(response)) - base_loader.add_value('lom', lom_loader.load_item()) + base_loader.add_value("lom", lom_loader.load_item()) base_loader.add_value("valuespaces", self.getValuespaces(response).load_item()) base_loader.add_value("license", self.getLicense(response).load_item()) base_loader.add_value("permissions", self.getPermissions(response).load_item()) @@ -152,7 +150,8 @@ def getBase(self, response=None): def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) - discipline = list( + disciplines = set() + subject_codes: list[str] = list( map( lambda x: x["code"], filter( @@ -161,7 +160,36 @@ def getValuespaces(self, response): ), ) ) - valuespaces.add_value("discipline", discipline) + if subject_codes: + disciplines.update(subject_codes) + # This is a (temporary) workaround until ITSJOINTLY-332 has been solved: The vocab matching doesn't hit all + # "altLabel"-values because they don't exist in the generated disipline.json. 
We're therefore trying to collect + # additional strings which could (hopefully) be mapped. + subject_names: list[str] = list( + map( + lambda x: x["name"], + filter( + lambda x: x["type"] == "subject", + response.meta["item"]["metaValues"], + ), + ) + ) + if subject_names: + disciplines.update(subject_names) + if disciplines: + # only one 'discipline'-value will remain after vocab-matching in our pipelines, so duplicate values are + # (for now) no problem, but need to be handled as soon as ITSJOINTLY-332 is solved + # ToDo: confirm that this workaround still works as intended after ITSJOINTLY-332 has been solved + # ToDo: known edge-cases for strings which cannot be mapped to our 'discipline'-vocab yet and should be + # handled after SC 2023: + # - "abu" ("Allg. bildender Unterricht") + # - "betriebswirtschaft" + # - "naturwissenschaft" + # - "technik" + valuespaces.add_value("discipline", list(disciplines)) + + # ToDo: test out similar mapping approach for 'metaValues.classLevel' for: + # - educationalContext valuespaces.add_value("new_lrt", "36e68792-6159-481d-a97b-2c00901f4f78") # Arbeitsblatt return valuespaces From ecc35797f90886238c427ead5eb01323341d5488 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 17 Aug 2023 18:16:43 +0200 Subject: [PATCH 361/590] tutory_spider v0.1.7 metadata improvements: - feat: use 'trafilatura'-fallback for missing 'description'-metadata (fulltext extraction) -- change: increase 'description'-fallback nr.4 text cutoff to 2000 chars (from 1000) performance improvements: - perf: set crawler API pageSize back to 250 (debugging oversight) - perf/fix: reduce amount of HTTP Requests to Tutory API -- during profiling it was observed that the inherited mapResponse()-method made an additional, unnecessary call to Splash (even though we're only using Playwright data as a fallback for those cases where no description is available) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/tutory_spider.py | 56 +++++++++++++++++++----------- 1 file changed, 36 insertions(+), 20 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 43ce6aab..15ad1f81 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -3,6 +3,7 @@ import urllib.parse import scrapy +import trafilatura from scrapy.selector import Selector from scrapy.spiders import CrawlSpider @@ -17,14 +18,14 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase): url = "https://www.tutory.de/" objectUrl = "https://www.tutory.de/bereitstellung/dokument/" baseUrl = "https://www.tutory.de/api/v1/share/" - version = "0.1.6" # last update: 2023-08-17 + version = "0.1.7" # last update: 2023-08-17 custom_settings = { # "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, "WEB_TOOLS": WebEngine.Playwright, } - api_pagesize_limit = 5 + API_PAGESIZE_LIMIT = 250 # the old API pageSize of 999999 (which was used in 2021) doesn't work anymore and throws a 502 Error (Bad Gateway). # 2023-03: setting pageSize to 5000 appeared to be a reasonable value with an API response time of 12-15s # 2023-08-15: every setting above 500 appears to always return a '502'-Error now. 
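The trafilatura fallback announced in this patch's commit message amounts to only a few lines; a minimal standalone sketch, assuming the page HTML is already available as a string (for example from the headless browser) and using a hypothetical helper name:

import trafilatura


def describe_from_fulltext(html: str, max_chars: int = 2000) -> str | None:
    """Fallback 'description': extract the readable fulltext from raw HTML and truncate it."""
    extracted = trafilatura.extract(html)  # returns None if nothing useful could be extracted
    if extracted:
        return f"{extracted[:max_chars]} [...]"
    return None

In the crawler itself this only runs after the cheaper DOM-header fallbacks (meta description / og:description) have come up empty, which keeps the expensive headless-browser call as the last resort.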
Current response times during api @@ -77,7 +78,7 @@ def parse_api_page(self, response: scrapy.http.TextResponse): def assemble_tutory_api_url(self, api_page: int): url_current_page = ( - f"{self.baseUrl}worksheet?groupSlug=entdecken&pageSize={str(self.api_pagesize_limit)}" + f"{self.baseUrl}worksheet?groupSlug=entdecken&pageSize={str(self.API_PAGESIZE_LIMIT)}" f"&page={str(api_page)}" ) return url_current_page @@ -125,7 +126,7 @@ def parse(self, response, **kwargs): drop_item_flag: bool = self.check_if_item_should_be_dropped(response) if drop_item_flag is True: return None - + # if we need more metadata from the DOM, this could be a suitable place to move up the call to Playwright base_loader: BaseItemLoader = self.getBase(response) lom_loader: LomBaseItemloader = self.getLOM(response) lom_loader.add_value("general", self.getLOMGeneral(response)) @@ -135,8 +136,7 @@ def parse(self, response, **kwargs): base_loader.add_value("valuespaces", self.getValuespaces(response).load_item()) base_loader.add_value("license", self.getLicense(response).load_item()) base_loader.add_value("permissions", self.getPermissions(response).load_item()) - base_loader.add_value("response", self.mapResponse(response).load_item()) - # ToDo: set fetchData to false to reduce amount of HTTP requests? + base_loader.add_value("response", self.mapResponse(response, fetchData=False).load_item()) yield base_loader.load_item() def getBase(self, response=None): @@ -236,20 +236,36 @@ def getLOMGeneral(self, response=None): # 2nd fallback: general.add_value("description", meta_og_description) else: - html = WebTools.getUrlData(response.url, engine=WebEngine.Playwright)["html"] - if html: - # apparently, the human-readable text is nested within - #
<div class="eduMark"> OR <div class="noEduMark">
elements - edumark_combined: list[str] = ( - Selector(text=html) - .xpath("//div[contains(@class,'eduMark')]//text()|//div[contains(@class,'noEduMark')]//text()") - .getall() - ) - if edumark_combined: - text_combined: str = " ".join(edumark_combined) - text_combined = urllib.parse.unquote(text_combined) - text_combined = f"{text_combined[:1000]} [...]" - general.add_value("description", text_combined) + # this is where the (expensive) calls to our headless browser start + playwright_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright) + playwright_html = playwright_dict["html"] + # ToDo: if we need DOM data from Playwright in another method, move the call to Playwright into parse() + # and parametrize the result + if playwright_html: + # 3rd fallback: trying to extract the fulltext with trafilatura + playwright_bytes: bytes = playwright_html.encode() + trafilatura_text = trafilatura.extract(playwright_bytes) + if trafilatura_text: + logging.debug( + f"Item {response.url} did not provide any valid 'description' in its DOM header metadata. " + f"Fallback to trafilatura fulltext..." + ) + trafilatura_shortened: str = f"{trafilatura_text[:2000]} [...]" + general.add_value("description", trafilatura_shortened) + else: + # 4th fallback: resorting to (manual) scraping of DOM elements (via XPaths): + # apparently, the human-readable text is nested within + #
<div class="eduMark"> OR <div class="noEduMark">
elements + edumark_combined: list[str] = ( + Selector(text=playwright_html) + .xpath("//div[contains(@class,'eduMark')]//text()|//div[contains(@class,'noEduMark')]//text()") + .getall() + ) + if edumark_combined: + text_combined: str = " ".join(edumark_combined) + text_combined = urllib.parse.unquote(text_combined) + text_combined = f"{text_combined[:2000]} [...]" + general.add_value("description", text_combined) return general def getLOMTechnical(self, response=None): From 7eeba8f5a8f6dbc5357e39aa3f21d422471aa6e2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 17 Aug 2023 20:47:17 +0200 Subject: [PATCH 362/590] tutory_spider v0.1.8 - feat: mapping for 'metaValues.classLevel'-values to 'educationalContext'-vocab (according to the team4 feature request by Romy on 2023-08-15) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/tutory_spider.py | 55 ++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 3 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 15ad1f81..177378c2 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -18,7 +18,7 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase): url = "https://www.tutory.de/" objectUrl = "https://www.tutory.de/bereitstellung/dokument/" baseUrl = "https://www.tutory.de/api/v1/share/" - version = "0.1.7" # last update: 2023-08-17 + version = "0.1.8" # last update: 2023-08-17 custom_settings = { # "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, @@ -188,8 +188,57 @@ def getValuespaces(self, response): # - "technik" valuespaces.add_value("discipline", list(disciplines)) - # ToDo: test out similar mapping approach for 'metaValues.classLevel' for: - # - educationalContext + potential_classlevel_values: list[str] = list( + map( + lambda x: x["code"], + filter( + lambda x: x["type"] == "classLevel", + response.meta["item"]["metaValues"], + ), + ) + ) + educontext_set: set[str] = set() + if potential_classlevel_values and type(potential_classlevel_values) is list: + potential_classlevel_values.sort() + two_digits_pattern = re.compile(r"\d{1,2}") + classlevel_set: set[str] = set() + classlevel_digits: set[int] = set() + for potential_classlevel in potential_classlevel_values: + # the classLevel field contains a wild mix of string-values + # this is a rough mapping that could be improved with further finetuning (and a more structured + # data-dump of all possible values) + two_digits_pattern_hit = two_digits_pattern.search(potential_classlevel) + if two_digits_pattern_hit: + # 'classLevel'-values will appear as numbers within a string ("3" or "12") and need to be converted + # for our mapping approach + classlevel_candidate = two_digits_pattern_hit.group() + classlevel_set.add(classlevel_candidate) + if "ausbildung" in potential_classlevel: + # typical values: "1-ausbildungsjahr" / "2-ausbildungsjahr" / "3-ausbildungsjahr" + educontext_set.add("berufliche_bildung") + if "e-1" in potential_classlevel or "e-2" in potential_classlevel: + educontext_set.add("sekundarstufe_2") + if classlevel_set and len(classlevel_set) > 0: + classlevels_sorted: list[str] = list(classlevel_set) + classlevels_sorted.sort(key=len) + for classlevel_string in classlevels_sorted: + classlevel_nr: int = int(classlevel_string) + classlevel_digits.add(classlevel_nr) + if classlevel_digits: + classlevel_integers: list[int] = list(classlevel_digits) + if classlevel_integers and 
type(classlevel_integers) is list: + # classlevel_min: int = min(classlevel_integers) + # classlevel_max: int = max(classlevel_integers) + for int_value in classlevel_integers: + if 0 < int_value <= 4: + educontext_set.add("grundschule") + if 5 <= int_value <= 9: + educontext_set.add("sekundarstufe_1") + if 10 <= int_value <= 13: + educontext_set.add("sekundarstufe_2") + if educontext_set: + educontext_list: list[str] = list(educontext_set) + valuespaces.add_value("educationalContext", educontext_list) valuespaces.add_value("new_lrt", "36e68792-6159-481d-a97b-2c00901f4f78") # Arbeitsblatt return valuespaces From f5bc2f40c6d49b75aaef63fd67653e8bf50962d7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 18 Aug 2023 13:21:02 +0200 Subject: [PATCH 363/590] tutory_spider v0.1.9 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - fix: 'classLevel' RegEx pattern - mapping: all "Einführungsphase 1" and "Einführungsphase 2" items are considered as Sekundarstufe 1 + 2 -- reasoning behind this mapping decision (according to Romy's team4 suggestion): --- in federal states with the G8-system (Gymnasium) "Einführungsphase 1" starts BETWEEN Sekundarstufe I and II --- while in federal states with a G9-system (Gymnasium) "Einführungsphase 1" starts in Sekundarstufe II Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/tutory_spider.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 177378c2..74398685 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -18,7 +18,7 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase): url = "https://www.tutory.de/" objectUrl = "https://www.tutory.de/bereitstellung/dokument/" baseUrl = "https://www.tutory.de/api/v1/share/" - version = "0.1.8" # last update: 2023-08-17 + version = "0.1.9" # last update: 2023-08-18 custom_settings = { # "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, @@ -200,7 +200,7 @@ def getValuespaces(self, response): educontext_set: set[str] = set() if potential_classlevel_values and type(potential_classlevel_values) is list: potential_classlevel_values.sort() - two_digits_pattern = re.compile(r"\d{1,2}") + two_digits_pattern = re.compile(r"^\d{1,2}$") # the whole string must be exactly between 1 and 2 digits classlevel_set: set[str] = set() classlevel_digits: set[int] = set() for potential_classlevel in potential_classlevel_values: @@ -217,6 +217,7 @@ def getValuespaces(self, response): # typical values: "1-ausbildungsjahr" / "2-ausbildungsjahr" / "3-ausbildungsjahr" educontext_set.add("berufliche_bildung") if "e-1" in potential_classlevel or "e-2" in potential_classlevel: + educontext_set.add("sekundarstufe_1") educontext_set.add("sekundarstufe_2") if classlevel_set and len(classlevel_set) > 0: classlevels_sorted: list[str] = list(classlevel_set) From d578e533bccf18ecfd9eacf944c89e513e426b43 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 29 Aug 2023 12:44:55 +0200 Subject: [PATCH 364/590] serlo_spider v0.3.1 (Sommercamp 2023) - this commit implements the Serlo API changes from v1.1.0 (see: https://github.com/serlo/documentation/wiki/Metadata-API#changelog-110) metadata improvements: - feat: more precise 'new_lrt'-values -- during the OER Sommercamp 2023 Kulla and Romy discussed more precise mappings from Serlo's content types to our 
'new_lrt'-vocab, which were implemented right away into the Serlo API -- the crawler now collects these 'new_lrt'-values and only uses the old LRTs as a fallback (in case no 'new_lrt' was provided) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 35 ++++++++++++++++++++++--------- 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index b2cf9a94..8249662a 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -31,7 +31,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.3.0" # last update: 2023-08-16 + version = "0.3.1" # last update: 2023-08-29 custom_settings = { # Using Playwright because of Splash-issues with thumbnails+text for Serlo "WEB_TOOLS": WebEngine.Playwright @@ -549,7 +549,7 @@ def parse(self, response, **kwargs): vs.add_value("discipline", disciplines) # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl) # if the json_ld doesn't hold a discipline value for us, we'll try to grab the discipline from the url path - # ToDo: these URL-fallbacks might be obsolete now. Remove in crawler v0.3.1 after further debugging + # ToDo: these URL-fallbacks might be obsolete now. Remove them in crawler v0.3.1+ after further debugging if "/mathe/" in response.url: disciplines_set.add("380") # Mathematik if "/biologie/" in response.url: @@ -576,21 +576,36 @@ def parse(self, response, **kwargs): if graphql_json["learningResourceType"]: # Serlo is using the learningResourceType vocabulary (as specified in the AMB standard), see: # https://github.com/serlo/documentation/wiki/Metadata-API#changes-to-the-learningresourcetype-property - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) learning_resource_types: list[dict] = graphql_json["learningResourceType"] + lrts_new: set[str] = set() + lrts_old: set[str] = set() for lrt_item in learning_resource_types: + # we're checking for 'new_lrt'-values first and use the old (broader) LRT only as fallback if "id" in lrt_item: learning_resource_type_url: str = lrt_item["id"] + if "/openeduhub/vocabs/new_lrt/" in learning_resource_type_url: + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/new_lrt.ttl) + new_lrt_key: str = learning_resource_type_url.split("/")[-1] + if new_lrt_key: + lrts_new.add(new_lrt_key) if "/openeduhub/vocabs/learningResourceType/" in learning_resource_type_url: + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/learningResourceType.ttl) lrt_key: str = learning_resource_type_url.split("/")[-1] if lrt_key: - vs.add_value("learningResourceType", lrt_key) - else: - logging.debug( - f"Serlo 'learningResourceType' {learning_resource_type_url} was not recognized " - f"as part of the OpenEduHub 'learningResourceType' vocabulary. Please check the " - f"crawler or the vocab at oeh-metadata-vocabs/learningResourceType.ttl" - ) + lrts_old.add(lrt_key) + if lrts_new: + # OER Sommercamp 2023: Kulla and Romy defined precise mappings for our 'new_lrt'-vocab. These will + # always be more precise than the old (broader) LRT values. If the API provided 'new_lrt'-values, we'll + # ONLY be using these values. 
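For illustration: each 'learningResourceType' entry returned by the API is a dict whose 'id' holds a vocab URL, and the trailing path segment is the vocab key that ends up in 'new_lrt'. The URL below is a made-up example that merely follows the '/openeduhub/vocabs/new_lrt/' pattern checked above:

lrt_entry = {"id": "https://w3id.org/openeduhub/vocabs/new_lrt/0000-example-key"}  # hypothetical entry

lrt_url = lrt_entry["id"]
if "/openeduhub/vocabs/new_lrt/" in lrt_url:
    print(lrt_url.split("/")[-1])  # -> "0000-example-key"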
+ lrts_new_list: list[str] = list(lrts_new) + if lrts_new_list: + vs.add_value("new_lrt", lrts_new_list) + elif lrts_old: + # OER Sommercamp 2023: For now, the Serlo API provides both the 'learningResourceType' and 'new_lrt' + # values. We'll only use the old LRT values as a fallback if no 'new_lrt'-values were collected. + lrts_old_list: list[str] = list(lrts_old) + if lrts_old_list: + vs.add_value("learningResourceType", lrts_old) base.add_value("valuespaces", vs.load_item()) From 51f06d5574b783b23545b9c16cd095a60351191c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 29 Aug 2023 13:51:42 +0200 Subject: [PATCH 365/590] MediaWiki crawlers: build MediaWiki URLs with underscores MediaWikiBase: - change: URLs to individual wiki pages are built by using underscores -- since the MediaWiki API doesn't provide full URLs to the individual pages, our MediaWiki crawlers need to build the URLs themselves --- this was previously done by URL encoding the whitespace character in case a wiki page title contains one or several spaces (e.g. the space in "Ada Lovelace" would be transformed to its HTTP equivalent '%20' -- during the OER Sommercamp 2023 the MediaWiki experts from idea-sketch informed us that, internally, MediaWiki URLs are built with underscores, (which is also the URL format that will be used when querying the 'getLRMI'-API-Endpoint of edu-sharing) metadata improvements: - change: 'base.fulltext' is populated by trafilatura (instead of the outdated / less precise html2text transformation) - style: code formatting via black zum_spider / zum_klexikon_spider / zum_deutschlernen_spider: - version bump all ZUM MediaWiki Crawlers to reflect the correct URL structure with underscores and force a metadata refresh during the next crawl Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- .../spiders/base_classes/mediawiki_base.py | 163 ++++++++++-------- converter/spiders/zum_deutschlernen_spider.py | 2 +- converter/spiders/zum_klexikon_spider.py | 2 +- converter/spiders/zum_spider.py | 2 +- 4 files changed, 91 insertions(+), 78 deletions(-) diff --git a/converter/spiders/base_classes/mediawiki_base.py b/converter/spiders/base_classes/mediawiki_base.py index 8d0f8d83..9afa743e 100644 --- a/converter/spiders/base_classes/mediawiki_base.py +++ b/converter/spiders/base_classes/mediawiki_base.py @@ -9,6 +9,7 @@ import jmespath import requests import scrapy +import trafilatura from converter.items import BaseItemLoader, LomGeneralItemloader, LomTechnicalItemLoader, LicenseItemLoader from converter.spiders.base_classes.meta_base import SpiderBase @@ -56,21 +57,21 @@ class PossibleTests: """ -jmes_pageids = jmespath.compile('query.allpages[].pageid') -jmes_continue = jmespath.compile('continue') -jmes_title = jmespath.compile('parse.title') +jmes_pageids = jmespath.compile("query.allpages[].pageid") +jmes_continue = jmespath.compile("continue") +jmes_title = jmespath.compile("parse.title") jmes_categories = jmespath.compile('parse.categories[]."*"') jmes_links = jmespath.compile('parse.links[]."*"') -jmes_description = jmespath.compile('parse.properties[?name==\'description\']."*" | [0]') +jmes_description = jmespath.compile("parse.properties[?name=='description'].\"*\" | [0]") jmes_text = jmespath.compile('parse.text."*"') -jmes_pageid = jmespath.compile('parse.pageid') -jmes_revid = jmespath.compile('parse.revid') +jmes_pageid = jmespath.compile("parse.pageid") +jmes_revid = jmespath.compile("parse.revid") def _api_url(url) -> str: p = 
parse.urlparse(url) path = Path(p.path) - api_path = path / 'api.php' + api_path = path / "api.php" return parse.urljoin(url, str(api_path)) @@ -82,7 +83,7 @@ class MediaWikiBase(LomBase, metaclass=SpiderBase): license = None _default_params = { - 'format': 'json', + "format": "json", # 'formatversion': '2', } @@ -92,10 +93,10 @@ class MediaWikiBase(LomBase, metaclass=SpiderBase): https://www.mediawiki.org/w/api.php?action=help&modules=query%2Ballpages """ _query_params = _default_params | { - 'action': 'query', - 'list': 'allpages', - 'aplimit': '500', # Values between 1 and 500 are allowed by MediaWiki APIs - 'apfilterredir': 'nonredirects' # ignore redirection pages + "action": "query", + "list": "allpages", + "aplimit": "500", # Values between 1 and 500 are allowed by MediaWiki APIs + "apfilterredir": "nonredirects", # ignore redirection pages } # _query_request_url = f"{_api_url(url)}?{parse.urlencode(_query_params)}" @@ -107,34 +108,36 @@ class MediaWikiBase(LomBase, metaclass=SpiderBase): we're using pageid, revid, text, title, links, properties, categories """ _parse_params = _default_params | { - 'action': 'parse', - 'prop': '|'.join([ - 'text', # Gives the parsed text of the wikitext. - # 'langlinks', # Gives the language links in the parsed wikitext. - 'categories', # Gives the categories in the parsed wikitext. - # 'categorieshtml', # Gives the HTML version of the categories. - 'links', # Gives the internal links in the parsed wikitext. - # 'templates', # Gives the templates in the parsed wikitext. - # 'images', # Gives the images in the parsed wikitext. - # 'externallinks', # Gives the external links in the parsed wikitext. - # 'sections', # Gives the sections in the parsed wikitext. - 'revid', # Adds the revision ID of the parsed page. - 'displaytitle', # Adds the title of the parsed wikitext. - # 'subtitle', # Adds the page subtitle for the parsed page. - # 'headhtml', # Gives parsed doctype, opening , element and opening of the page. - # 'modules', # Gives the ResourceLoader modules used on the page. - # 'jsconfigvars', # Gives the JavaScript configuration variables specific to the page. - # 'encodedjsconfigvars', # Gives the JavaScript configuration variables specific to the page as a JSON string. - # 'indicators', # Gives the HTML of page status indicators used on the page. - 'iwlinks', # Gives interwiki links in the parsed wikitext. - # 'wikitext', # Gives the original wikitext that was parsed. - 'properties', # Gives various properties defined in the parsed wikitext. - # 'limitreportdata', # Gives the limit report in a structured way. Gives no data, when disablelimitreport is set. - # 'limitreporthtml', # Gives the HTML version of the limit report. Gives no data, when disablelimitreport is set. - # 'parsetree', # The XML parse tree of revision content (requires content model wikitext) - # 'parsewarnings', # Gives the warnings that occurred while parsing content. - # 'headitems', # Deprecated. Gives items to put in the of the page. - ]) + "action": "parse", + "prop": "|".join( + [ + "text", # Gives the parsed text of the wikitext. + # 'langlinks', # Gives the language links in the parsed wikitext. + "categories", # Gives the categories in the parsed wikitext. + # 'categorieshtml', # Gives the HTML version of the categories. + "links", # Gives the internal links in the parsed wikitext. + # 'templates', # Gives the templates in the parsed wikitext. + # 'images', # Gives the images in the parsed wikitext. 
+ # 'externallinks', # Gives the external links in the parsed wikitext. + # 'sections', # Gives the sections in the parsed wikitext. + "revid", # Adds the revision ID of the parsed page. + "displaytitle", # Adds the title of the parsed wikitext. + # 'subtitle', # Adds the page subtitle for the parsed page. + # 'headhtml', # Gives parsed doctype, opening , element and opening of the page. + # 'modules', # Gives the ResourceLoader modules used on the page. + # 'jsconfigvars', # Gives the JavaScript configuration variables specific to the page. + # 'encodedjsconfigvars', # Gives the JavaScript configuration variables specific to the page as a JSON string. + # 'indicators', # Gives the HTML of page status indicators used on the page. + "iwlinks", # Gives interwiki links in the parsed wikitext. + # 'wikitext', # Gives the original wikitext that was parsed. + "properties", # Gives various properties defined in the parsed wikitext. + # 'limitreportdata', # Gives the limit report in a structured way. Gives no data, when disablelimitreport is set. + # 'limitreporthtml', # Gives the HTML version of the limit report. Gives no data, when disablelimitreport is set. + # 'parsetree', # The XML parse tree of revision content (requires content model wikitext) + # 'parsewarnings', # Gives the warnings that occurred while parsing content. + # 'headitems', # Deprecated. Gives items to put in the of the page. + ] + ), } keywords = {} @@ -145,9 +148,7 @@ def __init__(self, **kwargs): def start_requests(self): keywords = json.loads( - requests.get( - "https://wirlernenonline.de/wp-json/wp/v2/tags/?per_page=100" - ).content.decode("UTF-8") + requests.get("https://wirlernenonline.de/wp-json/wp/v2/tags/?per_page=100").content.decode("UTF-8") ) for keyword in keywords: self.keywords[keyword["id"]] = keyword["name"] @@ -171,42 +172,45 @@ def parse_page_query(self, response: scrapy.http.Response): for pageid in pageids: yield scrapy.FormRequest( url=self.api_url, - formdata=self._parse_params | {'pageid': str(pageid)}, + formdata=self._parse_params | {"pageid": str(pageid)}, callback=self.parse_page_data, - cb_kwargs={"extra": data} + cb_kwargs={"extra": data}, ) - if 'batchcomplete' not in data: + if "batchcomplete" not in data: return - if 'continue' not in data: + if "continue" not in data: return yield self.query_for_pages(jmes_continue.search(data)) def parse_page_data(self, response: scrapy.http.Response, extra=None): data = json.loads(response.body) - response.meta['item'] = data - response.meta['item_extra'] = extra - if error := data.get('error', None): - logging.error(f""" + response.meta["item"] = data + response.meta["item_extra"] = extra + if error := data.get("error", None): + logging.error( + f""" | Wiki Error: {error} | for request {response.request.body} | extra data: {extra} - """) + """ + ) return None return super().parse(response) def getId(self, response=None): - data = response.meta['item'] + data = response.meta["item"] return jmes_pageid.search(data) def getHash(self, response=None): - return str(jmes_revid.search(response.meta['item'])) + self.version + return str(jmes_revid.search(response.meta["item"])) + self.version def mapResponse(self, response, fetchData=True): mr = super().mapResponse(response, fetchData=False) data = json.loads(response.body) - title = jmes_title.search(data) - mr.replace_value('url', f"{self.url}{urllib.parse.quote('wiki/')}{urllib.parse.quote(title)}") + title: str = jmes_title.search(data) + title_underscored: str = title.replace(" ", "_") + 
mr.replace_value("url", f"{self.url}{urllib.parse.quote('wiki/')}{urllib.parse.quote(title_underscored)}") # response.url can't be used for string concatenation here since it would point to "/api.php" # self.url is overwritten by the children of MediaWikiBase with the URL root return mr @@ -214,44 +218,51 @@ def mapResponse(self, response, fetchData=True): def getBase(self, response=None) -> BaseItemLoader: # r: ParseResponse = response.meta["item"] loader = super().getBase(response) - data = response.meta['item'] - # fulltext = r.parse.text - text = jmes_text.search(response.meta['item']) + data = response.meta["item"] + text = jmes_text.search(response.meta["item"]) if text is None: - print('text of wikipage was empty:') - print(f'{data}') - loader.replace_value("fulltext", self.html2Text(text)) # crashes! + print("text of wikipage was empty:") + print(f"{data}") + trafilatura_text: str = trafilatura.extract(text) + if trafilatura_text: + loader.replace_value("fulltext", trafilatura_text) return loader def getLOMGeneral(self, response=None) -> LomGeneralItemloader: # r: ParseResponse = response.meta["item"] loader = super().getLOMGeneral(response) - data = response.meta['item'] - loader.replace_value('title', jmes_title.search(data)) - loader.add_value('keyword', jmes_links.search(data)) - loader.add_value('description', jmes_description.search(data)) + data = response.meta["item"] + loader.replace_value("title", jmes_title.search(data)) + loader.add_value("keyword", jmes_links.search(data)) + loader.add_value("description", jmes_description.search(data)) return loader def getLicense(self, response=None) -> LicenseItemLoader: loader = super().getLicense(response) - loader.add_value('url', self.license) + loader.add_value("url", self.license) return loader def getLOMTechnical(self, response=None) -> LomTechnicalItemLoader: loader = super().getLOMTechnical(response) - loader.replace_value('format', 'text/html') - data = response.meta['item'] - title = jmes_title.search(data) - loader.replace_value('location', f"{self.url}{urllib.parse.quote('wiki/')}{urllib.parse.quote(title)}") + loader.replace_value("format", "text/html") + data = response.meta["item"] + title: str = jmes_title.search(data) + title_underscored: str = title.replace(" ", "_") + # Sommercamp 2023: MediaWiki generates URLs from the title by replacing whitespace chars with underscores. + # Since these URLs can be used to query the edu-sharing 'getLRMI'-API-endpoint, we need to make sure that URLs + # are saved in the same format. 
+ loader.replace_value( + "location", f"{self.url}{urllib.parse.quote('wiki/')}{urllib.parse.quote(title_underscored)}" + ) return loader def getValuespaces(self, response): loader = super().getValuespaces(response) - data = response.meta['item'] + data = response.meta["item"] categories: list[str] = jmes_categories.search(data) # ['Ethik', 'Sekundarstufe_1'] # hard-coded values for all 3 ZUM crawlers as per feature-request on 2023-08-11 from Team4 (Romy): - loader.add_value('conditionsOfAccess', 'no_login') - loader.add_value('price', 'no') + loader.add_value("conditionsOfAccess", "no_login") + loader.add_value("price", "no") if categories: loader.add_value("discipline", categories) loader.add_value("educationalContext", categories) @@ -263,7 +274,9 @@ def getValuespaces(self, response): if "arbeitsblatt" in category: loader.add_value("new_lrt", "36e68792-6159-481d-a97b-2c00901f4f78") # "Arbeitsblatt" if "erklärvideo" in category: - loader.add_value("new_lrt", "a0218a48-a008-4975-a62a-27b1a83d454f") # "Erklärvideo und gefilmtes Experiment" + loader.add_value( + "new_lrt", "a0218a48-a008-4975-a62a-27b1a83d454f" + ) # "Erklärvideo und gefilmtes Experiment" if "lernpfad" in category: loader.add_value("new_lrt", "ad9b9299-0913-40fb-8ad3-50c5fd367b6a") # "Lernpfad, Lernobjekt" if "methode" in category: diff --git a/converter/spiders/zum_deutschlernen_spider.py b/converter/spiders/zum_deutschlernen_spider.py index a65e7078..c1a64f64 100644 --- a/converter/spiders/zum_deutschlernen_spider.py +++ b/converter/spiders/zum_deutschlernen_spider.py @@ -22,7 +22,7 @@ class ZUMDeutschLernenSpider(MediaWikiBase, scrapy.Spider): name = "zum_deutschlernen_spider" friendlyName = "ZUM-Deutsch-Lernen" url = "https://deutsch-lernen.zum.de/" - version = "0.1.4" # last update: 2023-08-11 + version = "0.1.5" # last update: 2023-08-29 license = Constants.LICENSE_CC_BY_40 custom_settings = {"WEB_TOOLS": WebEngine.Playwright, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True} diff --git a/converter/spiders/zum_klexikon_spider.py b/converter/spiders/zum_klexikon_spider.py index fd521a67..d2561d79 100644 --- a/converter/spiders/zum_klexikon_spider.py +++ b/converter/spiders/zum_klexikon_spider.py @@ -23,7 +23,7 @@ class ZUMKlexikonSpider(MediaWikiBase, scrapy.Spider): name = "zum_klexikon_spider" friendlyName = "ZUM-Klexikon" url = "https://klexikon.zum.de/" - version = "0.1.6" # last update: 2023-08-11 + version = "0.1.7" # last update: 2023-08-29 license = Constants.LICENSE_CC_BY_SA_40 custom_settings = {"WEB_TOOLS": WebEngine.Playwright, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True} diff --git a/converter/spiders/zum_spider.py b/converter/spiders/zum_spider.py index 23943060..5f33c67c 100644 --- a/converter/spiders/zum_spider.py +++ b/converter/spiders/zum_spider.py @@ -13,7 +13,7 @@ class ZUMSpider(MediaWikiBase, scrapy.Spider): name = "zum_spider" friendlyName = "ZUM-Unterrichten" url = "https://unterrichten.zum.de/" - version = "0.1.4" # last update: 2023-08-11 + version = "0.1.5" # last update: 2023-08-29 license = Constants.LICENSE_CC_BY_SA_40 custom_settings = {"WEB_TOOLS": WebEngine.Playwright, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True} From a07f22e613e0babdd76374421ab79b4cae381521 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Sep 2023 18:30:11 +0200 Subject: [PATCH 366/590] change: rework sodix_spider program flow (temporarily keep SODIX items in-memory) - since this crawler update does not touch any metadata fields, this 
update comes without a version-bump Performance: - change/perf: reworked program flow of sodix_spider -- this change has the side-effect of using more memory because the (unfiltered) SODIX API provides >80.000 items -- this change was necessary because it was observed (prior to the OER Sommercamp 2023) during complete crawls against Staging that the SODIX index changes while our crawler is still iterating through all API pages --- how it worked earlier: previously the crawler collected 2500 items per page, crawled those individual json items, then requested the next API page --- how it works now: the crawler collects all API pages (and the items within) first and keeps them in-memory during the crawl process -- this change should further increase the crawling performance Fixes: - fix: do hasChanged() check earlier - fix: several warnings (overwritten methods not matching signature of the base method) Features: - feat: basic item count summary/overview when spider is closing -- implemented as basic helper variables that count the expected / crawled / skipped amount of items docs / style: - code cleanup - docs: add missing DocStrings and ToDos for later - docs: update explanation about 'recordStatus'-property - style: rename methods to align them with their purpose and increase readability --- converter/spiders/sodix_spider.py | 419 +++++++++++++++++++++--------- 1 file changed, 300 insertions(+), 119 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index ac5c4392..ebface24 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -1,3 +1,4 @@ +import copy import json import logging from typing import Iterator @@ -10,6 +11,7 @@ from .base_classes import JSONBase from .base_classes import LomBase from .. import env +from ..es_connector import EduSharing from ..items import LomLifecycleItemloader from ..util.license_mapper import LicenseMapper @@ -21,7 +23,7 @@ def extract_eaf_codes_to_set(eaf_code_list: list[str]) -> set: temporary_set = set() for eaf_code in eaf_code_list: if eaf_code: - # while this might be (theoretically) unnecessary, we're make sure to never grab empty strings + # while this might be (theoretically) unnecessary, we're making sure to never grab empty strings temporary_set.add(eaf_code) return temporary_set @@ -40,7 +42,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.3.0" # last update: 2023-07-13 + version = "0.3.0" # last update: 2023-09-08 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -52,7 +54,13 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): # control the modes either # - via spider arguments: "scrapy crawl sodix_spider -a oer_filter=true" # - or by setting SODIX_SPIDER_OER_FILTER=True in your .env file - NOT_OER_THROWAWAY_COUNTER = 0 # counts the amount of skipped items, in case that the OER-Filter is enabled + COUNTER_ITEM_IS_NOT_OER = 0 # counts the amount of skipped items, in case that the OER-Filter is enabled + COUNTER_ITEMS_IN_SODIX_API = 0 # counts the amount of extracted items from all API responses + COUNTER_ITEMS_TO_BE_CRAWLED = 0 # counts the amount of to-be-crawled items (new or to be updated items) + COUNTER_ITEMS_TO_BE_SKIPPED = 0 # counts the amount of items to be skipped (same version exists already) + SODIX_ITEMS: list[dict] = list() # the crawler will collect all SODIX items from the API first before iterating + # over them. 
(This memory-intensive workaround is necessary because it was observed that index positions of items in + # the SODIX API change while the crawler is still iterating over the API pages.) MAPPING_LRT = { "APP": "application", @@ -133,22 +141,61 @@ def __init__(self, oer_filter: str = "False", **kwargs): self.OER_FILTER = True LomBase.__init__(self, **kwargs) - def mapResponse(self, response): - r = LomBase.mapResponse(self, response, fetchData=False) - r.replace_value("text", "") - r.replace_value("html", "") - r.replace_value("url", response.meta["item"].get("media").get("url")) - return r - - def getId(self, response): - return response.meta["item"].get("id") + def close(self, reason): + """Print a quick overview (of different item counts) to the log when the crawler is shutting down.""" + logging.info( + f"SODIX crawler (close reason: {reason}) summary:\n" + f"SODIX items counted during API Pagination: {self.COUNTER_ITEMS_IN_SODIX_API} .\n" + f"SODIX items to be crawled: {self.COUNTER_ITEMS_TO_BE_CRAWLED} .\n" + f"SODIX items to be skipped (item exists already / no update necessary) in total: " + f"{self.COUNTER_ITEMS_TO_BE_SKIPPED} .\n" + f"SODIX items to be skipped due to OER-Filter (sub-amount): {self.COUNTER_ITEM_IS_NOT_OER} .\n" + ) - def getHash(self, response): - return f"{response.meta['item'].get('updated')}v{self.version}" + def mapResponse(self, response=None, **kwargs): + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"mapResponse(): Could not access SODIX item.") + raise ke + r = ResponseItemLoader() + r.replace_value("text", "") # ToDo: this might be obsolete + r.replace_value("html", "") # ToDo: this might be obsolete + r.replace_value("url", sodix_item["media"]["url"]) + return r - def getUri(self, response=None) -> str: + def getId(self, response=None, **kwargs) -> str: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getId(): Could not access SODIX item.") + raise ke + return sodix_item["id"] + + def getHash(self, response=None, **kwargs) -> str: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getHash(): Could not access SODIX item.") + raise ke + return f"{sodix_item['updated']}v{self.version}" + + def getUUID(self, response=None, **kwargs) -> str: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getUUID(): Could not access SODIX item.") + raise ke + return EduSharing.build_uuid(self.getUri(response, sodix_item=sodix_item)) + + def getUri(self, response=None, **kwargs) -> str: # or media.originalUrl? - return self.get("media.url", json=response.meta["item"]) + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getUri(): Could not access SODIX item.") + raise ke + return self.get("media.url", json=sodix_item) def start_request(self, page=0): access_token = requests.post( @@ -162,13 +209,14 @@ def start_request(self, page=0): if self.OER_FILTER is True: recordstatus_parameter = ", recordStatus: ACTIVATED" # by using the recordStatus parameter during the GraphQL query, only a subset of available items is returned - # by the Sodix API: OER-only items carry the recordStatus: ACTIVATED + # by the Sodix API: Items which were marked/selected by the customer for export (in the (log-in restricted) + # SODIX web-interface) carry the 'recordStatus: ACTIVATED' property. 
else: recordstatus_parameter = "" # if OER-Filter is off (default), the GraphQL query will return all items (including non-OER materials) return scrapy.Request( url=self.apiUrl, - callback=self.parse_request, + callback=self.parse_api_page, body=json.dumps( { "query": f"""{{ @@ -281,7 +329,36 @@ def start_requests(self): else: yield self.start_request() - def parse_request(self, response): + def hasChanged(self, response=None, **kwargs) -> bool: + try: + sodix_item: dict = kwargs["sodix_item"] + identifier: str = self.getId(response, sodix_item=sodix_item) + hash_str: str = self.getHash(response, sodix_item=sodix_item) + uuid_str: str = self.getUUID(response, sodix_item=sodix_item) + except KeyError as ke: + logging.error(f"hasChanged(): Could not retrieve SODIX item.") + raise ke + if self.forceUpdate: + return True + if self.uuid: + if uuid_str == self.uuid: + logging.info(f"matching requested id: {self.uuid}") + return True + return False + if self.remoteId: + if identifier == self.remoteId: + logging.info(f"matching requested id: {self.remoteId}") + return True + return False + db = EduSharing().find_item(identifier, self) + changed = db is None or db[1] != hash_str + if not changed: + logging.info(f"Item {identifier} (uuid: {db[0]}) has not changed") + self.COUNTER_ITEMS_TO_BE_SKIPPED += 1 + return changed + + def parse_api_page(self, response): + """Parse the SODIX API response and paginate to the next API page if there were any results.""" results = json.loads(response.body) if results: metadata_items: dict = results["data"]["findAllMetadata"] @@ -289,57 +366,110 @@ def parse_request(self, response): # return if metadata_items: # lists and dictionaries only become True if they have >0 entries, empty lists are considered False - for item in metadata_items: - response_copy = response.copy() - response_copy.meta["item"] = item + for sodix_item in metadata_items: + self.COUNTER_ITEMS_IN_SODIX_API += 1 + sodix_item_copy = copy.deepcopy(sodix_item) + # ToDo: this deepcopy might not be necessary after further program flow optimizations if self.OER_FILTER is True or env.get_bool("SODIX_SPIDER_OER_FILTER", default=False): # Since DropItem exceptions can only be raised from within the pipeline, the filtering of items # that aren't strictly OER-licenses needs to happen here. # - controlling the OER-Filter via spider arguments is useful for debugging, but we also need # an easy way to control the spider via the .env file (while running it as a Docker container) - if self.license_is_oer(response_copy) is False: - self.NOT_OER_THROWAWAY_COUNTER += 1 + if self.license_is_oer(sodix_item_copy) is False: + self.COUNTER_ITEM_IS_NOT_OER += 1 + self.COUNTER_ITEMS_TO_BE_SKIPPED += 1 self.logger.info( f"Item dropped due to OER-incompatibility. \n" f"Total amount of items dropped so far: " - f"{self.NOT_OER_THROWAWAY_COUNTER}" + f"{self.COUNTER_ITEM_IS_NOT_OER}" ) continue - if self.hasChanged(response_copy): - yield self.handle_entry(response_copy) + if self.hasChanged(response, sodix_item=sodix_item_copy): + # Sommercamp 2023 observation: handle_entry needs to be called AFTER the complete item list has + # been extracted, otherwise the items' index positions change while we're still iterating + # through the API. 
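
# Illustrative sketch (not the crawler's actual interface): the strategy described in the comment above
# amounts to draining every API page into a local snapshot first and only afterwards working through that
# snapshot, so index shifts on the server side can no longer affect which items get crawled. The
# 'fetch_page' callable below is a stand-in assumption for a single SODIX GraphQL page request.
def collect_all_pages(fetch_page) -> list[dict]:
    collected_items: list[dict] = []
    page_counter: int = 0
    while True:
        batch: list[dict] = fetch_page(page_counter)  # expected to return a (possibly empty) list of item dicts
        if not batch:
            return collected_items  # an empty page marks the end of the pagination
        collected_items.extend(batch)
        page_counter += 1
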
+ self.SODIX_ITEMS.append(sodix_item_copy) # ToDo: links to binary files (.jpeg) cause errors while building the BaseItem, we might have to filter # specific media types / URLs yield self.start_request(response.meta["page"] + 1) - - def handle_entry(self, response): - return self.parse(response=response) - - def getBase(self, response) -> BaseItemLoader: - base = LomBase.getBase(self, response) + else: + if "errors" in results: + error_dict: dict = results["errors"] + try: + error_message: str = error_dict["message"] + logging.error( + f"API Pagination: The SODIX API returned the following error-message while requesting page " + f"{response.meta['page']} :\n{error_message}" + ) + except KeyError: + logging.error( + f"API Pagination: The SODIX API returned the following error while requesting page " + f"{response.meta['page']} :\n{error_dict}" + ) + else: + # once we've reached the last API page, the 'findAllMetadata'-list will be empty, which is our + # signal to start the actual parsing of individual items + logging.info( + f"API Pagination: Reached the last API page {response.meta['page']}. " + f"Beginning crawling of {len(self.SODIX_ITEMS)} SODIX items..." + ) + yield from self.handle_extracted_sodix_items() + + def handle_extracted_sodix_items(self): + """Checks if any items were collected from the SODIX API and yield the individual objects to be handled.""" + if self.SODIX_ITEMS: + # if the crawler collected any items from the API, we're popping them 1-by-1 to reduce the initial memory + # footprint of the crawler + while self.SODIX_ITEMS: + self.COUNTER_ITEMS_TO_BE_CRAWLED += 1 + next_item: dict = self.SODIX_ITEMS.pop() + yield self.handle_entry(next_item) + # ToDo: if we don't notice any side-effects of the above method, delete the below for-loop in v0.3.1+ + # for sodix_item in self.SODIX_ITEMS: + # self.COUNTER_ITEMS_TO_BE_CRAWLED += 1 + # yield self.handle_entry(sodix_item) + else: + logging.info( + f"The amount of extracted (and to be crawled) SODIX items is: {len(self.SODIX_ITEMS)}. " + f"Stopping crawl-process..." 
+ ) + + def handle_entry(self, sodix_item: dict): + return self.parse(cb_kwargs={"sodix_item": sodix_item}) + + def getBase(self, response=None, **kwargs) -> BaseItemLoader: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getBase(): Could not access SODIX item.") + raise ke + base = BaseItemLoader() + base.add_value("sourceId", self.getId(response, sodix_item=sodix_item)) + base.add_value("hash", self.getHash(response, sodix_item=sodix_item)) # thumbnail-priority from different fields: # 1) media.thumbDetails (480x360) 2) media.thumbPreview (256x256) 3) source.imageUrl (480x360) - media_thumb_details = self.get("media.thumbDetails", json=response.meta["item"]) - media_thumb_preview = self.get("media.thumbPreview", json=response.meta["item"]) - source_image_url = self.get("source.imageUrl", json=response.meta["item"]) + media_thumb_details = self.get("media.thumbDetails", json=sodix_item) + media_thumb_preview = self.get("media.thumbPreview", json=sodix_item) + source_image_url = self.get("source.imageUrl", json=sodix_item) if media_thumb_details: base.replace_value("thumbnail", media_thumb_details) elif media_thumb_preview: base.replace_value("thumbnail", media_thumb_preview) elif source_image_url: base.replace_value("thumbnail", source_image_url) - base.add_value("status", self.get("recordStatus", json=response.meta["item"])) - last_modified = self.get("updated", json=response.meta["item"]) + base.add_value("status", self.get("recordStatus", json=sodix_item)) + last_modified = self.get("updated", json=sodix_item) if last_modified: base.add_value("lastModified", last_modified) - source_id: str = self.get("source.id", json=response.meta["item"]) + source_id: str = self.get("source.id", json=sodix_item) # ToDo: the crawler can't write description text to subfolder names yet # 'source.name' or 'source.description' could be used here to make the subfolders more human-readable if source_id: base.add_value("origin", source_id) - self.extract_and_save_eaf_codes_to_custom_field(base, response) + self.extract_and_save_eaf_codes_to_custom_field(base, sodix_item=sodix_item) return base - def extract_and_save_eaf_codes_to_custom_field(self, base: BaseItemLoader, response): + def extract_and_save_eaf_codes_to_custom_field(self, base: BaseItemLoader, sodix_item: dict): """ Extracts eafCodes as a String from two Sodix API fields ('eafCode', 'competencies.id') and saves them to 'base.custom' as a dictionary. 
@@ -348,12 +478,12 @@ def extract_and_save_eaf_codes_to_custom_field(self, base: BaseItemLoader, respo """ eaf_code_subjects = set() eaf_code_competencies = set() - eaf_code_subjects_list = self.get("eafCode", json=response.meta["item"]) + eaf_code_subjects_list = self.get("eafCode", json=sodix_item) # Extracting eafCodes from 'subject.id': if eaf_code_subjects_list: eaf_code_subjects: set = extract_eaf_codes_to_set(eaf_code_subjects_list) # attention: eafCodes from Sodix field 'eafCode' and 'subject.id' carry the same information - eaf_code_competencies_list: list[dict] = self.get("competencies", json=response.meta["item"]) + eaf_code_competencies_list: list[dict] = self.get("competencies", json=sodix_item) # eafCodes from Sodix field 'competencies.id' are not listed within the 'eafCode' field, therefore we're # gathering them separately and merge them with the other collected eafCodes if necessary if eaf_code_competencies_list: @@ -379,12 +509,17 @@ def extract_and_save_eaf_codes_to_custom_field(self, base: BaseItemLoader, respo eaf_code_competencies_list.sort() base.add_value("custom", {"ccm:taxonentry": eaf_code_competencies_list}) - def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader | None: - lifecycle = LomBase.getLOMLifecycle(response) + def get_lom_lifecycle_author(self, response=None, **kwargs) -> LomLifecycleItemloader | None: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"get_lom_lifecycle_author(): Could not access SODIX item.") + raise ke + lifecycle = LomLifecycleItemloader() # the Sodix 'author'-field returns a wild mix of agencies, persons, usernames and project-names # therfore all author-strings from Sodix are treated as "organization"-values - author = self.get("author", json=response.meta["item"]) - author_website = self.get("authorWebsite", json=response.meta["item"]) + author = self.get("author", json=sodix_item) + author_website = self.get("authorWebsite", json=sodix_item) if author and author_website: # edge-case: Some Sodix Items can have a "authorWebsite", but no valid "author"-value (e.g. null). 
# saving only the authorWebsite would lead to an empty author-symbol in the edu-sharing workspace view, @@ -396,9 +531,14 @@ def get_lom_lifecycle_author(self, response=None) -> LomLifecycleItemloader | No else: return None - def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleItemloader]: - lifecycle = LomBase.getLOMLifecycle(response) - publishers: list[dict] = self.get("publishers", json=response.meta["item"]) + def get_lom_lifecycle_publisher(self, response=None, **kwargs) -> Iterator[LomLifecycleItemloader]: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"get_lom_lifecycle_publisher(): Could not access SODIX item.") + raise ke + lifecycle = LomLifecycleItemloader() + publishers: list[dict] = self.get("publishers", json=sodix_item) # Sodix 'publishers'-field is a list of Publishers, therefore we need to iterate through them if publishers: for publisher in publishers: @@ -416,9 +556,9 @@ def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleIte publisher_url: str = publisher.get("officialWebsite") if publisher_url: lifecycle.add_value("url", publisher_url) - published_time = self.get("publishedTime", json=response.meta["item"]) - creation_date = self.get("creationDate", json=response.meta["item"]) - source: dict = self.get("source", json=response.meta["item"]) + published_time = self.get("publishedTime", json=sodix_item) + creation_date = self.get("creationDate", json=sodix_item) + source: dict = self.get("source", json=sodix_item) if published_time: # the 'publishedTime'-field is 95% null or empty, which is why several fallbacks are needed lifecycle.add_value("date", published_time) @@ -434,13 +574,18 @@ def get_lom_lifecycle_publisher(self, response=None) -> Iterator[LomLifecycleIte lifecycle.add_value("date", created_date) yield lifecycle - def get_lom_lifecycle_metadata_provider(self, response=None) -> LomLifecycleItemloader: + def get_lom_lifecycle_metadata_provider(self, response=None, **kwargs) -> LomLifecycleItemloader: """ Collects metadata from Sodix 'source'-field with the purpose of saving it to edu-sharing's 'ccm:metadatacontributer_provider'-field. 
""" - lifecycle = LomBase.getLOMLifecycle(response) - source: dict = self.get("source", json=response.meta["item"]) + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"get_lom_lifecycle_metadata_provider: Could not access SODIX item.") + raise ke + lifecycle = LomLifecycleItemloader() + source: dict = self.get("source", json=sodix_item) if source: lifecycle.add_value("role", "metadata_provider") # all 'source'-subfields are of Type: String @@ -457,11 +602,16 @@ def get_lom_lifecycle_metadata_provider(self, response=None) -> LomLifecycleItem lifecycle.add_value("url", source.get("website")) return lifecycle - def getLOMGeneral(self, response) -> LomGeneralItemloader: - general = LomBase.getLOMGeneral(self, response) - general.replace_value("title", self.get("title", json=response.meta["item"])) - if "keywords" in response.meta["item"]: - keywords: list = self.get("keywords", json=response.meta["item"]) + def getLOMGeneral(self, response=None, **kwargs) -> LomGeneralItemloader: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getLomGeneral(): Could not access SODIX item.") + raise ke + general = LomGeneralItemloader() + general.replace_value("title", self.get("title", json=sodix_item)) + if "keywords" in sodix_item: + keywords: list = self.get("keywords", json=sodix_item) keywords_cleaned_up: list = list() if keywords: # making sure that we're not receiving an empty list @@ -470,20 +620,20 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: # we're only adding valid keywords, none of the empty (whitespace) strings keywords_cleaned_up.append(individual_keyword) general.add_value("keyword", individual_keyword) - subjects = self.get_subject_dictionary(response) + subjects = self.get_subject_dictionary(sodix_item=sodix_item) if subjects: subject_names = list(subjects.values()) subject_names.sort() keywords_cleaned_up.extend(subject_names) general.replace_value("keyword", keywords_cleaned_up) - if "language" in response.meta["item"]: - languages: list = self.get("language", json=response.meta["item"]) + if "language" in sodix_item: + languages: list = self.get("language", json=sodix_item) if languages and isinstance(languages, list): # Sodix returns empty lists and 'null' occasionally for language in languages: general.add_value("language", language) - if "description" in response.meta["item"]: - description: str = self.get("description", json=response.meta["item"]) + if "description" in sodix_item: + description: str = self.get("description", json=sodix_item) if description: # Sodix sometimes returns the 'description'-field as null general.add_value("description", description) @@ -493,35 +643,40 @@ def getLOMGeneral(self, response) -> LomGeneralItemloader: # the Sodix field 'id' is an uuid without further explanation # If both are available, they're saved as a [String] to 'cclom:general_identifier' (this might be necessary to # identify duplicates later in edu-sharing) - sodix_identifier: str = self.get("identifier", json=response.meta["item"]) + sodix_identifier: str = self.get("identifier", json=sodix_item) if sodix_identifier: general.add_value("identifier", sodix_identifier) - sodix_id: str = self.get("id", json=response.meta["item"]) + sodix_id: str = self.get("id", json=sodix_item) if sodix_id: general.add_value("identifier", sodix_id) return general - def getLOMTechnical(self, response) -> LomTechnicalItemLoader: - technical = LomBase.getLOMTechnical(self, response) - 
technical.replace_value("format", self.get("media.dataType", json=response.meta["item"])) - technical.replace_value("location", self.getUri(response)) - original = self.get("media.originalUrl", json=response.meta["item"]) - if original and self.getUri(response) != original: + def getLOMTechnical(self, response=None, **kwargs) -> LomTechnicalItemLoader: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getLomTechnical(): Could not access SODIX item.") + raise ke + technical = LomTechnicalItemLoader() + technical.replace_value("format", self.get("media.dataType", json=sodix_item)) + technical.replace_value("location", self.getUri(response, sodix_item=sodix_item)) + original = self.get("media.originalUrl", json=sodix_item) + if original and self.getUri(response, sodix_item=sodix_item) != original: technical.add_value("location", original) - duration: str = self.get("media.duration", json=response.meta["item"]) + duration: str = self.get("media.duration", json=sodix_item) if duration and duration != 0: # the API response contains "null"-values, we're making sure to only add valid duration values to our item technical.add_value("duration", duration) - technical.add_value("size", self.get("media.size", json=response.meta["item"])) + technical.add_value("size", self.get("media.size", json=sodix_item)) return technical - def license_is_oer(self, response) -> bool: + def license_is_oer(self, sodix_item: dict) -> bool: """ Checks if the Item is licensed under an OER-compatible license. Returns True if license is OER-compatible. (CC-BY/CC-BY-SA/CC0/PublicDomain) Otherwise returns False. """ - license_name: str = self.get("license.name", json=response.meta["item"]) + license_name: str = self.get("license.name", json=sodix_item) if license_name: if license_name in self.MAPPING_LICENSE_NAMES: license_internal_mapped = self.MAPPING_LICENSE_NAMES.get(license_name) @@ -538,13 +693,18 @@ def license_is_oer(self, response) -> bool: Constants.LICENSE_PDM, ] - def getLicense(self, response) -> LicenseItemLoader: - license_loader = LomBase.getLicense(self, response) + def getLicense(self, response=None, **kwargs) -> LicenseItemLoader: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getLicense(): Could not access SODIX item.") + raise ke + license_loader = LicenseItemLoader() - author: str = self.get("author", json=response.meta["item"]) + author: str = self.get("author", json=sodix_item) if author: license_loader.add_value("author", author) - license_description: str = self.get("license.text", json=response.meta["item"]) + license_description: str = self.get("license.text", json=sodix_item) additional_license_information: str = self.get("additionalLicenseInformation") # the Sodix field 'additionalLicenseInformation' is empty 95% of the time, but sometimes it might serve as a # fallback for the license description @@ -552,7 +712,7 @@ def getLicense(self, response) -> LicenseItemLoader: license_loader.add_value("description", license_description) elif additional_license_information: license_loader.add_value("description", additional_license_information) - license_name: str = self.get("license.name", json=response.meta["item"]) + license_name: str = self.get("license.name", json=sodix_item) if license_name: if license_name in self.MAPPING_LICENSE_NAMES: license_name_mapped = self.MAPPING_LICENSE_NAMES.get(license_name) @@ -572,7 +732,7 @@ def getLicense(self, response) -> LicenseItemLoader: # we're carrying over the custom 
description, just in case license_loader.replace_value("description", license_name) - license_url_raw: str = self.get("license.url", json=response.meta["item"]) + license_url_raw: str = self.get("license.url", json=sodix_item) # possible license URL values returned by the Sodix API: # license_urls_sorted = ['https://creativecommons.org/licenses/by-nc-nd/2.0/de/', # 'https://creativecommons.org/licenses/by-nc-nd/3.0/de/', @@ -612,9 +772,14 @@ def getLicense(self, response) -> LicenseItemLoader: license_loader.replace_value("url", license_url_mapped) return license_loader - def getLOMEducational(self, response=None) -> LomEducationalItemLoader: - educational = LomBase.getLOMEducational(response) - class_level = self.get("classLevel", json=response.meta["item"]) + def getLOMEducational(self, response=None, **kwargs) -> LomEducationalItemLoader: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getLomEducational(): Could not access SODIX item.") + raise ke + educational = LomEducationalItemLoader() + class_level = self.get("classLevel", json=sodix_item) if class_level and len(class_level.split("-")) == 2: split = class_level.split("-") tar = LomAgeRangeItemLoader() @@ -624,16 +789,16 @@ def getLOMEducational(self, response=None) -> LomEducationalItemLoader: educational.add_value("typicalAgeRange", tar.load_item()) return educational - def get_subject_dictionary(self, response) -> dict[str, str] | None: + def get_subject_dictionary(self, sodix_item: dict) -> dict[str, str] | None: """ Parses the Sodix API field 'subject' and returns a dictionary consisting of: Sodix 'subject.id' (= the eafCode of a "Schulfach") and its human-readable counterpart Sodix 'subject.name' as its value. """ subject_dictionary = dict() - if "subject" in response.meta["item"] is not None: + if "subject" in sodix_item is not None: # the "subject"-field does not exist in every item returned by the sodix API - subjects_list: list = self.get("subject", json=response.meta["item"]) + subjects_list: list = self.get("subject", json=sodix_item) if subjects_list: # the "subject"-key might exist in the API, but still be of 'None'-value for subject in subjects_list: @@ -644,16 +809,22 @@ def get_subject_dictionary(self, response) -> dict[str, str] | None: else: return None - def getValuespaces(self, response) -> ValuespaceItemLoader: + def getValuespaces(self, response, **kwargs) -> ValuespaceItemLoader: + try: + sodix_item: dict = kwargs["sodix_item"] + except KeyError as ke: + logging.error(f"getValuespaces(): Could not access SODIX item.") + raise ke + valuespaces = LomBase.getValuespaces(self, response) - subjects = self.get_subject_dictionary(response) + subjects = self.get_subject_dictionary(sodix_item=sodix_item) if subjects: subject_ids = list(subjects.keys()) if subject_ids: subject_ids.sort() valuespaces.add_value("discipline", subject_ids) - educational_context_list = self.get("educationalLevels", json=response.meta["item"]) - school_types_list = self.get("schoolTypes", json=response.meta["item"]) + educational_context_list = self.get("educationalLevels", json=sodix_item) + school_types_list = self.get("schoolTypes", json=sodix_item) educational_context_set = set() if educational_context_list: # the Sodix field 'educationalLevels' is directly mappable to our 'educationalContext' @@ -672,7 +843,7 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: if educational_context_list: valuespaces.add_value("educationalContext", educational_context_list) - target_audience_list = 
self.get("targetAudience", json=response.meta["item"]) + target_audience_list = self.get("targetAudience", json=sodix_item) # possible 'targetAudience'-values according to the SODIX API Docs: "teacher", "learner", "parent" if target_audience_list: for target_audience_item in target_audience_list: @@ -680,7 +851,7 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: target_audience_item = self.MAPPING_INTENDED_END_USER_ROLE.get(target_audience_item) valuespaces.add_value("intendedEndUserRole", target_audience_item) - cost: str | None = self.get("cost", json=response.meta["item"]) + cost: str | None = self.get("cost", json=sodix_item) if cost: cost = cost.lower() match cost: @@ -696,8 +867,8 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: f"Documentation if the possible range of values has changed in the meantime. " f"(In this case: additional metadata values need to be mapped.)" ) - potential_lrts = self.get("learnResourceType", json=response.meta["item"]) - # attention: Sodix calls their LRT "learnResourceType", not "learningResourceType" + potential_lrts = self.get("learnResourceType", json=sodix_item) + # attention: Sodix calls their LRT "learnResourceType", not "learningResourceType"! if potential_lrts: for potential_lrt in potential_lrts: if potential_lrt in self.MAPPING_LRT: @@ -705,49 +876,59 @@ def getValuespaces(self, response) -> ValuespaceItemLoader: valuespaces.add_value("learningResourceType", potential_lrt) return valuespaces - def parse(self, response, **kwargs): + def parse(self, response=None, **kwargs): + # The 'response'-object will always be of type None due to the way the crawler is currently designed. Since the + # crawler does not make any 'scrapy.Request's to the target websites (anymore), we could completely refactor all + # crawler methods to increase readability/maintainability of the crawler code. + try: + sodix_item: dict = kwargs["cb_kwargs"]["sodix_item"] + except KeyError: + logging.error(f"Cannot parse SODIX item from callback arguments. 
Aborting parse()-method.") + return None + if LomBase.shouldImport(response) is False: - self.logger.debug(f"Skipping entry {str(self.getId(response))} because shouldImport() returned false") + self.logger.debug( + f"Skipping entry {str(self.getId(response, sodix_item=sodix_item))} because shouldImport() returned " + f"false" + ) + self.COUNTER_ITEMS_TO_BE_SKIPPED += 1 return None - if self.getId(response) is not None and self.getHash(response) is not None: - if not self.hasChanged(response): - return None - base = self.getBase(response) + base = self.getBase(response, sodix_item=sodix_item) lom = LomBaseItemloader() - general = self.getLOMGeneral(response) + general = self.getLOMGeneral(response, sodix_item=sodix_item) # "UNTERRICHTSBAUSTEIN"-Materials need to handled as aggregationLevel = 2 (according to LOM-DE) - potential_lrts = self.get("learnResourceType", json=response.meta["item"]) + potential_lrts = self.get("learnResourceType", json=sodix_item) if potential_lrts: if "UNTERRICHTSBAUSTEIN" in potential_lrts: general.add_value("aggregationLevel", 2) - technical = self.getLOMTechnical(response) - if self.get("author", json=response.meta["item"]): - lifecycle_author = self.get_lom_lifecycle_author(response) + technical = self.getLOMTechnical(response, sodix_item=sodix_item) + if self.get("author", json=sodix_item): + lifecycle_author = self.get_lom_lifecycle_author(response, sodix_item=sodix_item) if lifecycle_author: lom.add_value("lifecycle", lifecycle_author.load_item()) - if self.get("publishers", json=response.meta["item"]): + if self.get("publishers", json=sodix_item): # theoretically, there can be multiple publisher fields per item, but in reality this doesn't occur (yet). - lifecycle_iterator: Iterator[LomLifecycleItemloader] = self.get_lom_lifecycle_publisher(response) + lifecycle_iterator: Iterator[LomLifecycleItemloader] = self.get_lom_lifecycle_publisher( + response, sodix_item=sodix_item + ) for lifecycle_publisher in lifecycle_iterator: lom.add_value("lifecycle", lifecycle_publisher.load_item()) - if self.get("source", json=response.meta["item"]): - lifecycle_metadata_provider = self.get_lom_lifecycle_metadata_provider(response) + if self.get("source", json=sodix_item): + lifecycle_metadata_provider = self.get_lom_lifecycle_metadata_provider(response, sodix_item=sodix_item) lom.add_value("lifecycle", lifecycle_metadata_provider.load_item()) - educational = self.getLOMEducational(response) - classification = self.getLOMClassification(response) + educational = self.getLOMEducational(response, sodix_item=sodix_item) lom.add_value("general", general.load_item()) lom.add_value("technical", technical.load_item()) lom.add_value("educational", educational.load_item()) - lom.add_value("classification", classification.load_item()) base.add_value("lom", lom.load_item()) - base.add_value("valuespaces", self.getValuespaces(response).load_item()) - base.add_value("license", self.getLicense(response).load_item()) + base.add_value("valuespaces", self.getValuespaces(response, sodix_item=sodix_item).load_item()) + base.add_value("license", self.getLicense(response, sodix_item=sodix_item).load_item()) base.add_value("permissions", self.getPermissions(response).load_item()) - base.add_value("response", self.mapResponse(response).load_item()) + base.add_value("response", self.mapResponse(response, sodix_item=sodix_item).load_item()) return base.load_item() From b9c229dbc5a15542d47332939f6227ec3c157f92 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 
8 Sep 2023 20:33:54 +0200 Subject: [PATCH 367/590] change: move 'hasChanged'-check to later point in program flow - change: a quick debug run against Staging showed that calling the "hasChanged"-method too early delays the crawler's API Pagination by A LOT -- example: 2500 items per page => it takes roughly 41 minutes per page to just check if items need to be crawled, updated or created anew -- by moving the hasChanged check to the latest point in time (when all API results have been paginated already), we try to make sure that the items' positions don't change while we're still paginating - this change has the side effect that at the start of the crawl you'll have to wait until all API pages have been parsed Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/sodix_spider.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index ebface24..ce0e5999 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -384,11 +384,7 @@ def parse_api_page(self, response): f"{self.COUNTER_ITEM_IS_NOT_OER}" ) continue - if self.hasChanged(response, sodix_item=sodix_item_copy): - # Sommercamp 2023 observation: handle_entry needs to be called AFTER the complete item list has - # been extracted, otherwise the items' index positions change while we're still iterating - # through the API. - self.SODIX_ITEMS.append(sodix_item_copy) + self.SODIX_ITEMS.append(sodix_item_copy) # ToDo: links to binary files (.jpeg) cause errors while building the BaseItem, we might have to filter # specific media types / URLs yield self.start_request(response.meta["page"] + 1) @@ -411,7 +407,7 @@ def parse_api_page(self, response): # signal to start the actual parsing of individual items logging.info( f"API Pagination: Reached the last API page {response.meta['page']}. " - f"Beginning crawling of {len(self.SODIX_ITEMS)} SODIX items..." + f"Beginning handling of {len(self.SODIX_ITEMS)} SODIX items..." ) yield from self.handle_extracted_sodix_items() @@ -421,9 +417,14 @@ def handle_extracted_sodix_items(self): # if the crawler collected any items from the API, we're popping them 1-by-1 to reduce the initial memory # footprint of the crawler while self.SODIX_ITEMS: - self.COUNTER_ITEMS_TO_BE_CRAWLED += 1 next_item: dict = self.SODIX_ITEMS.pop() - yield self.handle_entry(next_item) + if self.hasChanged(response=None, sodix_item=next_item): + # Sommercamp 2023 observation: handle_entry needs to be called AFTER the complete item list has + # been extracted, otherwise the items' index positions change while we're still iterating + # through the API. + # We NEED to do the hasChanged()-check here, otherwise the API Pagination would take too long. + self.COUNTER_ITEMS_TO_BE_CRAWLED += 1 + yield self.handle_entry(next_item) # ToDo: if we don't notice any side-effects of the above method, delete the below for-loop in v0.3.1+ # for sodix_item in self.SODIX_ITEMS: # self.COUNTER_ITEMS_TO_BE_CRAWLED += 1 @@ -431,7 +432,7 @@ def handle_extracted_sodix_items(self): else: logging.info( f"The amount of extracted (and to be crawled) SODIX items is: {len(self.SODIX_ITEMS)}. " - f"Stopping crawl-process..." + f"Stopping crawling-process..." 
) def handle_entry(self, sodix_item: dict): From 4b6f9c0766be659306a554a3ef8d7be3a2619910 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:42:29 +0200 Subject: [PATCH 368/590] feat: fallbacks for 'null'-values in SODIX field 'media.url' - during a test-crawl it was observed that the SODIX API doesn't provide a 'media.url' for 100% of its items -- some outliers hold a 'null'-value for this field, which causes the build_uuid()-method to fail - since our crawler needs a valid URI to build an uuid of said item: implemented a fallback to 'media.originalUrl' for those edge-cases Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/sodix_spider.py | 39 +++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index ce0e5999..0c0f4a69 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -42,7 +42,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.3.0" # last update: 2023-09-08 + version = "0.3.0" # last update: 2023-09-12 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -161,7 +161,13 @@ def mapResponse(self, response=None, **kwargs): r = ResponseItemLoader() r.replace_value("text", "") # ToDo: this might be obsolete r.replace_value("html", "") # ToDo: this might be obsolete - r.replace_value("url", sodix_item["media"]["url"]) + media_url: str = sodix_item["media"]["url"] + if media_url: + r.replace_value("url", media_url) + else: + media_original_url: str = sodix_item["media"]["originalUrl"] + if media_original_url: + r.replace_value("url", media_original_url) return r def getId(self, response=None, **kwargs) -> str: @@ -189,13 +195,32 @@ def getUUID(self, response=None, **kwargs) -> str: return EduSharing.build_uuid(self.getUri(response, sodix_item=sodix_item)) def getUri(self, response=None, **kwargs) -> str: - # or media.originalUrl? + """Return the URI of the SODIX item.""" + # Each SODIX item should (but in reality: doesn't) provide a 'media.url' which either points to + # - the internal (SODIX) URI + # - or an external URL + # Outlier items provide a 'null'-value for this field, which is why we need to use a fallback to + # 'media.originalUrl' for those exceptions. try: sodix_item: dict = kwargs["sodix_item"] except KeyError as ke: logging.error(f"getUri(): Could not access SODIX item.") raise ke - return self.get("media.url", json=sodix_item) + try: + media_url: str = sodix_item["media"]["url"] + if media_url: + return media_url + except KeyError: + # 2023-09-11: SODIX provides items where media.url is 'null', even though it is a REQUIRED field. + logging.info(f"SODIX did not provide a 'media.url'-value for item '{sodix_item['id']}'! Trying Fallback to " + f"'media.originalUrl'...") + try: + media_original_url: str = sodix_item["media"]["originalUrl"] + if media_original_url: + return media_original_url + except KeyError: + logging.warning(f"SODIX did not provide a 'media.originalUrl'-value for item '{sodix_item['id']}'! 
" + f"(If you see this warning, the fallback was not unsuccessful)") def start_request(self, page=0): access_token = requests.post( @@ -661,9 +686,9 @@ def getLOMTechnical(self, response=None, **kwargs) -> LomTechnicalItemLoader: technical = LomTechnicalItemLoader() technical.replace_value("format", self.get("media.dataType", json=sodix_item)) technical.replace_value("location", self.getUri(response, sodix_item=sodix_item)) - original = self.get("media.originalUrl", json=sodix_item) - if original and self.getUri(response, sodix_item=sodix_item) != original: - technical.add_value("location", original) + media_original_url: str = self.get("media.originalUrl", json=sodix_item) + if media_original_url and self.getUri(response, sodix_item=sodix_item) != media_original_url: + technical.add_value("location", media_original_url) duration: str = self.get("media.duration", json=sodix_item) if duration and duration != 0: # the API response contains "null"-values, we're making sure to only add valid duration values to our item From e4c00ebe0f6d0be957abd898adaee1c64880bd0f Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 26 Sep 2023 17:46:08 +0200 Subject: [PATCH 369/590] sodix_spider v0.3.1 (educationalContext) - improve mapping from SODIX 'schoolTypes' to 'educationalContext' -- implements the suggested changes for "Gymnasium" and "Gesamtschule", as mentioned by Romy on 2023-09-26 - fix: educationalContext mapping for "Berufsschule" etc. -- the range of values within SODIX' "educationalLevels"-field was smaller than the values which were possible within its "schoolTypes"-field, but "schoolTypes" was only used if no "educationalLevels"-values were found --- changed educationalContext program flow from if-elif to two if-checks -- instead of using "prefLabel"-values, the mapping table is using the "educationalContext"-keys from now on - docs: add TypeHints - style: code formatting Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/sodix_spider.py | 43 +++++++++++++++++++------------ 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 0c0f4a69..77146439 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -42,7 +42,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): name = "sodix_spider" friendlyName = "Sodix" url = "https://sodix.de/" - version = "0.3.0" # last update: 2023-09-12 + version = "0.3.1" # last update: 2023-09-26 apiUrl = "https://api.sodix.de/gql/graphql" page_size = 2500 custom_settings = { @@ -104,14 +104,14 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): MAPPING_EDUCONTEXT = {"Primarbereich": "Primarstufe", "Fort- und Weiterbildung": "Fortbildung"} MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT = { - "Berufsschule": "Berufliche Bildung", - "Fachoberschule": "Sekundarstufe II", - "Gesamtschule": "Sekundarstufe I", - "Grundschule": "Primarstufe", - "Gymnasium": "Sekundarstufe II", - "Kindergarten": "Elementarbereich", - "Mittel- / Hauptschule": "Sekundarstufe I", - "Realschule": "Sekundarstufe I", + "Berufsschule": "berufliche_bildung", + "Fachoberschule": "sekundarstufe_2", + "Gesamtschule": ["sekundarstufe_1", "sekundarstufe_2"], + "Grundschule": "grundschule", + "Gymnasium": ["sekundarstufe_1", "sekundarstufe_2"], + "Kindergarten": "elementarbereich", + "Mittel- / Hauptschule": "sekundarstufe_1", + "Realschule": "sekundarstufe_1", } MAPPING_INTENDED_END_USER_ROLE = { @@ -212,15 +212,19 
@@ def getUri(self, response=None, **kwargs) -> str: return media_url except KeyError: # 2023-09-11: SODIX provides items where media.url is 'null', even though it is a REQUIRED field. - logging.info(f"SODIX did not provide a 'media.url'-value for item '{sodix_item['id']}'! Trying Fallback to " - f"'media.originalUrl'...") + logging.info( + f"SODIX did not provide a 'media.url'-value for item '{sodix_item['id']}'! Trying Fallback to " + f"'media.originalUrl'..." + ) try: media_original_url: str = sodix_item["media"]["originalUrl"] if media_original_url: return media_original_url except KeyError: - logging.warning(f"SODIX did not provide a 'media.originalUrl'-value for item '{sodix_item['id']}'! " - f"(If you see this warning, the fallback was not unsuccessful)") + logging.warning( + f"SODIX did not provide a 'media.originalUrl'-value for item '{sodix_item['id']}'! " + f"(If you see this warning, the fallback was not unsuccessful)" + ) def start_request(self, page=0): access_token = requests.post( @@ -849,8 +853,8 @@ def getValuespaces(self, response, **kwargs) -> ValuespaceItemLoader: if subject_ids: subject_ids.sort() valuespaces.add_value("discipline", subject_ids) - educational_context_list = self.get("educationalLevels", json=sodix_item) - school_types_list = self.get("schoolTypes", json=sodix_item) + educational_context_list: list[str] = self.get("educationalLevels", json=sodix_item) + school_types_list: list[str] = self.get("schoolTypes", json=sodix_item) educational_context_set = set() if educational_context_list: # the Sodix field 'educationalLevels' is directly mappable to our 'educationalContext' @@ -858,12 +862,17 @@ def getValuespaces(self, response, **kwargs) -> ValuespaceItemLoader: if potential_edu_context in self.MAPPING_EDUCONTEXT: potential_edu_context = self.MAPPING_EDUCONTEXT.get(potential_edu_context) educational_context_set.add(potential_edu_context) - elif school_types_list: + if school_types_list: # if 'educationalLevels' isn't available, fallback to: map 'schoolTypes'-field to 'educationalContext' for school_type in school_types_list: if school_type in self.MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT: school_type = self.MAPPING_SCHOOL_TYPES_TO_EDUCONTEXT.get(school_type) - educational_context_set.add(school_type) + # the mapped value can be either a string or a list[str] from this point on, which is why we need to + # check their types before populating the educationalContext set + if school_type and type(school_type) is str: + educational_context_set.add(school_type) + if school_type and type(school_type) is list: + educational_context_set.update(school_type) educational_context_list = list(educational_context_set) educational_context_list.sort() if educational_context_list: From 8296c3cdc2d63f344053d1e58884dbccf44d5966 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 20 Oct 2023 16:08:52 +0200 Subject: [PATCH 370/590] serlo_spider v0.3.2 (Serlo API v1.2.0) - Serlo updated their API to v1.2.0 (see changelog: https://github.com/serlo/documentation/wiki/Metadata-API#changelog-120), therefore we updated our crawler to reflect these changes - feat: "creator.type"-check to handle authors vs. 
organizations -- remove: hard-coded and now obsolete "license.id"-workarounds - feat: prioritize the usage of the newly added GraphQL "image"-property and use previous implementations (header / website-screenshot) only as fallbacks for missing values - change: to minimize the possibility of accidental crawls of other Serlo instances, which might not be relevant to WLO at the moment, the "SERLO_INSTANCE" .env setting defaults to 'de' (de.serlo.org) from now on - style/docs: improve readability of log messages --- converter/spiders/serlo_spider.py | 99 ++++++++++++------------------- 1 file changed, 39 insertions(+), 60 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 8249662a..5795cde1 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -31,7 +31,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.3.1" # last update: 2023-08-29 + version = "0.3.2" # last update: 2023-10-27 (Serlo API v1.2.0) custom_settings = { # Using Playwright because of Splash-issues with thumbnails+text for Serlo "WEB_TOOLS": WebEngine.Playwright @@ -128,11 +128,12 @@ def decide_crawl_mode(self): You can use this '.env'-setting to crawl Serlo more efficiently: Specify a date and only receive items that were modified since . """ - graphql_instance_param: str = env.get(key="SERLO_INSTANCE", allow_null=True, default=None) + graphql_instance_param: str = env.get(key="SERLO_INSTANCE", allow_null=True, default="de") if graphql_instance_param: logging.info( - f"INIT: '.env'-Setting 'SERLO_INSTANCE': {graphql_instance_param} (language) detected. " - f"Limiting query to a single language selection." + f"INIT: '.env'-Setting 'SERLO_INSTANCE': '{graphql_instance_param}' detected. " + f"Limiting query to a single language selection. (You should always see this message. " + f"This setting defaults to: 'de')" ) self.GRAPHQL_INSTANCE_PARAMETER = graphql_instance_param graphql_modified_after_param: str = env.get(key="SERLO_MODIFIED_AFTER", allow_null=True, default=None) @@ -347,8 +348,12 @@ def parse(self, response, **kwargs): return None base = BaseItemLoader() + og_image: str = selector_playwright.xpath('//meta[@property="og:image"]/@content').get() - if og_image: + if "image" in graphql_json and graphql_json["image"]: + # Serlo API v1.2.0 provides an 'image'-property that serves a single URL (type: String) + base.add_value("thumbnail", graphql_json["image"]) + elif og_image: # if an OpenGraph image property is available, we'll use that as our thumbnail URL, e.g.: # base.add_value("thumbnail", og_image) @@ -617,52 +622,6 @@ def parse(self, response, **kwargs): license_url_mapped = license_mapper.get_license_url(license_string=license_url) if license_url_mapped: lic.add_value("url", license_url_mapped) - elif license_url and not license_url_mapped: - # This edge-case happens when the Serlo API returns website URLs within the 'license.id'-property, - # which cannot be mapped to the usual CC licenses. - # As per team4 request on 2023-08-16, we're mapping these edge-cases to a custom license as a - # (temporary) workaround since we cannot confirm with confidence that 100% of these cases should be - # treated as the same license. 
- custom_license_str = str() - if "123mathe.de" in license_url: - # example: https://de.serlo.org/mathe/8297/8297 - custom_license_str: str = "Quelle: 123mathe.de & serlo.org" - lifecycle_author_loader = LomLifecycleItemloader() - lifecycle_author_loader.add_value("firstName", "Rudolf") - lifecycle_author_loader.add_value("lastName", "Brinkmann") - lifecycle_author_loader.add_value("url", license_url) - lom.add_value("lifecycle", lifecycle_author_loader.load_item()) - if "strobl-f.de" in license_url: - # example: https://de.serlo.org/mathe/10359/10359 - custom_license_str: str = "Quelle: strobl-f.de & serlo.org" - lifecycle_author_loader = LomLifecycleItemloader() - lifecycle_author_loader.add_value("firstName", "Franz") - lifecycle_author_loader.add_value("lastName", "Strobl") - lifecycle_author_loader.add_value("url", license_url) - lom.add_value("lifecycle", lifecycle_author_loader.load_item()) - if "raschweb.de" in license_url: - # example: https://de.serlo.org/mathe/254590/254590 - custom_license_str: str = "Quelle: raschweb.de & serlo.org" - lifecycle_author_loader = LomLifecycleItemloader() - lifecycle_author_loader.add_value("firstName", "Günther") - lifecycle_author_loader.add_value("lastName", "Rasch") - lifecycle_author_loader.add_value("url", license_url) - lom.add_value("lifecycle", lifecycle_author_loader.load_item()) - if "schule-bw.de" in license_url: - # example: https://de.serlo.org/mathe/181820/181820 - custom_license_str: str = ( - "Quelle: Ausgangsmaterialien des Landesbildungsservers " - "Baden-Württemberg (www.schule-bw.de) am Institut für " - "Bildungsanalysen Baden-Württemberg (IBBW) " - "(https://ibbw.kultus-bw.de)" - ) - lifecycle_author_loader = LomLifecycleItemloader() - lifecycle_author_loader.add_value("firstName", "Landesbildungsserver Baden-Württemberg") - lifecycle_author_loader.add_value("url", license_url) - lom.add_value("lifecycle", lifecycle_author_loader.load_item()) - if custom_license_str: - lic.add_value("internal", Constants.LICENSE_CUSTOM) - lic.add_value("description", custom_license_str) base.add_value("lom", lom.load_item()) base.add_value("license", lic.load_item()) @@ -698,15 +657,35 @@ def get_lifecycle_authors(graphql_json: dict, lom_base_item_loader: LomBaseIteml # } # While the "affiliation" needs to be handled within the lifecycle_publisher item, we can use the 'name' # and 'id'-field for author information. 
(the 'id'-field leads to the user-profile on Serlo) - lifecycle_author = LomLifecycleItemloader() - lifecycle_author.add_value("role", "author") - if "name" in creator: - # the "name"-property will hold a Serlo username - lifecycle_author.add_value("firstName", creator["name"]) - if "id" in creator: - # the "id"-property will point towards a serlo profile - lifecycle_author.add_value("url", creator["id"]) - lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) + creator_type: str = creator["type"] + if creator_type and creator_type == "Person": + # this is usually the case for Serlo authors + lifecycle_author = LomLifecycleItemloader() + lifecycle_author.add_value("role", "author") + if "name" in creator: + # the "name"-property will hold a Serlo username + lifecycle_author.add_value("firstName", creator["name"]) + if "id" in creator: + # the "id"-property will point towards a serlo profile + lifecycle_author.add_value("url", creator["id"]) + lom_base_item_loader.add_value("lifecycle", lifecycle_author.load_item()) + elif creator_type == "Organization": + # Prior to Serlo's API v1.2.0 there were some edge-cases in Serlo's "license"-property, which + # provided URLs to a creator's website in the wrong Serlo API property ("license"). + # Those (previous) edge-cases are now provided as a "creator"-object of type "Organization" and + # typically look like this: + # { + # "type": "Organization", + # "id": "http://www.strobl-f.de/", + # "name": "http://www.strobl-f.de/" + # }, + lifecycle_org = LomLifecycleItemloader() + lifecycle_org.add_value("role", "author") + if "name" in creator: + lifecycle_org.add_value("organization", creator["name"]) + if "id" in creator: + lifecycle_org.add_value("url", creator["id"]) + lom_base_item_loader.add_value("lifecycle", lifecycle_org.load_item()) @staticmethod def get_lifecycle_metadata_providers(graphql_json, lom_base_item_loader): From 8e29e68e4a75c15db770c3f9a020d53df11249c8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:10:32 +0200 Subject: [PATCH 371/590] build: add "babel" and "langcodes" dependencies - babel is used in the LanguageMapper module for locale recognition (normalization to 2-letter language codes according to ISO-639-1) - the 'langcodes' package has a useful functionality for natural language detection, even for international or localized natural strings ("Deutsch", "german", "Englisch" etc.) --- poetry.lock | 116 ++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 + requirements.txt | 4 ++ 3 files changed, 120 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 83fd49cd..0b59fc89 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
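Since the patch above only adds the two dependencies, here is a minimal sketch (not part of the repository) of how babel and langcodes can be combined for the locale recognition and natural-language detection described in the commit message; the helper function name and the fallback order are illustrative assumptions:

import babel
import langcodes


def to_iso639_1(raw_string: str) -> str | None:
    """Try to reduce a raw language string to a 2-letter ISO-639-1 code (illustrative helper)."""
    try:
        # babel parses pre-formatted locale strings such as "de", "EN" or "de_DE"
        return babel.Locale.parse(raw_string.strip(), sep="_").language
    except (ValueError, babel.UnknownLocaleError):
        pass
    try:
        # langcodes recognizes natural-language names such as "Deutsch" or "englisch"
        return langcodes.find(raw_string).language
    except LookupError:
        return None


print(to_iso639_1("de_DE"))    # -> "de"
print(to_iso639_1("Deutsch"))  # -> "de"
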
[[package]] name = "asgiref" @@ -53,6 +53,23 @@ six = "*" [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] +[[package]] +name = "babel" +version = "2.13.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, + {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, +] + +[package.dependencies] +setuptools = {version = "*", markers = "python_version >= \"3.12\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -801,9 +818,26 @@ files = [ {file = "langcodes-3.3.0.tar.gz", hash = "sha256:794d07d5a28781231ac335a1561b8442f8648ca07cd518310aeb45d6f0807ef6"}, ] +[package.dependencies] +language-data = {version = ">=1.1,<2.0", optional = true, markers = "extra == \"data\""} + [package.extras] data = ["language-data (>=1.1,<2.0)"] +[[package]] +name = "language-data" +version = "1.1" +description = "Supplementary data about languages used by the langcodes module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "language_data-1.1-py3-none-any.whl", hash = "sha256:f7ba86fafe099ef213ef597eda483d5227b12446604a61f617122d6c925847d5"}, + {file = "language_data-1.1.tar.gz", hash = "sha256:c1f5283c46bba68befa37505857a3f672497aba0c522b37d99367e911232455b"}, +] + +[package.dependencies] +marisa-trie = ">=0.7.7,<0.8.0" + [[package]] name = "lxml" version = "4.9.3" @@ -911,6 +945,84 @@ html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.35)"] +[[package]] +name = "marisa-trie" +version = "0.7.8" +description = "Static memory-efficient and fast Trie-like structures for Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "marisa-trie-0.7.8.tar.gz", hash = "sha256:aee3de5f2836074cfd803f1caf16f68390f262ef09cd7dc7d0e8aee9b6878643"}, + {file = "marisa_trie-0.7.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f1cf9d5ead4471b149fdb93a1c84eddaa941d23e67b0782091adc222d198a87"}, + {file = "marisa_trie-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:73296b4d6d8ce2f6bc3898fe84348756beddb10cb56442391d050bff135e9c4c"}, + {file = "marisa_trie-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:782c1515caa603656e15779bc61d5db3b079fa4270ad77f464908796e0d940aa"}, + {file = "marisa_trie-0.7.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49131e51aad530e4d47c716cef1bbef15a4e5b8f75bddfcdd7903f5043ef2331"}, + {file = "marisa_trie-0.7.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45b0a38e015d0149141f028b8892ab518946b828c7931685199549294f5893ca"}, + {file = "marisa_trie-0.7.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a537e0efff1ec880bc212390e97f1d35832a44bd78c96807ddb685d538875096"}, + {file = "marisa_trie-0.7.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c2a33ede2655f1a6fb840729128cb4bc48829108711f79b7a645b6c0c54b5c2"}, + {file = "marisa_trie-0.7.8-cp310-cp310-win32.whl", hash = "sha256:7200cde8e2040811e98661a60463b296b76a6b224411f8899aa0850085e6af40"}, + {file = "marisa_trie-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:a432607bae139183c7251da7eb22f761440bc07d92eacc9e9f7dc0d87f70c495"}, + {file = "marisa_trie-0.7.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a891d2841da153b98c6c7fbe0a89ea8edbc164bdc96a001f360bdcdd54e2070d"}, + {file = "marisa_trie-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c9ab632c5caef23a59cd43c76ab59e325f9eadd1e9c8b1c34005b9756ae716ee"}, + {file = "marisa_trie-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68087942e95acb5801f2a5e9a874aa57af27a4afb52aca81fe1cbe22b2a2fd38"}, + {file = "marisa_trie-0.7.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef2c4a5023bb6ddbaf1803187b7fb3108e9955aa9c60564504e5f622517c9e7"}, + {file = "marisa_trie-0.7.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24e873619f61bef6a87c669ae459b79d98822270e8a10b21fc52dddf2acc9a46"}, + {file = "marisa_trie-0.7.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:34189c321f30cefb76a6b20c7f055b3f6cd0bc8378c16ba8b7283fd898bf4ac2"}, + {file = "marisa_trie-0.7.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:396555d5f52dc86c65717052573fa2875e10f9e5dd014f825677beadcaec8248"}, + {file = "marisa_trie-0.7.8-cp311-cp311-win32.whl", hash = "sha256:bfe649b02b6318bac572b86d9ddd8276c594411311f8e5ef2edc4bcd7285a06f"}, + {file = "marisa_trie-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:84991b52a187d09b269c4caefc8b857a81156c44997eec7eac0e2862d108cc20"}, + {file = "marisa_trie-0.7.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0555104fe9f414abb12e967322a13df778b21958d1727470f4c8dedfde76a8f2"}, + {file = "marisa_trie-0.7.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f96531013252bca14f7665f67aa642be113b6c348ada5e167ebf8db27b1551b5"}, + {file = "marisa_trie-0.7.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed76391b132c6261cfb402c1a08679e635d09a0a142dae2c1744d816f103c7f"}, + {file = 
"marisa_trie-0.7.8-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6232506b4d66da932f70cf359a4c5ba9e086228ccd97b602159e90c6ea53dab"}, + {file = "marisa_trie-0.7.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34f927f2738d0b402b76821895254e6a164d5020042559f7d910f6632829cdfa"}, + {file = "marisa_trie-0.7.8-cp36-cp36m-win32.whl", hash = "sha256:645908879ae8fcadfb51650fc176902b9e68eee9a8c4d4d8c682cf99ce3ff029"}, + {file = "marisa_trie-0.7.8-cp36-cp36m-win_amd64.whl", hash = "sha256:a5bf2912810e135ce1e60a9b56a179ed62258306103bf5dd3186307f5c51b28f"}, + {file = "marisa_trie-0.7.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bd86212d5037973deda057fc29d60e83dca05e68fa1e7ceaf014c513975c7a0d"}, + {file = "marisa_trie-0.7.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f280f059be417cff81ac030db6a002f8a93093c7ca4555e570d43a24ed45514"}, + {file = "marisa_trie-0.7.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ae35c696f3c5b57c5fe4f73725102f3fe884bc658b854d484dfe6d7e72c86f5"}, + {file = "marisa_trie-0.7.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:524c02f398d361aaf85d8f7709b5ac6de68d020c588fb6c087fb171137643c13"}, + {file = "marisa_trie-0.7.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:353113e811ccfa176fbb611b83671f0b3b40f46b3896b096c10e43f65d35916d"}, + {file = "marisa_trie-0.7.8-cp37-cp37m-win32.whl", hash = "sha256:93172a7314d4d5993970dbafb746f23140d3abfa0d93cc174e766a302d125f7d"}, + {file = "marisa_trie-0.7.8-cp37-cp37m-win_amd64.whl", hash = "sha256:579d69981b18f427bd8e540199c4de400a2bd4ae98e96c814a12cbf766e7029b"}, + {file = "marisa_trie-0.7.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:08858920d0e09ca07d239252884fd72db2abb56c35ff463145ffc9c1277a4f34"}, + {file = "marisa_trie-0.7.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a1b4d07158a3f9b4e84ee709a1fa86b9e11f3dd3b1e6fc45493195105a029545"}, + {file = "marisa_trie-0.7.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0359f392679774d1ff014f12efdf48da5d661e6241531ff55a3ae5a72a1137e"}, + {file = "marisa_trie-0.7.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c1daaa8c38423fbd119db6654f92740d5ee40d1185a2bbc47afae6712b9ebfc"}, + {file = "marisa_trie-0.7.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:266bf4b6e00b4cff2b8618533919d38b883127f4e5c0af0e0bd78a042093dd99"}, + {file = "marisa_trie-0.7.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fd7e71d8d85d04d2a5d23611663b2d322b60c98c2edab7e9ef9a2019f7435c5b"}, + {file = "marisa_trie-0.7.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:66b13382be3c277f32143e6c814344118721c7954b2bfb57f5cfe93d17e63c9e"}, + {file = "marisa_trie-0.7.8-cp38-cp38-win32.whl", hash = "sha256:d75b5d642b3d1e47a0ab649fb5eb6bf3681a5e1d3793c8ea7546586ab72731fd"}, + {file = "marisa_trie-0.7.8-cp38-cp38-win_amd64.whl", hash = "sha256:07c14c88fde8a0ac55139f9fe763dc0deabc4b7950047719ae986ca62135e1fb"}, + {file = "marisa_trie-0.7.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c8df5238c7b29498f4ee24fd3ee25e0129b3c56beaed1dd1628bce0ebac8ec8c"}, + {file = "marisa_trie-0.7.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db2bdc480d83a1a566b3a64027f9fb34eae98bfe45788c41a45e99d430cbf48a"}, + {file = "marisa_trie-0.7.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:80b22bdbebc3e6677e83db1352e4f6d478364107874c031a34a961437ead4e93"}, + {file = 
"marisa_trie-0.7.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6412c816be723a0f11dd41225a30a08182cf2b3b7b3c882c44335003bde47003"}, + {file = "marisa_trie-0.7.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fcdb7f802db43857df3825c4c11acd14bb380deb961ff91e260950886531400"}, + {file = "marisa_trie-0.7.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5cf04156f38dc46f0f14423f98559c5def7d83f3a30f8a580c27ad3b0311ce76"}, + {file = "marisa_trie-0.7.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c53b1d02f4974ecb52c6e8c6f4f1dbf3a15e79bc3861f4ad48b14e4e77c82342"}, + {file = "marisa_trie-0.7.8-cp39-cp39-win32.whl", hash = "sha256:75317347f20bf05ab2ce5537a90989b1439b5e1752f558aad7b5d6b43194429b"}, + {file = "marisa_trie-0.7.8-cp39-cp39-win_amd64.whl", hash = "sha256:82ba3caed5acfdff6a23d6881cc1927776b7320415261b6b24f48d0a190ab890"}, + {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:43abd082a21295b04859705b088d15acac8956587557680850e3149a79e36789"}, + {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d891f0138e5aecc9c5afb7b0a57c758e22c5b5c7c0edb0a1f21ae933259815"}, + {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9031184fe2215b591a6cdefe5d6d4901806fd7359e813c485a7ff25ea69d603c"}, + {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8ccb3ba8a2a589b8a7aed693d564f20a6d3bbbb552975f904ba311cea6b85706"}, + {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f49a2cba047e643e5cd295d75de59f1df710c5e919cd376ac06ead513439881b"}, + {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d37ea556bb99d9b0dfbe8fd6bdb17e91b91d04531be9e3b8b1b7b7f76ea55637"}, + {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55a5aea422a4c0c9ef143d3703323f2a43b4a5315fc90bbb6e9ff18544b8d931"}, + {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d19f363b981fe9b4a302060a8088fd1f00906bc315db24f5d6726b5c309cc47e"}, + {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e0d51c31fb41b6bc76c1abb7cf2d63a6e0ba7feffc96ea3d92b4d5084d71721a"}, + {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71ed6286e9d593dac035b8516e7ec35a1b54a7d9c6451a9319e918a8ef722714"}, + {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc1c1dca06c0fdcca5bb261a09eca2b3bcf41eaeb467caf600ac68e77d3ed2c0"}, + {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:891be5569cd6e3a059c2de53d63251aaaef513d68e8d2181f71378f9cb69e1ab"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +test = ["hypothesis", "pytest", "readme-renderer"] + [[package]] name = "mccabe" version = "0.7.0" @@ -1913,4 +2025,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "0d3a96ee74ec5463061ed5e3694c05316539a3ebebd589e975dcadd8e1dd5af0" +content-hash = "afe59b677369bb8b9f3a93887f7be61d6270b69d757b3c2c33f846d749a182d5" diff --git a/pyproject.toml b/pyproject.toml index ba138a8c..a2d0ea2c 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -88,6 +88,8 @@ vobject="0.9.6.1" w3lib="2.1.1" xmltodict="0.13.0" trafilatura = "^1.6.1" +babel = "2.13.1" +langcodes = {extras = ["data"], version = "^3.3.0"} [tool.poetry.group.dev.dependencies] diff --git a/requirements.txt b/requirements.txt index ddcec061..5040adf6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ asgiref==3.7.2 ; python_version >= "3.10" and python_version < "4.0" attrs==23.1.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" +babel==2.13.1 ; python_version >= "3.10" and python_version < "4.0" beautifulsoup4==4.12.2 ; python_version >= "3.10" and python_version < "4.0" black==23.7.0 ; python_version >= "3.10" and python_version < "4.0" certifi==2023.7.22 ; python_version >= "3.10" and python_version < "4.0" @@ -35,7 +36,10 @@ jmespath==1.0.1 ; python_version >= "3.10" and python_version < "4.0" jstyleson==0.0.2 ; python_version >= "3.10" and python_version < "4.0" justext==3.0.0 ; python_version >= "3.10" and python_version < "4.0" langcodes==3.3.0 ; python_version >= "3.10" and python_version < "4.0" +langcodes[data]==3.3.0 ; python_version >= "3.10" and python_version < "4.0" +language-data==1.1 ; python_version >= "3.10" and python_version < "4.0" lxml==4.9.3 ; python_version >= "3.10" and python_version < "4.0" +marisa-trie==0.7.8 ; python_version >= "3.10" and python_version < "4.0" mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" mf2py==1.1.3 ; python_version >= "3.10" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" From 128cd9281582ab8e8d3305de68ae20615a6ebf53 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:16:14 +0200 Subject: [PATCH 372/590] feat: LanguageMapper helper utility MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - extracts language codes (ISO 639-1) from natural language strings or potentially ambiguous/malformed language strings ("en-US", "en_US", "fr_FR", "französisch", "englisch" etc.) - the validity of the language code recognition can be tested (and extended) by using the test_language_mapper.py class --- converter/util/language_mapper.py | 172 +++++++++++++++++++++++++ converter/util/test_language_mapper.py | 57 ++++++++ 2 files changed, 229 insertions(+) create mode 100644 converter/util/language_mapper.py create mode 100644 converter/util/test_language_mapper.py diff --git a/converter/util/language_mapper.py b/converter/util/language_mapper.py new file mode 100644 index 00000000..1bdac283 --- /dev/null +++ b/converter/util/language_mapper.py @@ -0,0 +1,172 @@ +import logging +import re + +import babel +import langcodes + + +class LanguageMapper: + """Helper class to detect ISO-639-1 language codes from potentially malformed strings and natural language.""" + + def __init__(self, languages: list[str] = None): + self.languages = languages + + logging.basicConfig( + format="%(asctime)s\t%(levelname)s: %(message)s", + level=logging.DEBUG, + ) + + @staticmethod + def _normalize_string_to_language_code(raw_string: str) -> str | None: + """ + Transform raw string to language code if a mapping was possible. If no mapping was possible, return None. + + (Please don't use this private method from outside. It is basically a wrapper for parsing ambiguous, but + pre-formatted strings with babel.) 
+ + :param raw_string: a string which might or might not contain a language code + :return: string of mapped language code (2-letter) or None + """ + regex_lang_code = re.compile(r"^(?P\w{2,3})" r"((?P[_-])(?P\w{2}))?$") + regex_result = regex_lang_code.search(raw_string) + separator: str | None = None + if regex_result: + regex_result_dict = regex_result.groupdict() + if "separator" in regex_result_dict: + separator: str = regex_result_dict["separator"] + else: + logging.debug(f"The raw string {raw_string} does not look like a typical Locale string.") + + if regex_result and separator: + # this case happens when the raw string looks like "de_DE" or "en-US" + # if there is a separator in the provided string, we need to provide it to Babel as a parameter + try: + locale_parsed = babel.Locale.parse(raw_string, sep=separator) + if locale_parsed: + language_code = locale_parsed.language + return language_code + except ValueError: + return None + except babel.UnknownLocaleError: + return None + elif regex_result: + # this is the default case for 2-letter-codes like "de" or "EN" + try: + locale_parsed = babel.Locale.parse(raw_string) + if locale_parsed: + language_code = locale_parsed.language + return language_code + except ValueError: + return None + except babel.UnknownLocaleError: + return None + else: + return None + + def normalize_list_of_language_strings(self) -> list[str] | None: + """ + Transform list of (raw/potential) language strings into ISO-639-1 normalized 2-letter-codes. + If not a single mapping was possible, return None. + + (Please use only this method if you want to use this helper class from outside!) + + :return: alphabetically sorted list[str] containing all successfully mapped 2-letter language codes or None if + no mapping was possible. + """ + if self.languages and isinstance(self.languages, str): + # since every step from here on expects a list of strings, typecasting to list[str] provides some minor + # Quality of Life + logging.debug(f"LanguageMapper was instantiated with a string, converting to Type list[str]...") + self.languages: list[str] = [self.languages] + + if self.languages and isinstance(self.languages, list): + normalized_set: set[str] = set() # normalized strings are saved to a set to mitigate potential duplicates + edge_cases: set[str] = set() # helper set to print out all encountered edge-cases during mapping + + for language_item in self.languages: + # making sure the list items are actually strings: + if language_item and isinstance(language_item, str): + # if the string has (accidental) whitespaces, strip them before parsing: + language_item = language_item.strip() + + if len(language_item) < 2: + # logging.debug( + # f"LanguageMapper detected an INVALID language string: '{language_item}' (string length is " + # f"too short to be valid. Dropping string...)" + # ) + edge_cases.add(language_item) + # strings which are shorter than 2 chars cannot be valid ISO 639-1 + # this case might happen if there are typos or empty whitespace strings (e.g. 
" ") + if 2 <= len(language_item) <= 5 and len(language_item) != 4: + # this case covers the majority of pre-formatted language-codes, e.g.: + # "de", "EN", "de-DE", "de_DE", "en_US" or "sgn" + # logging.debug( + # f"LanguageMapper detected a potential 2-to-4-letter language code: '{language_item}'" + # ) + normalized_str: str | None = self._normalize_string_to_language_code(language_item) + if normalized_str: + normalized_set.add(normalized_str) + else: + edge_cases.add(language_item) + if len(language_item) == 4 or len(language_item) > 5: + # natural language edge-cases like "Deutsch", "german", "englisch" are handled here + # logging.debug( + # f"LanguageMapper detected a POTENTIALLY INVALID language string: '{language_item}'. " + # f"(String is too long to be a 2- or 4-letter-code). " + # f"Trying to match natural language string to language code..." + # ) + try: + langcodes_result: langcodes.Language = langcodes.find(language_item) + # using the langcodes Package as a fallback for ambiguous strings + # see: https://github.com/rspeer/langcodes/tree/master#recognizing-language-names-in-natural-language + if langcodes_result: + langcode_detected = langcodes_result.to_tag() + # logging.debug( + # f"Detected language code '{langcode_detected}' from string '{language_item}'." + # ) + # ToDo - optional: maybe compare distance between 'langcodes' and 'babel' result? + # see: https://github.com/rspeer/langcodes/tree/master#comparing-and-matching-languages + # + normalized_str: str | None = self._normalize_string_to_language_code(langcode_detected) + normalized_set.add(normalized_str) + except LookupError: + # if langcodes couldn't find a natural language description, it will throw a LookupError + # in that case we can't map the value and add it to our collected edge-cases + edge_cases.add(language_item) + + if edge_cases: + logging.info( + f"LanguageMapper could NOT map the following edge-cases to a normalized language code: " + f"{list(edge_cases)}" + ) + if normalized_set: + # happy case: all recognized and normalized language codes were collected in our result set + # -> now we need to typecast them as list[str] so they can be used within Scrapy's Field class + result_list: list[str] = list(normalized_set) + # to make testing easier, sort the result list before returning it + result_list.sort() + return result_list + else: + # sad case: if not a single mapping was possible, our result set is empty + return None + else: + logging.warning(f"LanguageMapper expected list[str] but received unexpected type {type(self.languages)} ") + return None + + +if __name__ == "__main__": + # creating a LanguageMapper object for debugging with specific cases that we observed over the years: + language_candidates: list[str] = [ + "de", + "de-DE", + "en_US", + "Deutsch", + "fr-FR", + "", + "failed string input", # random string + "no_NO", # does not exist + "Englisch", + ] + lm = LanguageMapper(languages=language_candidates) + normalized_langs = lm.normalize_list_of_language_strings() + print(f"LanguageMapper result (language codes): {normalized_langs}") diff --git a/converter/util/test_language_mapper.py b/converter/util/test_language_mapper.py new file mode 100644 index 00000000..483f9677 --- /dev/null +++ b/converter/util/test_language_mapper.py @@ -0,0 +1,57 @@ +import logging + +import pytest + +from .language_mapper import LanguageMapper + + +class TestLanguageMapper: + @pytest.mark.parametrize( + "test_input, expected_result", + [ + ("en-US", "en"), + ("de-DE", "de"), + ("de_DE", "de"), + ("fr-FR", 
"fr"), + ("Deutsch", None), + ("this string is invalid", None), + ], + ) + def test_normalize_string_to_language_code(self, test_input, expected_result): + test_mapper = LanguageMapper() + assert test_mapper._normalize_string_to_language_code(test_input) == expected_result + + @pytest.mark.parametrize( + "test_input, expected_result", + [ + (["en-US"], ["en"]), + (["en-GB"], ["en"]), + (["en_UK"], ["en"]), + (["en"], ["en"]), + (["de-DE"], ["de"]), + (["de_DE"], ["de"]), + (["de"], ["de"]), + (["DE"], ["de"]), + (["deu"], ["de"]), + (["ger"], ["de"]), + (["fr"], ["fr"]), + (["fra"], ["fr"]), + (["fre"], ["fr"]), + # some websites and APIs provide languages as natural languages: + (["französisch"], ["fr"]), + (["deutsch"], ["de"]), + (["German", "german"], ["de"]), + (["englisch"], ["en"]), + (["English"], ["en"]), + (["Spanish"], ["es"]), + (["español"], ["es"]), + (["chinese"], ["zh"]), + # keep only the 3 correct, unique language codes: + (["de-DE", "en_GB", "fr-FR", "", " en ", "german"], ["de", "en", "fr"]), + # These codes don't exist: + (["no_NO", "fa_IL"], None), + ], + ) + def test_normalize_list_of_language_strings(self, test_input, expected_result): + test_mapper = LanguageMapper(languages=test_input) + assert test_mapper.normalize_list_of_language_strings() == expected_result From d699b1fa6eaf26fea8847b4b66136b8e30672581 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Oct 2023 18:29:50 +0200 Subject: [PATCH 373/590] feat: NormLanguagePipeline (normalize strings to 2-letter-language-codes according to ISO 693-1) - add: NormLanguagePipeline to settings.py - feat: implement language string normalization for "lom.general.language" and "lom.educational.language" - change: "lom.educational.language" to multi-value field (otherwise it would drop all other strings besides the first one) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/items.py | 2 +- converter/pipelines.py | 28 ++++++++++++++++++++++++++++ converter/settings.py | 1 + 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/converter/items.py b/converter/items.py index fd9bca2c..c637fc52 100644 --- a/converter/items.py +++ b/converter/items.py @@ -159,7 +159,7 @@ class LomEducationalItem(Item): interactivityType = Field() """Corresponding edu-sharing property: 'ccm:educationalinteractivitytype'""" # ToDo: 'ccm:educationalinteractivitytype' is currently not used anywhere in edu-sharing - language = Field() + language = Field(output_processor=JoinMultivalues()) # ToDo: "Educational language" seems to be unused in edu-sharing. 
semanticDensity = Field() # ToDo: 'semanticDensity' is not used anywhere and there doesn't appear to be an edu-sharing property for it diff --git a/converter/pipelines.py b/converter/pipelines.py index ba335783..26047988 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -32,6 +32,7 @@ from converter.constants import * from converter.es_connector import EduSharing from converter.items import BaseItem +from converter.util.language_mapper import LanguageMapper from converter.web_tools import WebTools, WebEngine from valuespace_converter.app.valuespaces import Valuespaces @@ -157,6 +158,33 @@ def process_item(self, raw_item, spider): raise DropItem(f'Item {item} was dropped for not providing enough metadata') +class NormLanguagePipeline(BasicPipeline): + """Normalize raw or ambiguous language strings to 2-letter-language-codes (ISO 639-1).""" + def process_item(self, item, spider): + item_adapter = ItemAdapter(item) + try: + lom_general_languages: list[str] = item_adapter["lom"]["general"]["language"] + if lom_general_languages: + language_mapper = LanguageMapper(languages=lom_general_languages) + normalized_language_codes: list[str] | None = language_mapper.normalize_list_of_language_strings() + if normalized_language_codes: + item_adapter["lom"]["general"]["language"] = normalized_language_codes + except KeyError: + # happens when the "language" field does not exist within lom.general + pass + try: + lom_educational_languages: list[str] = item_adapter["lom"]["educational"]["language"] + if lom_educational_languages: + language_mapper = LanguageMapper(languages=lom_educational_languages) + normalized_language_codes: list[str] | None = language_mapper.normalize_list_of_language_strings() + if normalized_language_codes: + item_adapter["lom"]["general"]["language"] = normalized_language_codes + except KeyError: + # happens when the "language" field does not exist within lom.educational + pass + return item + + class NormLicensePipeline(BasicPipeline): def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) diff --git a/converter/settings.py b/converter/settings.py index ccd6cc80..24bb5631 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -117,6 +117,7 @@ "converter.pipelines.FilterSparsePipeline": 25, "converter.pipelines.LOMFillupPipeline": 100, "converter.pipelines.NormLicensePipeline": 125, + "converter.pipelines.NormLanguagePipeline": 150, "converter.pipelines.ConvertTimePipeline": 200, "converter.pipelines.ProcessValuespacePipeline": 250, "converter.pipelines.ProcessThumbnailPipeline": 300, From cb2d694b62a5625c9f876ff26d1aab0c98cd9425 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 27 Oct 2023 11:36:05 +0200 Subject: [PATCH 374/590] docs: explain SERLO_INSTANCE default setting Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/.env.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/.env.example b/converter/.env.example index fbf2fe3c..b1889b99 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -77,7 +77,7 @@ YOUTUBE_API_KEY="" # --- serlo_spider (v0.2.8+) settings: # SERLO_MODIFIED_AFTER="2023-07-01" # Crawl only Serlo Materials which have been modified (by Serlo authors) after # . Use this setting to improve the crawling speed of periodic crawls. 
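To make the intended effect of the NormLanguagePipeline added above concrete, a hypothetical before/after of the affected field (item structure heavily abbreviated):

# hypothetical raw item as it might leave a crawler before the pipeline runs:
raw_item = {"lom": {"general": {"language": ["de-DE", "Englisch", " "]}}}
# after NormLanguagePipeline.process_item(raw_item, spider), the field is expected to hold
# normalized ISO-639-1 codes instead of the raw strings:
# raw_item["lom"]["general"]["language"] == ["de", "en"]
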
-# SERLO_INSTANCE="de" +# SERLO_INSTANCE="de" # optional setting (defaults to: "de" if not actively set) # Available Serlo "instance" values (as of 2023-08-02): "de" | "en" | "es" | "fr" | "hi" | "ta" # --- lehreronline_spider Settings From 6a37ea89a30850e5f7003b2114a9d65682a8c2ea Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 27 Nov 2023 23:16:17 +0100 Subject: [PATCH 375/590] build/chore: upgrade dependencies (Scrapy 2.11 ...) - upgrade all (safely) upgradeable dependencies to the newest versions - tested against Python 3.11.6 with serlo_spider Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- poetry.lock | 1089 ++++++++++++++++++++++++---------------------- pyproject.toml | 18 +- requirements.txt | 78 ++-- 3 files changed, 620 insertions(+), 565 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0b59fc89..d6520bfc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -90,33 +90,29 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.7.0" +version = "23.11.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = 
"black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, + {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, + {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, + {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, + {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, + {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, + {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, + {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, + {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, + {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, + {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, + {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, + {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, + {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, + {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, + {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, + {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, + {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, ] [package.dependencies] @@ -126,6 +122,7 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = 
">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -135,86 +132,74 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = 
"cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = 
"cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -222,97 +207,112 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = 
"sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -331,24 +331,24 @@ files = [ [[package]] name = "constantly" -version = "15.1.0" +version = "23.10.4" description = "Symbolic constants in Python" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, - {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, + {file = "constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9"}, + {file = "constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd"}, ] [[package]] name = "courlan" -version = "0.9.3" +version = "0.9.4" description = "Clean, filter and sample URLs to optimize data collection – includes spam, content type and language filters." optional = false python-versions = ">=3.6" files = [ - {file = "courlan-0.9.3-py3-none-any.whl", hash = "sha256:d76fe98be7cfef077b7099d8d906e817f9c20f610a7c29d676f1869e7b45144d"}, - {file = "courlan-0.9.3.tar.gz", hash = "sha256:7e6dd2a7e57853b94db9656ee1769b8cdaccbb4982346891c42338329d7e0958"}, + {file = "courlan-0.9.4-py3-none-any.whl", hash = "sha256:72f03e307f3dfe91e0c790ab0766bab2aa319fdb09597daec920e4a394cfc14e"}, + {file = "courlan-0.9.4.tar.gz", hash = "sha256:6906aa9a15ae9d442821e06ae153c60f385cff41a8d44b9597c00b349f7043c5"}, ] [package.dependencies] @@ -358,34 +358,34 @@ urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} [[package]] name = "cryptography" -version = "41.0.3" +version = "41.0.6" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = 
"sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, + {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"}, + {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"}, + {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"}, + {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"}, + {file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"}, + {file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"}, + {file = "cryptography-41.0.6.tar.gz", hash = 
"sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"}, ] [package.dependencies] @@ -414,13 +414,13 @@ files = [ [[package]] name = "dateparser" -version = "1.1.8" +version = "1.2.0" description = "Date parsing library designed to parse dates from HTML pages" optional = false python-versions = ">=3.7" files = [ - {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, - {file = "dateparser-1.1.8.tar.gz", hash = "sha256:86b8b7517efcc558f085a142cdb7620f0921543fcabdb538c8a4c4001d8178e3"}, + {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, + {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, ] [package.dependencies] @@ -436,13 +436,13 @@ langdetect = ["langdetect"] [[package]] name = "django" -version = "4.2.4" +version = "4.2.7" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.4-py3-none-any.whl", hash = "sha256:860ae6a138a238fc4f22c99b52f3ead982bb4b1aad8c0122bcd8c8a3a02e409d"}, - {file = "Django-4.2.4.tar.gz", hash = "sha256:7e4225ec065e0f354ccf7349a22d209de09cc1c074832be9eb84c51c1799c432"}, + {file = "Django-4.2.7-py3-none-any.whl", hash = "sha256:e1d37c51ad26186de355cbcec16613ebdabfa9689bbade9c538835205a8abbe9"}, + {file = "Django-4.2.7.tar.gz", hash = "sha256:8e0f1c2c2786b5c0e39fe1afce24c926040fad47c8ea8ad30aaf1188df29fc41"}, ] [package.dependencies] @@ -456,13 +456,13 @@ bcrypt = ["bcrypt"] [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -494,18 +494,19 @@ cli = ["requests"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.13.1" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" @@ -525,75 +526,72 @@ pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "greenlet" -version = "2.0.2" +version = "3.0.1" description = "Lightweight in-process concurrent programming" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +python-versions = ">=3.7" +files = [ + {file = 
"greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, + {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, + {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, + {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, + {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, + {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, + {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, + {file = 
"greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, + {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, + {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, + {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, + {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, + {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, + {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, + {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = 
"sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, + {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, + {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, + {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, + {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, + {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, ] [package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +docs = ["Sphinx"] test = ["objgraph", "psutil"] [[package]] @@ -664,6 +662,28 @@ urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} all = ["cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "urllib3[brotli]"] speed = ["cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "urllib3[brotli]"] +[[package]] +name = "htmldate" +version = "1.6.0" +description = "Fast and robust extraction of original and updated publication dates from URLs and web pages." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "htmldate-1.6.0-py3-none-any.whl", hash = "sha256:6ee374849fe7491b3e6c0b26066e8f6940367b0215e7c4fec88774af065a4dbc"}, + {file = "htmldate-1.6.0.tar.gz", hash = "sha256:5827c8f626a16800a29e57e8188a3d32d0b08ca4c7bd662537b73bbbf22c45a6"}, +] + +[package.dependencies] +charset-normalizer = {version = ">=3.3.2", markers = "python_version >= \"3.7\""} +dateparser = ">=1.1.2" +lxml = {version = ">=4.9.3", markers = "platform_system != \"Darwin\""} +python-dateutil = ">=2.8.2" +urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} + +[package.extras] +all = ["backports-datetime-fromisoformat", "faust-cchardet (>=2.1.19)", "urllib3[brotli]"] +speed = ["backports-datetime-fromisoformat", "faust-cchardet (>=2.1.19)", "urllib3[brotli]"] + [[package]] name = "hyperlink" version = "21.0.0" @@ -680,13 +700,13 @@ idna = ">=2.5" [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -1073,13 +1093,13 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -1182,13 +1202,13 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "platformdirs" -version = "3.10.0" +version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, + {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, ] [package.extras] @@ -1197,33 +1217,33 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "playwright" -version = "1.36.0" +version = "1.40.0" description = "A high-level API to automate web browsers" optional = false python-versions = ">=3.8" files = [ - {file = "playwright-1.36.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b7c6ddfca2b141b0385387cc56c125b14ea867902c39e3fc650ddd6c429b17da"}, - {file = "playwright-1.36.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:428a719a6c7e40781c19860ed813840ac2d63678f7587abe12e800ea030d4b7e"}, - {file = "playwright-1.36.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:4e396853034742b76654cdab27422155d238f46e4dc6369ea75854fafb935586"}, - {file = "playwright-1.36.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:72e80076e595f5fcd8ebd89bf6635ad78e4bafa633119faed8b2568d17dbd398"}, - {file = "playwright-1.36.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffbb927679b62fad5071439d5fe0840af46ad1844bc44bf80e1a0ad706140c98"}, - {file = "playwright-1.36.0-py3-none-win32.whl", hash = "sha256:84213339f179fd2a70f77ea7faea0616d74871349d556c53a1ecb7dd5097973c"}, - {file = "playwright-1.36.0-py3-none-win_amd64.whl", hash = "sha256:89ca2261bb00b67d3dff97691cf18f4347ee0529a11e431e47df67b703d4d8fa"}, + {file = "playwright-1.40.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:35b7e0b389df2aa632f3614d35be7bace35f6f634d880db44b035c83e4481312"}, + {file = "playwright-1.40.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:382a7465cc0ea3bf7fa66716bd37fd53f66af4bcc5c72283a8eff3f6e87758a8"}, + {file = "playwright-1.40.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:f11e1ec32f3b3dbd7f24d1481c313cb527001955004ee88a73f9b4a610d0db28"}, + {file = "playwright-1.40.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:6a842dca4dd53feda1d7bd0e14aa65140e4e816452ebddd307e90cad184d92bd"}, + {file = "playwright-1.40.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ec3746de69e7ba912b70e0fe3a3c6b8af97f21ece793c5db27c251da4d2f3e6"}, + {file = "playwright-1.40.0-py3-none-win32.whl", hash = "sha256:3ae90ea5ad776fe5e1300a9c730244c8e57a183c6eb261044418710d51ae03c0"}, + {file = "playwright-1.40.0-py3-none-win_amd64.whl", hash = "sha256:ba5a89953aedb158025e4581eafb6fdeebb3d58acd9ce24b59f691b1e2a861bc"}, ] [package.dependencies] -greenlet = "2.0.2" -pyee = "9.0.4" +greenlet = "3.0.1" +pyee = "11.0.1" [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = 
"sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1232,27 +1252,24 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protego" -version = "0.2.1" +version = "0.3.0" description = "Pure-Python robots.txt parser with support for modern conventions" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "Protego-0.2.1-py2.py3-none-any.whl", hash = "sha256:04419b18f20e8909f1691c6b678392988271cc2a324a72f9663cb3af838b4bf7"}, - {file = "Protego-0.2.1.tar.gz", hash = "sha256:df666d4304dab774e2dc9feb208bb1ac8d71ea5ceec12f4c99eba30fbd642ff2"}, + {file = "Protego-0.3.0-py2.py3-none-any.whl", hash = "sha256:db38f6a945839d8162a4034031a21490469566a2726afb51d668497c457fb0aa"}, + {file = "Protego-0.3.0.tar.gz", hash = "sha256:04228bffde4c6bcba31cf6529ba2cfd6e1b70808fdc1d2cb4301be6b28d6c568"}, ] -[package.dependencies] -six = "*" - [[package]] name = "pyasn1" -version = "0.5.0" +version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, ] [[package]] @@ -1271,13 +1288,13 @@ pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pycodestyle" -version = "2.11.0" +version = "2.11.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, - {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] @@ -1307,18 +1324,21 @@ dev = ["tox"] [[package]] name = "pyee" -version = "9.0.4" -description = "A port of node.js's EventEmitter to python." 
+version = "11.0.1" +description = "A rough port of Node.js's EventEmitter to Python with a few tricks of its own" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "pyee-9.0.4-py2.py3-none-any.whl", hash = "sha256:9f066570130c554e9cc12de5a9d86f57c7ee47fece163bbdaa3e9c933cfbdfa5"}, - {file = "pyee-9.0.4.tar.gz", hash = "sha256:2770c4928abc721f46b705e6a72b0c59480c4a69c9a83ca0b00bb994f1ea4b32"}, + {file = "pyee-11.0.1-py3-none-any.whl", hash = "sha256:9bcc9647822234f42c228d88de63d0f9ffa881e87a87f9d36ddf5211f6ac977d"}, + {file = "pyee-11.0.1.tar.gz", hash = "sha256:a642c51e3885a33ead087286e35212783a4e9b8d6514a10a5db4e57ac57b2b29"}, ] [package.dependencies] typing-extensions = "*" +[package.extras] +dev = ["black", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio", "pytest-trio", "toml", "tox", "trio", "trio", "trio-typing", "twine", "twisted", "validate-pyproject[all]"] + [[package]] name = "pyflakes" version = "3.1.0" @@ -1332,20 +1352,20 @@ files = [ [[package]] name = "pyopenssl" -version = "23.2.0" +version = "23.3.0" description = "Python wrapper module around the OpenSSL library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, - {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, + {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, + {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, ] [package.dependencies] -cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" +cryptography = ">=41.0.5,<42" [package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] @@ -1389,13 +1409,13 @@ rdflib = "*" [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -1439,13 +1459,13 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = 
"sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -1482,99 +1502,99 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "regex" -version = "2023.6.3" +version = "2023.10.3" description = "Alternative regular expression module, to replace re." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, - {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, - {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, - {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, - {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, - {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, - {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, - {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, - 
{file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, - {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, - {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, - {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, - {file = 
"regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, - {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, - {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, - {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, - {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, 
- {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, + {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, + {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, + {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, + {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, + {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, + {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, + {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, + {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, + {file = 
"regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, + {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, + {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, + {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, + {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, + {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, + {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, + {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, + {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, + {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, + 
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, + {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, + {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, + {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, + {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, + {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, + {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, + {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, + {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, + {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, + {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, + {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, + {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, + {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, + {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, + {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, ] [[package]] @@ -1615,21 +1635,21 @@ six = "*" [[package]] name = "scrapy" -version = "2.9.0" +version = "2.11.0" description = 
"A high-level Web Crawling and Web Scraping framework" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Scrapy-2.9.0-py2.py3-none-any.whl", hash = "sha256:908fdb7874d235230a16fa288637e3f673813cf27fb177f589b5a22bad00b0f9"}, - {file = "Scrapy-2.9.0.tar.gz", hash = "sha256:564c972b56e54b83141f395ce3f6a25bfe2093d61d13f9b81d05384e19db98da"}, + {file = "Scrapy-2.11.0-py2.py3-none-any.whl", hash = "sha256:a7f36544d1f5ceb13cff9b7bc904bd7c0fc43a3af0fbe5aa2034fd937cf092d1"}, + {file = "Scrapy-2.11.0.tar.gz", hash = "sha256:3cbdedce0c3f0e0482d61be2d7458683be7cd7cf14b0ee6adfbaddb80f5b36a5"}, ] [package.dependencies] -cryptography = ">=3.4.6" +cryptography = ">=36.0.0" cssselect = ">=0.9.1" itemadapter = ">=0.1.0" itemloaders = ">=1.0.1" -lxml = ">=4.3.0" +lxml = ">=4.4.1" packaging = "*" parsel = ">=1.5.0" protego = ">=0.1.15" @@ -1640,7 +1660,7 @@ queuelib = ">=1.4.2" service-identity = ">=18.1.0" setuptools = "*" tldextract = "*" -Twisted = ">=18.9.0" +Twisted = ">=18.9.0,<23.8.0" w3lib = ">=1.17.0" "zope.interface" = ">=5.1.0" @@ -1681,19 +1701,19 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "68.0.0" +version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w 
(>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1708,13 +1728,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -1746,13 +1766,13 @@ files = [ [[package]] name = "tldextract" -version = "3.4.4" +version = "5.1.1" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tldextract-3.4.4-py3-none-any.whl", hash = "sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2"}, - {file = "tldextract-3.4.4.tar.gz", hash = "sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234"}, + {file = "tldextract-5.1.1-py3-none-any.whl", hash = "sha256:b9c4510a8766d377033b6bace7e9f1f17a891383ced3c5d50c150f181e9e1cc2"}, + {file = "tldextract-5.1.1.tar.gz", hash = "sha256:9b6dbf803cb5636397f0203d48541c0da8ba53babaf0e8a6feda2d88746813d4"}, ] [package.dependencies] @@ -1761,6 +1781,9 @@ idna = "*" requests = ">=2.1.0" requests-file = ">=1.4" +[package.extras] +testing = ["black", "mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "tox", "types-filelock", "types-requests"] + [[package]] name = "tomli" version = "2.0.1" @@ -1796,6 +1819,30 @@ urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} all = ["brotli", "cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "htmldate[speed] (>=1.4.3)", "py3langid (>=0.2.2)", "pycurl (>=7.45.2)"] gui = ["Gooey (>=1.0.1)"] +[[package]] +name = "trafilatura" +version = "1.6.2" +description = "Python package and command-line tool designed to gather text on the Web. It includes discovery, extraction and text processing components. Its main applications are web crawling, downloads, scraping, and extraction of main texts, metadata and comments." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "trafilatura-1.6.2-py3-none-any.whl", hash = "sha256:5bf97ed0d09eda4393770360b6e2b8851781d35fdff2d0276ec705a4f7791047"}, + {file = "trafilatura-1.6.2.tar.gz", hash = "sha256:a984630ad9c54d9fe803555d00f5a028ca65c766ce89bfd87d976f561c55b503"}, +] + +[package.dependencies] +certifi = "*" +charset-normalizer = {version = ">=3.2.0", markers = "python_version >= \"3.7\""} +courlan = ">=0.9.4" +htmldate = ">=1.5.1" +justext = ">=3.0.0" +lxml = {version = ">=4.9.3", markers = "platform_system != \"Darwin\""} +urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} + +[package.extras] +all = ["brotli", "cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "htmldate[speed] (>=1.5.1)", "py3langid (>=0.2.2)", "pycurl (>=7.45.2)"] +gui = ["Gooey (>=1.0.1)"] + [[package]] name = "twisted" version = "22.10.0" @@ -1836,38 +1883,41 @@ windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0. [[package]] name = "twisted-iocpsupport" -version = "1.0.3" +version = "1.0.4" description = "An extension for use in the twisted I/O Completion Ports reactor." optional = false python-versions = "*" files = [ - {file = "twisted-iocpsupport-1.0.3.tar.gz", hash = "sha256:afb00801fdfbaccf0d0173a722626500023d4a19719ac9f129d1347a32e2fc66"}, - {file = "twisted_iocpsupport-1.0.3-cp310-cp310-win32.whl", hash = "sha256:a379ef56a576c8090889f74441bc3822ca31ac82253cc61e8d50631bcb0c26d0"}, - {file = "twisted_iocpsupport-1.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1ea2c3fbdb739c95cc8b3355305cd593d2c9ec56d709207aa1a05d4d98671e85"}, - {file = "twisted_iocpsupport-1.0.3-cp311-cp311-win32.whl", hash = "sha256:7efcdfafb377f32db90f42bd5fc5bb32cd1e3637ee936cdaf3aff4f4786ab3bf"}, - {file = "twisted_iocpsupport-1.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1dbfac706972bf9ec5ce1ddbc735d2ebba406ad363345df8751ffd5252aa1618"}, - {file = "twisted_iocpsupport-1.0.3-cp36-cp36m-win32.whl", hash = "sha256:1ddfc5fa22ec6f913464b736b3f46e642237f17ac41be47eed6fa9bd52f5d0e0"}, - {file = "twisted_iocpsupport-1.0.3-cp36-cp36m-win_amd64.whl", hash = "sha256:1bdccbb22199fc69fd7744d6d2dfd22d073c028c8611d994b41d2d2ad0e0f40d"}, - {file = "twisted_iocpsupport-1.0.3-cp37-cp37m-win32.whl", hash = "sha256:db11c80054b52dbdea44d63d5474a44c9a6531882f0e2960268b15123088641a"}, - {file = "twisted_iocpsupport-1.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:67bec1716eb8f466ef366bbf262e1467ecc9e20940111207663ac24049785bad"}, - {file = "twisted_iocpsupport-1.0.3-cp38-cp38-win32.whl", hash = "sha256:98a6f16ab215f8c1446e9fc60aaed0ab7c746d566aa2f3492a23cea334e6bebb"}, - {file = "twisted_iocpsupport-1.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:4f249d0baac836bb431d6fa0178be063a310136bc489465a831e3abd2d7acafd"}, - {file = "twisted_iocpsupport-1.0.3-cp39-cp39-win32.whl", hash = "sha256:aaca8f30c3b7c80d27a33fe9fe0d0bac42b1b012ddc60f677175c30e1becc1f3"}, - {file = "twisted_iocpsupport-1.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:dff43136c33665c2d117a73706aef6f7d6433e5c4560332a118fe066b16b8695"}, - {file = "twisted_iocpsupport-1.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8faceae553cfadc42ad791b1790e7cdecb7751102608c405217f6a26e877e0c5"}, - {file = "twisted_iocpsupport-1.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6f8c433faaad5d53d30d1da6968d5a3730df415e2efb6864847267a9b51290cd"}, - {file = "twisted_iocpsupport-1.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3f39c41c0213a81a9ce0961e30d0d7650f371ad80f8d261007d15a2deb6d5be3"}, + {file = 
"twisted-iocpsupport-1.0.4.tar.gz", hash = "sha256:858096c0d15e33f15ac157f455d8f86f2f2cdd223963e58c0f682a3af8362d89"}, + {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win32.whl", hash = "sha256:afa2b630797f9ed2f27f3d9f55e3f72b4244911e45a8c82756f44babbf0b243e"}, + {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:0058c963c8957bcd3deda62122e89953c9de1e867a274facc9b15dde1a9f31e8"}, + {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win32.whl", hash = "sha256:196f7c7ccad4ba4d1783b1c4e1d1b22d93c04275cd780bf7498d16c77319ad6e"}, + {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:4e5f97bcbabdd79cbaa969b63439b89801ea560f11d42b0a387634275c633623"}, + {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win32.whl", hash = "sha256:6081bd7c2f4fcf9b383dcdb3b3385d75a26a7c9d2be25b6950c3d8ea652d2d2d"}, + {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:76f7e67cec1f1d097d1f4ed7de41be3d74546e1a4ede0c7d56e775c4dce5dfb0"}, + {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:3d306fc4d88a6bcf61ce9d572c738b918578121bfd72891625fab314549024b5"}, + {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:391ac4d6002a80e15f35adc4ad6056f4fe1c17ceb0d1f98ba01b0f4f917adfd7"}, + {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:0c1b5cf37f0b2d96cc3c9bc86fff16613b9f5d0ca565c96cf1f1fb8cfca4b81c"}, + {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:3c5dc11d72519e55f727320e3cee535feedfaee09c0f0765ed1ca7badff1ab3c"}, + {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win32.whl", hash = "sha256:cc86c2ef598c15d824a243c2541c29459881c67fc3c0adb6efe2242f8f0ec3af"}, + {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c27985e949b9b1a1fb4c20c71d315c10ea0f93fdf3ccdd4a8c158b5926edd8c8"}, + {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win32.whl", hash = "sha256:e311dfcb470696e3c077249615893cada598e62fa7c4e4ca090167bd2b7d331f"}, + {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4574eef1f3bb81501fb02f911298af3c02fe8179c31a33b361dd49180c3e644d"}, + {file = "twisted_iocpsupport-1.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:872747a3b64e2909aee59c803ccd0bceb9b75bf27915520ebd32d69687040fa2"}, + {file = "twisted_iocpsupport-1.0.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:c2712b778bacf1db434e3e065adfed3db300754186a29aecac1efae9ef4bcaff"}, + {file = "twisted_iocpsupport-1.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7c66fa0aa4236b27b3c61cb488662d85dae746a6d1c7b0d91cf7aae118445adf"}, + {file = "twisted_iocpsupport-1.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:300437af17396a945a58dcfffd77863303a8b6d9e65c6e81f1d2eed55b50d444"}, ] [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = 
"sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -1883,35 +1933,34 @@ files = [ [[package]] name = "tzlocal" -version = "5.0.1" +version = "5.2" description = "tzinfo object for the local timezone" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tzlocal-5.0.1-py3-none-any.whl", hash = "sha256:f3596e180296aaf2dbd97d124fe76ae3a0e3d32b258447de7b939b3fd4be992f"}, - {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, ] [package.dependencies] tzdata = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] [[package]] name = "urllib3" -version = "2.0.4" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1930,13 +1979,13 @@ python-dateutil = ">=2.4.0" [[package]] name = "w3lib" -version = "2.1.1" +version = "2.1.2" description = "Library of web-related functions" optional = false python-versions = ">=3.7" files = [ - {file = "w3lib-2.1.1-py3-none-any.whl", hash = "sha256:7fd5bd7980a95d1a8185e867d05f68a591aa281a3ded4590d2641d7b09086ed4"}, - {file = "w3lib-2.1.1.tar.gz", hash = "sha256:0e1198f1b745195b6b3dd1a4cd66011fbf82f30a4d9dabaee1f9e5c86f020274"}, + {file = "w3lib-2.1.2-py3-none-any.whl", hash = "sha256:c4432926e739caa8e3f49f5de783f336df563d9490416aebd5d39fb896d264e7"}, + {file = "w3lib-2.1.2.tar.gz", hash = "sha256:ed5b74e997eea2abe3c1321f916e344144ee8e9072a6f33463ee8e57f858a4b1"}, ] [[package]] @@ -1952,13 +2001,13 @@ files = [ [[package]] name = "wheel" -version = "0.41.0" +version = "0.42.0" description = "A built-package format for Python" optional = false python-versions = ">=3.7" files = [ - {file = "wheel-0.41.0-py3-none-any.whl", hash = "sha256:7e9be3bbd0078f6147d82ed9ed957e323e7708f57e134743d2edef3a7b7972a9"}, - {file = "wheel-0.41.0.tar.gz", hash = "sha256:55a0f0a5a84869bce5ba775abfd9c462e3a6b1b7b7ec69d72c0b83d673a5114d"}, + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, ] [package.extras] @@ 
-1977,52 +2026,58 @@ files = [ [[package]] name = "zope-interface" -version = "6.0" +version = "6.1" description = "Interfaces for Python" optional = false python-versions = ">=3.7" files = [ - {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, - {file = "zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, - {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, - {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, - {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, - {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, - {file = "zope.interface-6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788"}, - {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca"}, - {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a"}, - {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc"}, - {file = "zope.interface-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373"}, - {file = "zope.interface-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f"}, - {file = "zope.interface-6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8"}, - {file = "zope.interface-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58"}, - {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446"}, - {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f"}, - {file = "zope.interface-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8"}, - {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, - {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, - {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, - {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, + {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"}, + {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"}, + {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"}, + {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"}, + {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"}, + {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"}, + {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"}, + {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"}, + {file = 
"zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"}, + {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"}, + {file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"}, + {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"}, + {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"}, + {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"}, + {file = "zope.interface-6.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:2f8d89721834524a813f37fa174bac074ec3d179858e4ad1b7efd4401f8ac45d"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13b7d0f2a67eb83c385880489dbb80145e9d344427b4262c49fbf2581677c11c"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef43ee91c193f827e49599e824385ec7c7f3cd152d74cb1dfe02cb135f264d83"}, + {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e441e8b7d587af0414d25e8d05e27040d78581388eed4c54c30c0c91aad3a379"}, + {file = "zope.interface-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89b28772fc2562ed9ad871c865f5320ef761a7fcc188a935e21fe8b31a38ca9"}, + {file = "zope.interface-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70d2cef1bf529bff41559be2de9d44d47b002f65e17f43c73ddefc92f32bf00f"}, + {file = "zope.interface-6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad54ed57bdfa3254d23ae04a4b1ce405954969c1b0550cc2d1d2990e8b439de1"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef467d86d3cfde8b39ea1b35090208b0447caaabd38405420830f7fd85fbdd56"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af47f10cfc54c2ba2d825220f180cc1e2d4914d783d6fc0cd93d43d7bc1c78b"}, + {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9559138690e1bd4ea6cd0954d22d1e9251e8025ce9ede5d0af0ceae4a401e43"}, + {file = "zope.interface-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:964a7af27379ff4357dad1256d9f215047e70e93009e532d36dcb8909036033d"}, + {file = "zope.interface-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:387545206c56b0315fbadb0431d5129c797f92dc59e276b3ce82db07ac1c6179"}, + {file = "zope.interface-6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:57d0a8ce40ce440f96a2c77824ee94bf0d0925e6089df7366c2272ccefcb7941"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ebc4d34e7620c4f0da7bf162c81978fce0ea820e4fa1e8fc40ee763839805f3"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a804abc126b33824a44a7aa94f06cd211a18bbf31898ba04bd0924fbe9d282d"}, + {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f294a15f7723fc0d3b40701ca9b446133ec713eafc1cc6afa7b3d98666ee1ac"}, + {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"}, + {file = "zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"}, ] [package.dependencies] setuptools = "*" [package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface"] +docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "afe59b677369bb8b9f3a93887f7be61d6270b69d757b3c2c33f846d749a182d5" +content-hash = "320c29e404798670618d7e13ff7d1892bded05e045223bd6895eb0bf104a6f03" diff --git a/pyproject.toml b/pyproject.toml index a2d0ea2c..c7f3c77e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,10 +59,10 @@ packages = [{include = "converter"}] [tool.poetry.dependencies] python = "^3.10" -wheel = "0.41.0" +wheel = "^0.42.0" black = "^23.7.0" -certifi="2023.7.22" -dateparser="1.1.8" +certifi="^2023.11.17" +dateparser="1.2" extruct="0.16.0" flake8 = "^6.1.0" html2text="2020.1.16" @@ -74,18 +74,18 @@ isodate="0.6.1" lxml="4.9.3" overrides="3.1.0" Pillow="10.0.0" -playwright="1.36.0" -pyOpenSSL="23.2.0" -pytest="7.4.0" +playwright="1.40" +pyOpenSSL="23.3.0" +pytest="^7.4.3" python-dateutil="2.8.2" python-dotenv="1.0.0" requests="2.31.0" six="1.16.0" -Scrapy="2.9.0" +Scrapy="2.11" scrapy-splash="0.9.0" -urllib3="2.0.4" +urllib3="2.1.0" vobject="0.9.6.1" -w3lib="2.1.1" +w3lib="2.1.2" xmltodict="0.13.0" trafilatura = "^1.6.1" babel = "2.13.1" diff --git a/requirements.txt b/requirements.txt index 5040adf6..ba1f2843 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,29 +3,29 @@ attrs==23.1.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" babel==2.13.1 ; python_version >= "3.10" and python_version < "4.0" beautifulsoup4==4.12.2 ; python_version >= "3.10" and python_version < "4.0" -black==23.7.0 ; python_version >= "3.10" and python_version < "4.0" -certifi==2023.7.22 ; python_version >= "3.10" and python_version < "4.0" -cffi==1.15.1 ; python_version >= "3.10" and python_version < "4.0" -charset-normalizer==3.2.0 ; python_version >= "3.10" and python_version < "4.0" -click==8.1.6 ; python_version >= "3.10" and python_version < "4.0" +black==23.11.0 ; python_version >= "3.10" and python_version < "4.0" +certifi==2023.11.17 ; python_version >= "3.10" and python_version < "4.0" +cffi==1.16.0 ; python_version >= "3.10" and python_version < "4.0" +charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" +click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" 
and (sys_platform == "win32" or platform_system == "Windows") -constantly==15.1.0 ; python_version >= "3.10" and python_version < "4.0" -courlan==0.9.3 ; python_version >= "3.10" and python_version < "4.0" -cryptography==41.0.3 ; python_version >= "3.10" and python_version < "4.0" +constantly==23.10.4 ; python_version >= "3.10" and python_version < "4.0" +courlan==0.9.4 ; python_version >= "3.10" and python_version < "4.0" +cryptography==41.0.6 ; python_version >= "3.10" and python_version < "4.0" cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -dateparser==1.1.8 ; python_version >= "3.10" and python_version < "4.0" -django==4.2.4 ; python_version >= "3.10" and python_version < "4.0" -exceptiongroup==1.1.2 ; python_version >= "3.10" and python_version < "3.11" +dateparser==1.2.0 ; python_version >= "3.10" and python_version < "4.0" +django==4.2.7 ; python_version >= "3.10" and python_version < "4.0" +exceptiongroup==1.2.0 ; python_version >= "3.10" and python_version < "3.11" extruct==0.16.0 ; python_version >= "3.10" and python_version < "4.0" -filelock==3.12.2 ; python_version >= "3.10" and python_version < "4.0" +filelock==3.13.1 ; python_version >= "3.10" and python_version < "4.0" flake8==6.1.0 ; python_version >= "3.10" and python_version < "4.0" -greenlet==2.0.2 ; python_version >= "3.10" and python_version < "4.0" +greenlet==3.0.1 ; python_version >= "3.10" and python_version < "4.0" html-text==0.5.2 ; python_version >= "3.10" and python_version < "4.0" html2text==2020.1.16 ; python_version >= "3.10" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" -htmldate==1.4.3 ; python_version >= "3.10" and python_version < "4.0" +htmldate==1.6.0 ; python_version >= "3.10" and python_version < "4.0" hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" -idna==3.4 ; python_version >= "3.10" and python_version < "4.0" +idna==3.6 ; python_version >= "3.10" and python_version < "4.0" image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" incremental==22.10.0 ; python_version >= "3.10" and python_version < "4.0" iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" @@ -44,54 +44,54 @@ mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" mf2py==1.1.3 ; python_version >= "3.10" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" -packaging==23.1 ; python_version >= "3.10" and python_version < "4.0" +packaging==23.2 ; python_version >= "3.10" and python_version < "4.0" parsel==1.8.1 ; python_version >= "3.10" and python_version < "4.0" pathspec==0.11.2 ; python_version >= "3.10" and python_version < "4.0" pillow==10.0.0 ; python_version >= "3.10" and python_version < "4.0" -platformdirs==3.10.0 ; python_version >= "3.10" and python_version < "4.0" -playwright==1.36.0 ; python_version >= "3.10" and python_version < "4.0" -pluggy==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -protego==0.2.1 ; python_version >= "3.10" and python_version < "4.0" +platformdirs==4.0.0 ; python_version >= "3.10" and python_version < "4.0" +playwright==1.40.0 ; python_version >= "3.10" and python_version < "4.0" +pluggy==1.3.0 ; python_version >= "3.10" and python_version < "4.0" +protego==0.3.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1-modules==0.3.0 ; python_version >= "3.10" and python_version < "4.0" -pyasn1==0.5.0 ; python_version >= 
"3.10" and python_version < "4.0" -pycodestyle==2.11.0 ; python_version >= "3.10" and python_version < "4.0" +pyasn1==0.5.1 ; python_version >= "3.10" and python_version < "4.0" +pycodestyle==2.11.1 ; python_version >= "3.10" and python_version < "4.0" pycparser==2.21 ; python_version >= "3.10" and python_version < "4.0" pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" -pyee==9.0.4 ; python_version >= "3.10" and python_version < "4.0" +pyee==11.0.1 ; python_version >= "3.10" and python_version < "4.0" pyflakes==3.1.0 ; python_version >= "3.10" and python_version < "4.0" -pyopenssl==23.2.0 ; python_version >= "3.10" and python_version < "4.0" +pyopenssl==23.3.0 ; python_version >= "3.10" and python_version < "4.0" pyparsing==3.1.1 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" pyrdfa3==3.5.3 ; python_version >= "3.10" and python_version < "4.0" -pytest==7.4.0 ; python_version >= "3.10" and python_version < "4.0" +pytest==7.4.3 ; python_version >= "3.10" and python_version < "4.0" python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" python-dotenv==1.0.0 ; python_version >= "3.10" and python_version < "4.0" -pytz==2023.3 ; python_version >= "3.10" and python_version < "4.0" +pytz==2023.3.post1 ; python_version >= "3.10" and python_version < "4.0" queuelib==1.6.2 ; python_version >= "3.10" and python_version < "4.0" rdflib==7.0.0 ; python_version >= "3.10" and python_version < "4.0" -regex==2023.6.3 ; python_version >= "3.10" and python_version < "4.0" +regex==2023.10.3 ; python_version >= "3.10" and python_version < "4.0" requests-file==1.5.1 ; python_version >= "3.10" and python_version < "4.0" requests==2.31.0 ; python_version >= "3.10" and python_version < "4.0" scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" -scrapy==2.9.0 ; python_version >= "3.10" and python_version < "4.0" +scrapy==2.11.0 ; python_version >= "3.10" and python_version < "4.0" service-identity==23.1.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==68.0.0 ; python_version >= "3.10" and python_version < "4.0" +setuptools==69.0.2 ; python_version >= "3.10" and python_version < "4.0" six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" -soupsieve==2.4.1 ; python_version >= "3.10" and python_version < "4.0" +soupsieve==2.5 ; python_version >= "3.10" and python_version < "4.0" sqlparse==0.4.4 ; python_version >= "3.10" and python_version < "4.0" tld==0.13 ; python_version >= "3.10" and python_version < "4" -tldextract==3.4.4 ; python_version >= "3.10" and python_version < "4.0" +tldextract==5.1.1 ; python_version >= "3.10" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" -trafilatura==1.6.1 ; python_version >= "3.10" and python_version < "4.0" -twisted-iocpsupport==1.0.3 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" +trafilatura==1.6.2 ; python_version >= "3.10" and python_version < "4.0" +twisted-iocpsupport==1.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" twisted==22.10.0 ; python_version >= "3.10" and python_version < "4.0" -typing-extensions==4.7.1 ; python_version >= "3.10" and python_version < "4.0" +typing-extensions==4.8.0 ; python_version >= "3.10" and python_version < "4.0" tzdata==2023.3 ; python_version >= 
"3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") -tzlocal==5.0.1 ; python_version >= "3.10" and python_version < "4.0" -urllib3==2.0.4 ; python_version >= "3.10" and python_version < "4.0" +tzlocal==5.2 ; python_version >= "3.10" and python_version < "4.0" +urllib3==2.1.0 ; python_version >= "3.10" and python_version < "4.0" vobject==0.9.6.1 ; python_version >= "3.10" and python_version < "4.0" -w3lib==2.1.1 ; python_version >= "3.10" and python_version < "4.0" +w3lib==2.1.2 ; python_version >= "3.10" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" -wheel==0.41.0 ; python_version >= "3.10" and python_version < "4.0" +wheel==0.42.0 ; python_version >= "3.10" and python_version < "4.0" xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" -zope-interface==6.0 ; python_version >= "3.10" and python_version < "4.0" +zope-interface==6.1 ; python_version >= "3.10" and python_version < "4.0" From 34c24cf089f01030170d584240287352cc4d87f0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 27 Nov 2023 23:24:53 +0100 Subject: [PATCH 376/590] build: upgrade Dockerfile to Python v3.11.6 Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index dec4cdb2..e7b60cb9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10.9-slim-buster +FROM python:3.11.6-slim-bookworm # ENV CRAWLER wirlernenonline_spider From 2c38142f3b53c488c20a6dc56bd757877ff23f9d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 27 Nov 2023 23:27:29 +0100 Subject: [PATCH 377/590] build: upgrade valuespace_converter.Dockerfile to Python v3.11.6 Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- valuespace_converter/valuespace_converter.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/valuespace_converter/valuespace_converter.Dockerfile b/valuespace_converter/valuespace_converter.Dockerfile index 010788ac..535165b4 100644 --- a/valuespace_converter/valuespace_converter.Dockerfile +++ b/valuespace_converter/valuespace_converter.Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10-alpine as base +FROM python:3.11.6-alpine as base COPY app/requirements.txt /requirements.txt From 9528934f8229040c6fd70428badc167cee05ce18 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Nov 2023 11:06:56 +0100 Subject: [PATCH 378/590] build: introduce 'httpx' dependency for async requests Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- poetry.lock | 90 +++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + requirements.txt | 5 +++ 3 files changed, 95 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index d6520bfc..19ddf355 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,26 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
+[[package]] +name = "anyio" +version = "4.1.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, + {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "asgiref" version = "3.7.2" @@ -594,6 +615,17 @@ files = [ docs = ["Sphinx"] test = ["objgraph", "psutil"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + [[package]] name = "html-text" version = "0.5.2" @@ -684,6 +716,51 @@ urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} all = ["backports-datetime-fromisoformat", "faust-cchardet (>=2.1.19)", "urllib3[brotli]"] speed = ["backports-datetime-fromisoformat", "faust-cchardet (>=2.1.19)", "urllib3[brotli]"] +[[package]] +name = "httpcore" +version = "1.0.2" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, + {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.23.0)"] + +[[package]] +name = "httpx" +version = "0.25.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, + {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "hyperlink" version = "21.0.0" @@ -1726,6 +1803,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "soupsieve" version = "2.5" @@ -2080,4 +2168,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "320c29e404798670618d7e13ff7d1892bded05e045223bd6895eb0bf104a6f03" +content-hash = "bf8435b68ce1c2a04527168ca0fe451f5b136a877495babc69c10246be0f8ca4" diff --git a/pyproject.toml b/pyproject.toml index c7f3c77e..fdfe773c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,6 +90,7 @@ xmltodict="0.13.0" trafilatura = "^1.6.1" babel = "2.13.1" langcodes = {extras = ["data"], version = "^3.3.0"} +httpx = "^0.25.2" [tool.poetry.group.dev.dependencies] diff --git a/requirements.txt b/requirements.txt index ba1f2843..285d7885 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +anyio==4.1.0 ; python_version >= "3.10" and python_version < "4.0" asgiref==3.7.2 ; python_version >= "3.10" and python_version < "4.0" attrs==23.1.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" @@ -20,10 +21,13 @@ extruct==0.16.0 ; python_version >= "3.10" and python_version < "4.0" filelock==3.13.1 ; python_version >= "3.10" and python_version < "4.0" flake8==6.1.0 ; python_version >= "3.10" and python_version < "4.0" greenlet==3.0.1 ; python_version >= "3.10" and python_version < "4.0" +h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" html-text==0.5.2 ; python_version >= "3.10" and python_version < "4.0" html2text==2020.1.16 ; python_version >= "3.10" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" htmldate==1.6.0 ; python_version >= "3.10" and python_version < "4.0" +httpcore==1.0.2 ; python_version >= "3.10" and python_version < "4.0" +httpx==0.25.2 ; python_version >= "3.10" and python_version < "4.0" hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" idna==3.6 ; python_version >= "3.10" and python_version < "4.0" image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" @@ -77,6 +81,7 @@ scrapy==2.11.0 ; python_version >= "3.10" and python_version < "4.0" service-identity==23.1.0 ; python_version >= "3.10" and python_version < "4.0" setuptools==69.0.2 ; python_version >= "3.10" and python_version < "4.0" six==1.16.0 ; python_version >= "3.10" 
and python_version < "4.0" +sniffio==1.3.0 ; python_version >= "3.10" and python_version < "4.0" soupsieve==2.5 ; python_version >= "3.10" and python_version < "4.0" sqlparse==0.4.4 ; python_version >= "3.10" and python_version < "4.0" tld==0.13 ; python_version >= "3.10" and python_version < "4" From 6e869d821608b830e6b240a7e5f356ec0077f000 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Nov 2023 12:35:14 +0100 Subject: [PATCH 379/590] change: replace sync 'requests'-methods with async 'httpx'-methods - change: default 'httpx' timeout value from 5s to 30s - fix: get_headers()-method omits NoneType values from now on -- the method previously built a dictionary, where "Content-Type" would more often than not be of "NoneType", which would throw (correct) 'httpx'-Exceptions when trying to encode NoneTypes --- converter/es_connector.py | 113 ++++++++++++++++++++------------------ 1 file changed, 61 insertions(+), 52 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 782450a3..a040f044 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -6,6 +6,7 @@ from enum import Enum from typing import List +import httpx import requests import vobject from requests.auth import HTTPBasicAuth @@ -117,11 +118,13 @@ def __init__(self): self.init_api_client() def get_headers(self, content_type: str | None = "application/json"): - return { - "COOKIE": EduSharing.cookie, - "Accept": "application/json", - "Content-Type": content_type, - } + header_dict: dict = dict() # result dict that only contains values, no NoneTypes! + header_dict.update({"Accept": "application/json"}) + if EduSharing.cookie: + header_dict.update({"COOKIE": EduSharing.cookie}) + if content_type: + header_dict.update({"Content-Type": content_type}) + return header_dict def sync_node(self, spider, type, properties): groupBy = [] @@ -156,17 +159,19 @@ def sync_node(self, spider, type, properties): raise e return response["node"] - def set_node_text(self, uuid, item) -> bool: + async def set_node_text(self, uuid, item) -> bool: if "fulltext" in item: - response = requests.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/textContent?mimetype=text/plain", - headers=self.get_headers("multipart/form-data"), - data=item["fulltext"].encode("utf-8"), - ) - return response.status_code == 200 + async with httpx.AsyncClient() as client: + response = await client.post( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/textContent?mimetype=text/plain", + headers=self.get_headers("multipart/form-data"), + data=item["fulltext"].encode("utf-8"), + timeout=30, + ) + return response.status_code == 200 # does currently not store data # try: # EduSharing.nodeApi.change_content_as_text(EduSharingConstants.HOME, uuid, 'text/plain',item['fulltext']) @@ -188,44 +193,48 @@ def set_permissions(self, uuid, permissions) -> bool: except ApiException as e: return False - def set_node_binary_data(self, uuid, item) -> bool: + async def set_node_binary_data(self, uuid, item) -> bool: if "binary" in item: - logging.info( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/content?mimetype=" - + item["lom"]["technical"]["format"] - ) - files = {"file": item["binary"]} - response = requests.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/content?mimetype=" - + 
item["lom"]["technical"]["format"], - headers=self.get_headers(None), - files=files, - ) - return response.status_code == 200 - else: - return False - - def set_node_preview(self, uuid, item) -> bool: - if "thumbnail" in item: - key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None - if key: - files = {"image": base64.b64decode(item["thumbnail"][key])} - response = requests.post( + async with httpx.AsyncClient() as client: + logging.info( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/content?mimetype=" + + item["lom"]["technical"]["format"] + ) + files = {"file": item["binary"]} + response = await client.post( get_project_settings().get("EDU_SHARING_BASE_URL") + "rest/node/v1/nodes/-home-/" + uuid - + "/preview?mimetype=" - + item["thumbnail"]["mimetype"], + + "/content?mimetype=" + + item["lom"]["technical"]["format"], headers=self.get_headers(None), files=files, + timeout=30, ) return response.status_code == 200 + else: + return False + + async def set_node_preview(self, uuid, item) -> bool: + if "thumbnail" in item: + async with httpx.AsyncClient() as client: + key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None + if key: + files = {"image": base64.b64decode(item["thumbnail"][key])} + response = await client.post( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/preview?mimetype=" + + item["thumbnail"]["mimetype"], + headers=self.get_headers(None), + files=files, + timeout=30, + ) + return response.status_code == 200 else: logging.warning("No thumbnail provided for " + uuid) @@ -612,15 +621,15 @@ def set_node_permissions(self, uuid, item): ) logging.error(item["permissions"]) - def insert_item(self, spider, uuid, item): + async def insert_item(self, spider, uuid, item): node = self.sync_node(spider, "ccm:io", self.transform_item(uuid, spider, item)) self.set_node_permissions(node["ref"]["id"], item) - self.set_node_preview(node["ref"]["id"], item) - if not self.set_node_binary_data(node["ref"]["id"], item): - self.set_node_text(node["ref"]["id"], item) + await self.set_node_preview(node["ref"]["id"], item) + if not await self.set_node_binary_data(node["ref"]["id"], item): + await self.set_node_text(node["ref"]["id"], item) - def update_item(self, spider, uuid, item): - self.insert_item(spider, uuid, item) + async def update_item(self, spider, uuid, item): + await self.insert_item(spider, uuid, item) @staticmethod def init_cookie(): @@ -740,7 +749,7 @@ def find_item(self, id, spider): try: error_dict: dict = json.loads(e.body) error_name: str = error_dict["error"] - if error_name and error_name == 'org.edu_sharing.restservices.DAOMissingException': + if error_name and error_name == "org.edu_sharing.restservices.DAOMissingException": # when there is no already existing node in the edu-sharing repository, edu-sharing returns # a "DAOMissingException". 
The following debug message is commented out to reduce log-spam: # error_message: str = error_dict["message"] From fb0512f9b92b5e96c59df2f761d8a5a2ade95e76 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Nov 2023 17:25:26 +0100 Subject: [PATCH 380/590] change: enable 'asyncio'-support in Scrapy settings.py - 'httpx' requires 'asyncio' to be enabled in Scrapy - see: https://docs.scrapy.org/en/latest/topics/asyncio.html Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/settings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/settings.py b/converter/settings.py index 24bb5631..499785c7 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -31,6 +31,8 @@ "LOG_FORMATTER": LOG_FORMATTER }) +TWISTED_REACTOR = "twisted.internet.asyncioreactor.AsyncioSelectorReactor" + # Default behaviour for regular crawlers of non-license-controlled content # When set True, every item will have GROUP_EVERYONE attached in edu-sharing # When set False, no permissions are set at all, which can be helpful if you want to control them later (e.g. via inherition) From 31e54593176f57ce2ae83f22eaafb2e393fc5c01 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Nov 2023 19:32:24 +0100 Subject: [PATCH 381/590] change: replace synchronous 'requests'-calls with async 'httpx'-requests Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 194 +++++++++++++++++++++-------------------- 1 file changed, 101 insertions(+), 93 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 26047988..6d1a5249 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -17,8 +17,8 @@ import dateparser import dateutil.parser +import httpx import isodate -import requests import scrapy import scrapy.crawler from PIL import Image @@ -348,7 +348,7 @@ def scale_image(img, max_size): h *= 0.9 return img.resize((int(w), int(h)), Image.Resampling.LANCZOS).convert("RGB") - def process_item(self, raw_item, spider): + async def process_item(self, raw_item, spider): """ By default the thumbnail-pipeline handles several cases: - if there is a URL-string inside the "BaseItem.thumbnail"-field: @@ -367,100 +367,108 @@ def process_item(self, raw_item, spider): settings = get_settings_for_crawler(spider) # checking if the (optional) attribute WEB_TOOLS exists: web_tools = settings.get("WEB_TOOLS", WebEngine.Splash) - # if screenshot_bytes is provided (the crawler has already a binary representation of the image - # the pipeline will convert/scale the given image - if "screenshot_bytes" in item: - # in case we are already using playwright in a spider, we can skip one additional HTTP Request by - # accessing the (temporary available) "screenshot_bytes"-field - img = Image.open(BytesIO(item["screenshot_bytes"])) - self.create_thumbnails_from_image_bytes(img, item, settings) - # the final BaseItem data model doesn't use screenshot_bytes, - # therefore we delete it after we're done processing it - del item["screenshot_bytes"] - - # a thumbnail (url) is given - we will try to fetch it from the url - elif "thumbnail" in item: - url = item["thumbnail"] - response = requests.get(url) - log.debug( - "Loading thumbnail took " + str(response.elapsed.total_seconds()) + "s" - ) - # nothing was given, we try to screenshot the page either via Splash or Playwright - elif ( - "location" in item["lom"]["technical"] - and len(item["lom"]["technical"]["location"]) > 
0 - and "format" in item["lom"]["technical"] - and item["lom"]["technical"]["format"] == "text/html" - ): - if settings.get("SPLASH_URL") and web_tools == WebEngine.Splash: - response = requests.post( - settings.get("SPLASH_URL") + "/render.png", - json={ - "url": item["lom"]["technical"]["location"][0], - # since there can be multiple "technical.location"-values, the first URL is used for thumbnails - "wait": settings.get("SPLASH_WAIT"), - "html5_media": 1, - "headers": settings.get("SPLASH_HEADERS"), - }, - ) - if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright: - # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, - # it will default back to "splash" - - # this edge-case is necessary for spiders that only need playwright to gather a screenshot, - # but don't use playwright within the spider itself (e.g. serlo_spider) - playwright_dict = WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], - engine=WebEngine.Playwright) - screenshot_bytes = playwright_dict.get("screenshot_bytes") - img = Image.open(BytesIO(screenshot_bytes)) + async with httpx.AsyncClient() as client: + # if screenshot_bytes is provided (the crawler has already a binary representation of the image + # the pipeline will convert/scale the given image + if "screenshot_bytes" in item: + # in case we are already using playwright in a spider, we can skip one additional HTTP Request by + # accessing the (temporary available) "screenshot_bytes"-field + img = Image.open(BytesIO(item["screenshot_bytes"])) self.create_thumbnails_from_image_bytes(img, item, settings) - else: - if settings.get("DISABLE_SPLASH") is False: - log.warning( - "No thumbnail provided and SPLASH_URL was not configured for screenshots!" + # the final BaseItem data model doesn't use screenshot_bytes, + # therefore we delete it after we're done processing it + del item["screenshot_bytes"] + + # a thumbnail (url) is given - we will try to fetch it from the url + elif "thumbnail" in item: + url = item["thumbnail"] + try: + response = await client.get(url=url, follow_redirects=True, timeout=60) + log.debug( + "Loading thumbnail took " + str(response.elapsed.total_seconds()) + "s" ) - if response is None: - if settings.get("DISABLE_SPLASH") is False: - log.error( - "Neither thumbnail or technical.location (and technical.format) provided! 
Please provide at least one of them" - ) - else: - try: - if response.headers["Content-Type"] == "image/svg+xml": - if len(response.content) > settings.get("THUMBNAIL_MAX_SIZE"): - raise Exception( - "SVG images can't be converted, and the given image exceeds the maximum allowed size (" - + str(len(response.content)) - + " > " - + str(settings.get("THUMBNAIL_MAX_SIZE")) - + ")" - ) - item["thumbnail"] = {} - item["thumbnail"]["mimetype"] = response.headers["Content-Type"] - item["thumbnail"]["small"] = base64.b64encode( - response.content - ).decode() - else: - img = Image.open(BytesIO(response.content)) - self.create_thumbnails_from_image_bytes(img, item, settings) - except Exception as e: - if url is not None: - log.warning( - "Could not read thumbnail at " - + url - + ": " - + str(e) - + " (falling back to screenshot)" + except httpx.ConnectError: + # some website hosts are super slow or throttle connections + log.warning(f"Thumbnail-Pipeline failed to establish a connection with URL {url}") + except httpx.ReadError: + log.warning(f"Thumbnail-Pipeline could not read data from URL {url}") + # nothing was given, we try to screenshot the page either via Splash or Playwright + elif ( + "location" in item["lom"]["technical"] + and len(item["lom"]["technical"]["location"]) > 0 + and "format" in item["lom"]["technical"] + and item["lom"]["technical"]["format"] == "text/html" + ): + if settings.get("SPLASH_URL") and web_tools == WebEngine.Splash: + response = await client.post( + settings.get("SPLASH_URL") + "/render.png", + json={ + "url": item["lom"]["technical"]["location"][0], + # since there can be multiple "technical.location"-values, the first URL is used for thumbnails + "wait": settings.get("SPLASH_WAIT"), + "html5_media": 1, + "headers": settings.get("SPLASH_HEADERS"), + }, + timeout=30, ) - if "thumbnail" in item: - del item["thumbnail"] - return self.process_item(raw_item, spider) + if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright: + # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, + # it will default back to "splash" + + # this edge-case is necessary for spiders that only need playwright to gather a screenshot, + # but don't use playwright within the spider itself (e.g. serlo_spider) + playwright_dict = await WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], + engine=WebEngine.Playwright) + screenshot_bytes = playwright_dict.get("screenshot_bytes") + img = Image.open(BytesIO(screenshot_bytes)) + self.create_thumbnails_from_image_bytes(img, item, settings) else: - # item['thumbnail']={} - raise DropItem( - "No thumbnail provided or ressource was unavailable for fetching" + if settings.get("DISABLE_SPLASH") is False: + log.warning( + "No thumbnail provided and SPLASH_URL was not configured for screenshots!" + ) + if response is None: + if settings.get("DISABLE_SPLASH") is False: + log.error( + "Neither thumbnail or technical.location (and technical.format) provided! 
Please provide at least one of them" ) - return raw_item + else: + try: + if response.headers["Content-Type"] == "image/svg+xml": + if len(response.content) > settings.get("THUMBNAIL_MAX_SIZE"): + raise Exception( + "SVG images can't be converted, and the given image exceeds the maximum allowed size (" + + str(len(response.content)) + + " > " + + str(settings.get("THUMBNAIL_MAX_SIZE")) + + ")" + ) + item["thumbnail"] = {} + item["thumbnail"]["mimetype"] = response.headers["Content-Type"] + item["thumbnail"]["small"] = base64.b64encode( + response.content + ).decode() + else: + img = Image.open(BytesIO(response.content)) + self.create_thumbnails_from_image_bytes(img, item, settings) + except Exception as e: + if url is not None: + log.warning( + "Could not read thumbnail at " + + url + + ": " + + str(e) + + " (falling back to screenshot)" + ) + if "thumbnail" in item: + del item["thumbnail"] + return self.process_item(raw_item, spider) + else: + # item['thumbnail']={} + raise DropItem( + "No thumbnail provided or ressource was unavailable for fetching" + ) + return raw_item # override the project settings with the given ones from the current spider # see PR 56 for details @@ -617,13 +625,13 @@ def __init__(self): super().__init__() self.counter = 0 - def process_item(self, raw_item, spider): + async def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) title = "" if "title" in item["lom"]["general"]: title = str(item["lom"]["general"]["title"]) entryUUID = EduSharing.build_uuid(item["response"]["url"] if "url" in item["response"] else item["hash"]) - self.insert_item(spider, entryUUID, item) + await self.insert_item(spider, entryUUID, item) logging.info("item " + entryUUID + " inserted/updated") # @TODO: We may need to handle Collections From ba5173531beb2cdd9ccb4c7adec08708ec01104e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Nov 2023 19:49:49 +0100 Subject: [PATCH 382/590] change: make web_tools.py methods async - change the web_tools implementation to handle requests asynchronously -- currently uses a Semaphore from 'asyncio' to stay below 10 concurrent connections to the docker container --- when increasing the "MAX_QUEUE_SIZE"-setting in the "browserless/chrome" container, we might be able to increase the semaphore value accordingly for higher performance -- attention: if too many requests are made in parallel, the docker container will refuse those connections and items will be dropped! 
Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 70 +++++++++++++++++++++++------------------- 1 file changed, 38 insertions(+), 32 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index 034d7b3e..571ebbfb 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -1,9 +1,10 @@ import asyncio import json +from asyncio import Semaphore from enum import Enum import html2text -import requests +import httpx from playwright.async_api import async_playwright from scrapy.utils.project import get_project_settings @@ -19,17 +20,20 @@ class WebEngine(Enum): class WebTools: @staticmethod - def getUrlData(url: str, engine=WebEngine.Splash): - if engine == WebEngine.Splash: - return WebTools.__getUrlDataSplash(url) - elif engine == WebEngine.Playwright: - return WebTools.__getUrlDataPlaywright(url) + async def getUrlData(url: str, engine=WebEngine.Splash): + sem: Semaphore = asyncio.Semaphore(value=10) + # the headless browser can only handle 5 concurrent sessions and 5 items in the queue by default + async with sem: + if engine == WebEngine.Splash: + return await WebTools.__getUrlDataSplash(url) + elif engine == WebEngine.Playwright: + return await WebTools.__getUrlDataPlaywright(url) raise Exception("Invalid engine") @staticmethod - def __getUrlDataPlaywright(url: str): - playwright_dict = asyncio.run(WebTools.fetchDataPlaywright(url)) + async def __getUrlDataPlaywright(url: str): + playwright_dict = await WebTools.fetchDataPlaywright(url) html = playwright_dict.get("content") screenshot_bytes = playwright_dict.get("screenshot_bytes") return {"html": html, @@ -39,35 +43,37 @@ def __getUrlDataPlaywright(url: str): "screenshot_bytes": screenshot_bytes} @staticmethod - def __getUrlDataSplash(url: str): + async def __getUrlDataSplash(url: str): settings = get_project_settings() # html = None if settings.get("SPLASH_URL") and not url.endswith(".pdf") and not url.endswith(".docx"): # Splash can't handle some binary direct-links (Splash will throw "LUA Error 400: Bad Request" as a result) # ToDo: which additional filetypes need to be added to the exclusion list? - media files (.mp3, mp4 etc.?) 
- result = requests.post( - settings.get("SPLASH_URL") + "/render.json", - json={ - "html": 1, - "iframes": 1, - "url": url, - "wait": settings.get("SPLASH_WAIT"), - "headers": settings.get("SPLASH_HEADERS"), - "script": 1, - "har": 1, - "response_body": 1, - }, - ) - data = result.content.decode("UTF-8") - j = json.loads(data) - html = j['html'] if 'html' in j else '' - text = html - text += '\n'.join(list(map(lambda x: x["html"], j["childFrames"]))) if 'childFrames' in j else '' - cookies = result.cookies.get_dict() - return {"html": html, - "text": WebTools.html2Text(text), - "cookies": cookies, - "har": json.dumps(j["har"])} + async with httpx.AsyncClient() as client: + result = await client.post( + settings.get("SPLASH_URL") + "/render.json", + json={ + "html": 1, + "iframes": 1, + "url": url, + "wait": settings.get("SPLASH_WAIT"), + "headers": settings.get("SPLASH_HEADERS"), + "script": 1, + "har": 1, + "response_body": 1, + }, + timeout=30 + ) + data = result.content.decode("UTF-8") + j = json.loads(data) + html = j['html'] if 'html' in j else '' + text = html + text += '\n'.join(list(map(lambda x: x["html"], j["childFrames"]))) if 'childFrames' in j else '' + cookies = dict(result.cookies) + return {"html": html, + "text": WebTools.html2Text(text), + "cookies": cookies, + "har": json.dumps(j["har"])} else: return {"html": None, "text": None, "cookies": None, "har": None} From 2b1c12df9ed8a13b846c9fc21f6ede01431f6a66 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Nov 2023 20:11:55 +0100 Subject: [PATCH 383/590] change: async "parse"-method / await web_tools Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 5795cde1..74811f59 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -1,3 +1,4 @@ +import asyncio import datetime import json import logging @@ -38,6 +39,7 @@ class SerloSpider(scrapy.Spider, LomBase): } GRAPHQL_MODIFIED_AFTER_PARAMETER: str = "" GRAPHQL_INSTANCE_PARAMETER: str = "" + sem = asyncio.Semaphore(value=10) # used to control the amount of concurrent requests in "parse"-method graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: @@ -318,17 +320,18 @@ def check_if_item_should_be_dropped(self, response, graphql_json: dict): drop_item_flag = True return drop_item_flag - def parse(self, response, **kwargs): + async def parse(self, response, **kwargs): graphql_json: dict = kwargs.get("graphql_item") drop_item_flag = self.check_if_item_should_be_dropped(response, graphql_json) if drop_item_flag is True: - return None + return json_ld = response.xpath('//*[@type="application/ld+json"]/text()').get() json_ld = json.loads(json_ld) - playwright_dict = WebTools.getUrlData(response.url, WebEngine.Playwright) + async with self.sem: + playwright_dict = await WebTools.getUrlData(response.url, WebEngine.Playwright) html_body = playwright_dict.get("html") screenshot_bytes = playwright_dict.get("screenshot_bytes") html_text = playwright_dict.get("text") @@ -345,7 +348,7 @@ def parse(self, response, **kwargs): f"Robot Meta Tag {robot_meta_tags} identified. Robot Meta Tags 'noindex' or 'none' should " f"be skipped by the crawler. Dropping item {response.url} ." 
) - return None + return base = BaseItemLoader() From 37a5044444b4154bac46e4c4708a3abb0a136b19 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 Nov 2023 20:19:02 +0100 Subject: [PATCH 384/590] change: enable Scrapy "PeriodicLog"-extension - see: https://docs.scrapy.org/en/latest/topics/extensions.html#periodic-log-extension Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/settings.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/converter/settings.py b/converter/settings.py index 499785c7..05dada37 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -109,7 +109,13 @@ EXTENSIONS = { # 'scrapy.extensions.telnet.TelnetConsole': None, # 'scrapy.extensions.closespider.CLOSESPIDER_PAGECOUNT': 4, + "scrapy.extensions.periodic_log.PeriodicLog": 0, } +# PeriodicLog Extension Settings +# (see: https://docs.scrapy.org/en/latest/topics/extensions.html#periodic-log-extension) +PERIODIC_LOG_STATS = True +PERIODIC_LOG_DELTA = True +PERIODIC_LOG_TIMING_ENABLED = True # Configure item pipelines # See https://docs.scrapy.org/en/latest/topics/item-pipeline.html From ce6a5d8bd1f24780f3eae864f4d026915b2a433a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 29 Nov 2023 11:37:51 +0100 Subject: [PATCH 385/590] change: async parse method Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/kmap_spider.py | 4 ++-- converter/spiders/materialnetzwerk_spider.py | 4 ++-- converter/spiders/oersi_spider.py | 4 ++-- converter/spiders/sample_spider_alternative.py | 4 ++-- converter/spiders/tutory_spider.py | 4 ++-- converter/spiders/zum_mathe_apps_spider.py | 4 ++-- converter/spiders/zum_physik_apps_spider.py | 4 ++-- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/converter/spiders/kmap_spider.py b/converter/spiders/kmap_spider.py index 46cdcd5c..c9441e39 100644 --- a/converter/spiders/kmap_spider.py +++ b/converter/spiders/kmap_spider.py @@ -55,7 +55,7 @@ def getId(self, response=None) -> str: def getHash(self, response=None) -> str: pass - def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: """ Scrapy Contracts: @@ -63,7 +63,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: @returns item 1 """ last_modified = kwargs.get("lastModified") - url_data_web_tools_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright) + url_data_web_tools_dict = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright) splash_html_string = url_data_web_tools_dict.get('html') json_ld_string: str = Selector(text=splash_html_string).xpath('//*[@id="ld"]/text()').get() json_ld: dict = json.loads(json_ld_string) diff --git a/converter/spiders/materialnetzwerk_spider.py b/converter/spiders/materialnetzwerk_spider.py index d978daf1..14b7316a 100644 --- a/converter/spiders/materialnetzwerk_spider.py +++ b/converter/spiders/materialnetzwerk_spider.py @@ -84,7 +84,7 @@ def parse_start_url(self, response: scrapy.http.Response, **kwargs): bundle_urls.append(current_url) yield scrapy.Request(url=current_url, callback=self.parse_bundle_overview) - def parse_bundle_overview(self, response: scrapy.http.Response): + async def parse_bundle_overview(self, response: scrapy.http.Response): """ Spider Contracts: @@ -98,7 +98,7 @@ def parse_bundle_overview(self, response: scrapy.http.Response): bundle_dict = 
dict() bundle_dict["bundle_url"] = response.url # render the web page to execute js and copy to the response - body = WebTools.getUrlData(response.url, WebEngine.Playwright) + body = await WebTools.getUrlData(response.url, WebEngine.Playwright) response = response.replace(body=body['html']) # a typical bundle_overview looks like this: https://editor.mnweg.org/mnw/sammlung/das-menschliche-skelett-m-78 diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 269c1de2..6b39cc32 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -756,7 +756,7 @@ def split_names_if_possible_and_add_to_lifecycle(name_string: str, lifecycle_ite else: lifecycle_item_loader.add_value("firstName", name_string) - def parse(self, response: scrapy.http.Response, **kwargs): + async def parse(self, response: scrapy.http.Response, **kwargs): elastic_item: dict = kwargs.get("elastic_item") elastic_item_source: dict = elastic_item.get("_source") # _source is the original JSON body passed for the document at index time @@ -1080,7 +1080,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): if not thumbnail_url: # only use the headless browser if we need to take a website screenshot, otherwise skip this (expensive) # part of the program flow completely - url_data = WebTools.getUrlData(url=response.url, engine=WebEngine.Playwright) + url_data = await WebTools.getUrlData(url=response.url, engine=WebEngine.Playwright) if "html" in url_data: response_loader.add_value("html", url_data["html"]) if "text" in url_data: diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 0aeed4a9..567593fe 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -43,10 +43,10 @@ def start_requests(self): for start_url in self.start_urls: yield scrapy.Request(url=start_url, callback=self.parse) - def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: # OPTIONAL: If you need to use playwright to crawl a website, this is how you can access the data provided # by Playwright's headless browser - playwright_dict: dict = WebTools.getUrlData(response.url, WebEngine.Playwright) + playwright_dict: dict = await WebTools.getUrlData(response.url, WebEngine.Playwright) html_body = playwright_dict.get("html") screenshot_bytes = playwright_dict.get("screenshot_bytes") # to be used in base.screenshot_bytes diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 74398685..b4f214b7 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -269,7 +269,7 @@ def getLicense(self, response=None): license_loader.add_value("author", full_name) return license_loader - def getLOMGeneral(self, response=None): + async def getLOMGeneral(self, response=None): general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"]["name"]) item_description = None @@ -287,7 +287,7 @@ def getLOMGeneral(self, response=None): general.add_value("description", meta_og_description) else: # this is where the (expensive) calls to our headless browser start - playwright_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright) + playwright_dict = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright) playwright_html = playwright_dict["html"] # ToDo: if we need DOM data 
from Playwright in another method, move the call to Playwright into parse() # and parametrize the result diff --git a/converter/spiders/zum_mathe_apps_spider.py b/converter/spiders/zum_mathe_apps_spider.py index 9d4657a2..a669b90d 100644 --- a/converter/spiders/zum_mathe_apps_spider.py +++ b/converter/spiders/zum_mathe_apps_spider.py @@ -75,7 +75,7 @@ def parse_apollonian_subtopic(self, response: scrapy.http.Response): apollo_url = response.urljoin(apollo_url) yield scrapy.Request(url=apollo_url, callback=self.parse) - def parse(self, response: scrapy.http.Response, **kwargs): + async def parse(self, response: scrapy.http.Response, **kwargs): """ Populates a BaseItemLoader with metadata and yields the BaseItem afterwards. @@ -84,7 +84,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): @returns items 1 """ # fetching publication date and lastModified from dynamically loaded
<p class="Ende">
-element: - url_data_splash_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright) + url_data_splash_dict = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright) splash_html_string = url_data_splash_dict.get('html') page_end_element = Selector(text=splash_html_string).xpath('//p[@class="Ende"]').get() line_regex = re.compile(r'
') diff --git a/converter/spiders/zum_physik_apps_spider.py b/converter/spiders/zum_physik_apps_spider.py index efa8dd72..f136739b 100644 --- a/converter/spiders/zum_physik_apps_spider.py +++ b/converter/spiders/zum_physik_apps_spider.py @@ -54,7 +54,7 @@ def parse_topic_overview(self, response: scrapy.http.Response): topic_url = response.urljoin(topic_url) yield scrapy.Request(url=topic_url, callback=self.parse) - def parse(self, response: scrapy.http.Response, **kwargs): + async def parse(self, response: scrapy.http.Response, **kwargs): """ Populates a BaseItemLoader with metadata and yields the individual BaseItem via BaseItemLoader.load_item() afterwards. @@ -64,7 +64,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): @returns item 1 """ # fetching publication date and lastModified from dynamically loaded
<p class="Ende">
-element: - url_data_splash_dict = WebTools.getUrlData(response.url, engine=WebEngine.Playwright) + url_data_splash_dict = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright) splash_html_string = url_data_splash_dict.get('html') page_end_element = Selector(text=splash_html_string).xpath('//p[@class="Ende"]').get() line_regex = re.compile(r'
') From 60f9f70ba43ea1872bd2f2b2eb63d71c2989ef7b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 30 Nov 2023 13:35:28 +0100 Subject: [PATCH 386/590] change: use a shared requests Session for GraphQL queries - sodix_spider shares a Session object between all GraphQL calls from now on -- this should (significantly) increase performance and connection reliability (see: https://requests.readthedocs.io/en/latest/user/advanced/#session-objects) - fix: Type Warning ("Response"-parameter expected 'LomBase'-type because LomBase.shouldImport() was called (instead of 'self.shouldImport()'-override)) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/sodix_spider.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/converter/spiders/sodix_spider.py b/converter/spiders/sodix_spider.py index 77146439..4113af6e 100644 --- a/converter/spiders/sodix_spider.py +++ b/converter/spiders/sodix_spider.py @@ -48,6 +48,7 @@ class SodixSpider(scrapy.Spider, LomBase, JSONBase): custom_settings = { "ROBOTSTXT_OBEY": False # returns an 401-error anyway, we might as well skip this scrapy.Request } + SESSION = requests.Session() OER_FILTER = False # flag used for controlling the crawling process between two modes # - by default (OER_FILTER=False), ALL entries from the GraphQL API are crawled. # - If OER_FILTER=TRUE, only materials with OER-compatible licenses are crawled (everything else gets skipped) @@ -227,7 +228,7 @@ def getUri(self, response=None, **kwargs) -> str: ) def start_request(self, page=0): - access_token = requests.post( + access_token = self.SESSION.post( "https://api.sodix.de/gql/auth/login", None, { @@ -379,7 +380,7 @@ def hasChanged(self, response=None, **kwargs) -> bool: logging.info(f"matching requested id: {self.remoteId}") return True return False - db = EduSharing().find_item(identifier, self) + db = EduSharing().find_item(id=identifier, spider=self) changed = db is None or db[1] != hash_str if not changed: logging.info(f"Item {identifier} (uuid: {db[0]}) has not changed") @@ -921,7 +922,7 @@ def parse(self, response=None, **kwargs): logging.error(f"Cannot parse SODIX item from callback arguments. 
Aborting parse()-method.") return None - if LomBase.shouldImport(response) is False: + if self.shouldImport(response) is False: self.logger.debug( f"Skipping entry {str(self.getId(response, sodix_item=sodix_item))} because shouldImport() returned " f"false" From 0c2af18e44f821d99f13929bf61fdef23ff5cb7e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 30 Nov 2023 14:17:33 +0100 Subject: [PATCH 387/590] fix: Scrapy DeprecationWarning ("REQUEST_FINGERPRINTER_IMPLEMENTATION") - fix: DeprecationWarning that appeared with Scrapy 2.10 by setting its value to the recommended value "2.7" -- see: https://docs.scrapy.org/en/latest/topics/request-response.html#request-fingerprinter-implementation Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/settings.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/converter/settings.py b/converter/settings.py index 05dada37..a839a3ea 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -32,6 +32,9 @@ }) TWISTED_REACTOR = "twisted.internet.asyncioreactor.AsyncioSelectorReactor" +REQUEST_FINGERPRINTER_IMPLEMENTATION = "2.7" +# fixes Scrapy DeprecationWarning on startup (Scrapy v2.10+) +# (see: https://docs.scrapy.org/en/latest/topics/request-response.html#request-fingerprinter-implementation): # Default behaviour for regular crawlers of non-license-controlled content # When set True, every item will have GROUP_EVERYONE attached in edu-sharing From 9e10759a4f86cd86844b4cf81b06888cf334b102 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 30 Nov 2023 14:55:58 +0100 Subject: [PATCH 388/590] change: shared 'httpx.AsyncClient()' for es_connector.py Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 98 +++++++++++++++++++-------------------- 1 file changed, 48 insertions(+), 50 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index a040f044..70e096c0 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -107,6 +107,7 @@ class CreateGroupType(Enum): nodeApi: NODEV1Api groupCache: List[str] enabled: bool + client_async = httpx.AsyncClient() def __init__(self): cookie_threshold = env.get("EDU_SHARING_COOKIE_REBUILD_THRESHOLD", True) @@ -161,24 +162,23 @@ def sync_node(self, spider, type, properties): async def set_node_text(self, uuid, item) -> bool: if "fulltext" in item: - async with httpx.AsyncClient() as client: - response = await client.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/textContent?mimetype=text/plain", - headers=self.get_headers("multipart/form-data"), - data=item["fulltext"].encode("utf-8"), - timeout=30, - ) - return response.status_code == 200 - # does currently not store data - # try: - # EduSharing.nodeApi.change_content_as_text(EduSharingConstants.HOME, uuid, 'text/plain',item['fulltext']) - # return True - # except ApiException as e: - # print(e) - # return False + response = await self.client_async.post( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/textContent?mimetype=text/plain", + headers=self.get_headers("multipart/form-data"), + data=item["fulltext"].encode("utf-8"), + timeout=30, + ) + return response.status_code == 200 + # does currently not store data + # try: + # EduSharing.nodeApi.change_content_as_text(EduSharingConstants.HOME, uuid, 'text/plain',item['fulltext']) + # return True + # except 
ApiException as e: + # print(e) + # return False def set_permissions(self, uuid, permissions) -> bool: try: @@ -195,46 +195,44 @@ def set_permissions(self, uuid, permissions) -> bool: async def set_node_binary_data(self, uuid, item) -> bool: if "binary" in item: - async with httpx.AsyncClient() as client: - logging.info( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/content?mimetype=" - + item["lom"]["technical"]["format"] - ) - files = {"file": item["binary"]} - response = await client.post( + logging.info( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/content?mimetype=" + + item["lom"]["technical"]["format"] + ) + files = {"file": item["binary"]} + response = await self.client_async.post( + get_project_settings().get("EDU_SHARING_BASE_URL") + + "rest/node/v1/nodes/-home-/" + + uuid + + "/content?mimetype=" + + item["lom"]["technical"]["format"], + headers=self.get_headers(None), + files=files, + timeout=30, + ) + return response.status_code == 200 + else: + return False + + async def set_node_preview(self, uuid, item) -> bool: + if "thumbnail" in item: + key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None + if key: + files = {"image": base64.b64decode(item["thumbnail"][key])} + response = await self.client_async.post( get_project_settings().get("EDU_SHARING_BASE_URL") + "rest/node/v1/nodes/-home-/" + uuid - + "/content?mimetype=" - + item["lom"]["technical"]["format"], + + "/preview?mimetype=" + + item["thumbnail"]["mimetype"], headers=self.get_headers(None), files=files, timeout=30, ) return response.status_code == 200 - else: - return False - - async def set_node_preview(self, uuid, item) -> bool: - if "thumbnail" in item: - async with httpx.AsyncClient() as client: - key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None - if key: - files = {"image": base64.b64decode(item["thumbnail"][key])} - response = await client.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/preview?mimetype=" - + item["thumbnail"]["mimetype"], - headers=self.get_headers(None), - files=files, - timeout=30, - ) - return response.status_code == 200 else: logging.warning("No thumbnail provided for " + uuid) From f1cfb39c40c734556e5127630dc4c57a84bb4439 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 30 Nov 2023 15:36:26 +0100 Subject: [PATCH 389/590] change: shared 'httpx.AsyncClient()' for Thumbnail-Pipeline Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 198 ++++++++++++++++++++--------------------- 1 file changed, 99 insertions(+), 99 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 6d1a5249..124b555b 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -338,6 +338,7 @@ class ProcessThumbnailPipeline(BasicPipeline): """ generate thumbnails """ + _client_async = httpx.AsyncClient() @staticmethod def scale_image(img, max_size): @@ -350,12 +351,12 @@ def scale_image(img, max_size): async def process_item(self, raw_item, spider): """ - By default the thumbnail-pipeline handles several cases: + By default, the thumbnail-pipeline handles several cases: - if there is a URL-string inside the "BaseItem.thumbnail"-field: -- download image from URL; rescale it into different sizes (small/large); --- save the thumbnails as base64 
within ---- "BaseItem.thumbnail.small", "BaseItem.thumbnail.large" - --- (afterwards delete the URL from "BaseItem.thumbnail") + --- (afterward delete the URL from "BaseItem.thumbnail") - if there is NO "BaseItem.thumbnail"-field: -- default: take a screenshot of the URL from "technical.location" with Splash, rescale and save (as above) @@ -367,108 +368,107 @@ async def process_item(self, raw_item, spider): settings = get_settings_for_crawler(spider) # checking if the (optional) attribute WEB_TOOLS exists: web_tools = settings.get("WEB_TOOLS", WebEngine.Splash) - async with httpx.AsyncClient() as client: - # if screenshot_bytes is provided (the crawler has already a binary representation of the image - # the pipeline will convert/scale the given image - if "screenshot_bytes" in item: - # in case we are already using playwright in a spider, we can skip one additional HTTP Request by - # accessing the (temporary available) "screenshot_bytes"-field - img = Image.open(BytesIO(item["screenshot_bytes"])) + # if screenshot_bytes is provided (the crawler has already a binary representation of the image + # the pipeline will convert/scale the given image + if "screenshot_bytes" in item: + # in case we are already using playwright in a spider, we can skip one additional HTTP Request by + # accessing the (temporary available) "screenshot_bytes"-field + img = Image.open(BytesIO(item["screenshot_bytes"])) + self.create_thumbnails_from_image_bytes(img, item, settings) + # The final BaseItem data model doesn't use screenshot_bytes. + # Therefore, we delete it after we're done with processing it + del item["screenshot_bytes"] + + # a thumbnail (url) is given - we will try to fetch it from the url + elif "thumbnail" in item: + url = item["thumbnail"] + try: + response = await self._client_async.get(url=url, follow_redirects=True, timeout=60) + log.debug( + "Loading thumbnail took " + str(response.elapsed.total_seconds()) + "s" + ) + except httpx.ConnectError: + # some website hosts are super slow or throttle connections + log.warning(f"Thumbnail-Pipeline failed to establish a connection with URL {url}") + except httpx.ReadError: + log.warning(f"Thumbnail-Pipeline could not read data from URL {url}") + # nothing was given, we try to screenshot the page either via Splash or Playwright + elif ( + "location" in item["lom"]["technical"] + and len(item["lom"]["technical"]["location"]) > 0 + and "format" in item["lom"]["technical"] + and item["lom"]["technical"]["format"] == "text/html" + ): + if settings.get("SPLASH_URL") and web_tools == WebEngine.Splash: + response = await self._client_async.post( + settings.get("SPLASH_URL") + "/render.png", + json={ + "url": item["lom"]["technical"]["location"][0], + # since there can be multiple "technical.location"-values, the first URL is used for thumbnails + "wait": settings.get("SPLASH_WAIT"), + "html5_media": 1, + "headers": settings.get("SPLASH_HEADERS"), + }, + timeout=30, + ) + if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright: + # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, + # it will default back to "splash" + + # this edge-case is necessary for spiders that only need playwright to gather a screenshot, + # but don't use playwright within the spider itself (e.g. 
serlo_spider) + playwright_dict = await WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], + engine=WebEngine.Playwright) + screenshot_bytes = playwright_dict.get("screenshot_bytes") + img = Image.open(BytesIO(screenshot_bytes)) self.create_thumbnails_from_image_bytes(img, item, settings) - # the final BaseItem data model doesn't use screenshot_bytes, - # therefore we delete it after we're done processing it - del item["screenshot_bytes"] - - # a thumbnail (url) is given - we will try to fetch it from the url - elif "thumbnail" in item: - url = item["thumbnail"] - try: - response = await client.get(url=url, follow_redirects=True, timeout=60) - log.debug( - "Loading thumbnail took " + str(response.elapsed.total_seconds()) + "s" - ) - except httpx.ConnectError: - # some website hosts are super slow or throttle connections - log.warning(f"Thumbnail-Pipeline failed to establish a connection with URL {url}") - except httpx.ReadError: - log.warning(f"Thumbnail-Pipeline could not read data from URL {url}") - # nothing was given, we try to screenshot the page either via Splash or Playwright - elif ( - "location" in item["lom"]["technical"] - and len(item["lom"]["technical"]["location"]) > 0 - and "format" in item["lom"]["technical"] - and item["lom"]["technical"]["format"] == "text/html" - ): - if settings.get("SPLASH_URL") and web_tools == WebEngine.Splash: - response = await client.post( - settings.get("SPLASH_URL") + "/render.png", - json={ - "url": item["lom"]["technical"]["location"][0], - # since there can be multiple "technical.location"-values, the first URL is used for thumbnails - "wait": settings.get("SPLASH_WAIT"), - "html5_media": 1, - "headers": settings.get("SPLASH_HEADERS"), - }, - timeout=30, + else: + if settings.get("DISABLE_SPLASH") is False: + log.warning( + "No thumbnail provided and SPLASH_URL was not configured for screenshots!" ) - if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright: - # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, - # it will default back to "splash" - - # this edge-case is necessary for spiders that only need playwright to gather a screenshot, - # but don't use playwright within the spider itself (e.g. serlo_spider) - playwright_dict = await WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], - engine=WebEngine.Playwright) - screenshot_bytes = playwright_dict.get("screenshot_bytes") - img = Image.open(BytesIO(screenshot_bytes)) + if response is None: + if settings.get("DISABLE_SPLASH") is False: + log.error( + "Neither thumbnail or technical.location (and technical.format) provided! 
Please provide at least one of them" + ) + else: + try: + if response.headers["Content-Type"] == "image/svg+xml": + if len(response.content) > settings.get("THUMBNAIL_MAX_SIZE"): + raise Exception( + "SVG images can't be converted, and the given image exceeds the maximum allowed size (" + + str(len(response.content)) + + " > " + + str(settings.get("THUMBNAIL_MAX_SIZE")) + + ")" + ) + item["thumbnail"] = {} + item["thumbnail"]["mimetype"] = response.headers["Content-Type"] + item["thumbnail"]["small"] = base64.b64encode( + response.content + ).decode() + else: + img = Image.open(BytesIO(response.content)) self.create_thumbnails_from_image_bytes(img, item, settings) + except Exception as e: + if url is not None: + log.warning( + "Could not read thumbnail at " + + url + + ": " + + str(e) + + " (falling back to screenshot)" + ) + if "thumbnail" in item: + del item["thumbnail"] + return self.process_item(raw_item, spider) else: - if settings.get("DISABLE_SPLASH") is False: - log.warning( - "No thumbnail provided and SPLASH_URL was not configured for screenshots!" - ) - if response is None: - if settings.get("DISABLE_SPLASH") is False: - log.error( - "Neither thumbnail or technical.location (and technical.format) provided! Please provide at least one of them" + # item['thumbnail']={} + raise DropItem( + "No thumbnail provided or ressource was unavailable for fetching" ) - else: - try: - if response.headers["Content-Type"] == "image/svg+xml": - if len(response.content) > settings.get("THUMBNAIL_MAX_SIZE"): - raise Exception( - "SVG images can't be converted, and the given image exceeds the maximum allowed size (" - + str(len(response.content)) - + " > " - + str(settings.get("THUMBNAIL_MAX_SIZE")) - + ")" - ) - item["thumbnail"] = {} - item["thumbnail"]["mimetype"] = response.headers["Content-Type"] - item["thumbnail"]["small"] = base64.b64encode( - response.content - ).decode() - else: - img = Image.open(BytesIO(response.content)) - self.create_thumbnails_from_image_bytes(img, item, settings) - except Exception as e: - if url is not None: - log.warning( - "Could not read thumbnail at " - + url - + ": " - + str(e) - + " (falling back to screenshot)" - ) - if "thumbnail" in item: - del item["thumbnail"] - return self.process_item(raw_item, spider) - else: - # item['thumbnail']={} - raise DropItem( - "No thumbnail provided or ressource was unavailable for fetching" - ) - return raw_item + return raw_item # override the project settings with the given ones from the current spider # see PR 56 for details From fdbbd7f742613e31a34ded9827728f9280e68716 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 30 Nov 2023 18:11:12 +0100 Subject: [PATCH 390/590] build: introduce 'async-lru'-package to dependencies - ports Python's built-in "functools.lru_cache"-function to asyncio - see: https://github.com/aio-libs/async-lru Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- poetry.lock | 16 +++++++++++++++- pyproject.toml | 1 + requirements.txt | 1 + 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 19ddf355..9dddab5a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,6 +38,20 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] +[[package]] +name = "async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", 
hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "attrs" version = "23.1.0" @@ -2168,4 +2182,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "bf8435b68ce1c2a04527168ca0fe451f5b136a877495babc69c10246be0f8ca4" +content-hash = "bfab694eeb1b66b9383643eabb65947057108c2c884e73bf01f210fdf54e784e" diff --git a/pyproject.toml b/pyproject.toml index fdfe773c..61904fe4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -91,6 +91,7 @@ trafilatura = "^1.6.1" babel = "2.13.1" langcodes = {extras = ["data"], version = "^3.3.0"} httpx = "^0.25.2" +async-lru = "2.0.4" [tool.poetry.group.dev.dependencies] diff --git a/requirements.txt b/requirements.txt index 285d7885..d9db2647 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ anyio==4.1.0 ; python_version >= "3.10" and python_version < "4.0" asgiref==3.7.2 ; python_version >= "3.10" and python_version < "4.0" +async-lru==2.0.4 ; python_version >= "3.10" and python_version < "4.0" attrs==23.1.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" babel==2.13.1 ; python_version >= "3.10" and python_version < "4.0" From 83e525e25678b2c3e991d1536af544ddfd086589 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 30 Nov 2023 18:32:10 +0100 Subject: [PATCH 391/590] feat: LRU-Cache for Thumbnail-URLs, drop 'httpx' in Thumbnail-Pipeline, improve Error-Handling feat/perf: use LRU cache for Thumbnail-Pipeline - feat: implements a cache for "thumbnail"-URls that discards 'least recently used' items first -- while debugging large crawlers it was observed that many website hosters serve generic placeholder-like images for items that don't have a unique thumbnail: by keeping the most commonly requested URLs (and their response) in a cache, we should be able to significantly reduce the amount of outgoing requests and traffic (and increase performance) change: replace 'httpx' in Thumbnail-Pipeline with Scrapy Requests for Splash - the screenshot and thumbnail pipeline worked previously in parallel to Scrapy's built-in scheduler, which comes with its own set of problems -- (ignoring the Scrapy scheduler means that we cannot control the load/traffic in a reasonable / responsible manner) feat: improve Splash Error-Handling for unsupported URLs - feat: use 'scrapy.FormRequest'-objects to handle Splash Requests (Splash is queried within the Scrapy Scheduler from now on) - feat: fallback to Playwright if Splash failed to retrieve Thumbnails -- if Splash fails to render a websites, it will set the splash_success flag to False and use Playwright instead -- Splash will be DEPRECATED in the future since it has proven itself more and more unreliable with modern web-pages - fix: several warnings in regard to shadowed variables --- converter/pipelines.py | 147 ++++++++++++++++++++++++++++------------- 1 file changed, 102 insertions(+), 45 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 124b555b..9aefc484 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -8,25 +8,30 @@ # Don't forget to add your pipeline to the ITEM_PIPELINES 
setting # See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html import csv +import datetime import logging import re import time from abc import ABCMeta +from asyncio import Future from io import BytesIO from typing import BinaryIO, TextIO, Optional import dateparser import dateutil.parser -import httpx import isodate import scrapy import scrapy.crawler from PIL import Image +from async_lru import alru_cache from itemadapter import ItemAdapter from scrapy import settings from scrapy.exceptions import DropItem from scrapy.exporters import JsonItemExporter +from scrapy.http.request import NO_CALLBACK +from scrapy.utils.defer import maybe_deferred_to_future from scrapy.utils.project import get_project_settings +from twisted.internet.defer import Deferred from converter import env from converter.constants import * @@ -338,7 +343,6 @@ class ProcessThumbnailPipeline(BasicPipeline): """ generate thumbnails """ - _client_async = httpx.AsyncClient() @staticmethod def scale_image(img, max_size): @@ -363,35 +367,36 @@ async def process_item(self, raw_item, spider): -- alternatively, on-demand: use Playwright to take a screenshot, rescale and save (as above) """ item = ItemAdapter(raw_item) - response = None - url = None - settings = get_settings_for_crawler(spider) + response: scrapy.http.Response | None = None + url: str | None = None + settings_crawler = get_settings_for_crawler(spider) # checking if the (optional) attribute WEB_TOOLS exists: - web_tools = settings.get("WEB_TOOLS", WebEngine.Splash) + web_tools = settings_crawler.get("WEB_TOOLS", default=WebEngine.Splash) + _splash_success: bool = True # control flag flips to False if Splash can't handle a URL # if screenshot_bytes is provided (the crawler has already a binary representation of the image # the pipeline will convert/scale the given image if "screenshot_bytes" in item: # in case we are already using playwright in a spider, we can skip one additional HTTP Request by # accessing the (temporary available) "screenshot_bytes"-field img = Image.open(BytesIO(item["screenshot_bytes"])) - self.create_thumbnails_from_image_bytes(img, item, settings) + self.create_thumbnails_from_image_bytes(img, item, settings_crawler) # The final BaseItem data model doesn't use screenshot_bytes. 
# Therefore, we delete it after we're done with processing it del item["screenshot_bytes"] - - # a thumbnail (url) is given - we will try to fetch it from the url elif "thumbnail" in item: - url = item["thumbnail"] - try: - response = await self._client_async.get(url=url, follow_redirects=True, timeout=60) - log.debug( - "Loading thumbnail took " + str(response.elapsed.total_seconds()) + "s" - ) - except httpx.ConnectError: - # some website hosts are super slow or throttle connections - log.warning(f"Thumbnail-Pipeline failed to establish a connection with URL {url}") - except httpx.ReadError: - log.warning(f"Thumbnail-Pipeline could not read data from URL {url}") + # a thumbnail (url) is given - we will try to fetch it from the url + url: str = item["thumbnail"] + # ToDo: Log time before the request + time_start = datetime.datetime.now() + response: scrapy.http.Response = await self.download_thumbnail_url(url, spider) + time_end = datetime.datetime.now() + log.debug(f"Loading thumbnail from {url} took {time_end - time_start}.") + # ToDo: log time after response + if response.status != 200: + log.debug(f"Thumbnail-Pipeline received unexpected response (status: {response.status}) from {url}") + # ToDo: Error-handling necessary + pass + log.debug(f"Thumbnail-URL-Cache after trying to query {url}: {self.download_thumbnail_url.cache_info()}") # nothing was given, we try to screenshot the page either via Splash or Playwright elif ( "location" in item["lom"]["technical"] @@ -399,58 +404,84 @@ async def process_item(self, raw_item, spider): and "format" in item["lom"]["technical"] and item["lom"]["technical"]["format"] == "text/html" ): - if settings.get("SPLASH_URL") and web_tools == WebEngine.Splash: - response = await self._client_async.post( - settings.get("SPLASH_URL") + "/render.png", - json={ - "url": item["lom"]["technical"]["location"][0], - # since there can be multiple "technical.location"-values, the first URL is used for thumbnails - "wait": settings.get("SPLASH_WAIT"), - "html5_media": 1, - "headers": settings.get("SPLASH_HEADERS"), - }, - timeout=30, + if settings_crawler.get("SPLASH_URL") and web_tools == WebEngine.Splash: + target_url: str = item["lom"]["technical"]["location"][0] + _splash_url: str = f"{settings_crawler.get('SPLASH_URL')}/render.png" + _splash_parameter_wait: str = f"{settings_crawler.get('SPLASH_WAIT')}" + _splash_parameter_html5media: str = str(1) + _splash_headers: dict = settings_crawler.get("SPLASH_HEADERS") + _splash_dict: dict = { + "url": target_url, + "wait": _splash_parameter_wait, + "html5_media": _splash_parameter_wait, + "headers": _splash_headers + } + request_splash = scrapy.FormRequest( + url=_splash_url, + formdata=_splash_dict, + callback=NO_CALLBACK + ) + splash_response: scrapy.http.Response = await maybe_deferred_to_future( + spider.crawler.engine.download(request_splash) ) - if env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright: + if splash_response and splash_response.status != 200: + log.debug(f"SPLASH could not handle the requested website. " + f"(Splash returned HTTP Status {splash_response.status} for {target_url} !)") + _splash_success = False + # ToDo: Error-Handling for unsupported URLs + if splash_response.status == 415: + log.debug(f"SPLASH (HTTP Status {splash_response.status} -> Unsupported Media Type): " + f"Could not render target url {target_url}") + elif splash_response: + response: scrapy.http.Response = splash_response + else: + # ToDo: if Splash error's out -> Fallback to Playwright? 
+ log.debug(f"SPLASH returned {splash_response.status} for {target_url} ") + + if (_splash_success is False and env.get("PLAYWRIGHT_WS_ENDPOINT") + or env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright): # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, # it will default back to "splash" # this edge-case is necessary for spiders that only need playwright to gather a screenshot, # but don't use playwright within the spider itself (e.g. serlo_spider) - playwright_dict = await WebTools.getUrlData(url=item["lom"]["technical"]["location"][0], + # ToDo: change to scrapy.FormRequest? + target_url: str = item["lom"]["technical"]["location"][0] + playwright_dict = await WebTools.getUrlData(url=target_url, engine=WebEngine.Playwright) screenshot_bytes = playwright_dict.get("screenshot_bytes") img = Image.open(BytesIO(screenshot_bytes)) - self.create_thumbnails_from_image_bytes(img, item, settings) + self.create_thumbnails_from_image_bytes(img, item, settings_crawler) else: - if settings.get("DISABLE_SPLASH") is False: + if settings_crawler.get("DISABLE_SPLASH") is False: log.warning( "No thumbnail provided and SPLASH_URL was not configured for screenshots!" ) if response is None: - if settings.get("DISABLE_SPLASH") is False: + if settings_crawler.get("DISABLE_SPLASH") is False: log.error( - "Neither thumbnail or technical.location (and technical.format) provided! Please provide at least one of them" + "Neither thumbnail or technical.location (and technical.format) provided! " + "Please provide at least one of them" ) else: try: if response.headers["Content-Type"] == "image/svg+xml": - if len(response.content) > settings.get("THUMBNAIL_MAX_SIZE"): + if len(response.body) > settings_crawler.get("THUMBNAIL_MAX_SIZE"): raise Exception( "SVG images can't be converted, and the given image exceeds the maximum allowed size (" - + str(len(response.content)) + + str(len(response.body)) + " > " - + str(settings.get("THUMBNAIL_MAX_SIZE")) + + str(settings_crawler.get("THUMBNAIL_MAX_SIZE")) + ")" ) item["thumbnail"] = {} item["thumbnail"]["mimetype"] = response.headers["Content-Type"] item["thumbnail"]["small"] = base64.b64encode( - response.content + response.body ).decode() else: - img = Image.open(BytesIO(response.content)) - self.create_thumbnails_from_image_bytes(img, item, settings) + img = Image.open(BytesIO(response.body)) + self.create_thumbnails_from_image_bytes(img, item, settings_crawler) except Exception as e: if url is not None: log.warning( @@ -466,10 +497,36 @@ async def process_item(self, raw_item, spider): else: # item['thumbnail']={} raise DropItem( - "No thumbnail provided or ressource was unavailable for fetching" + "No thumbnail provided or resource was unavailable for fetching" ) return raw_item + @alru_cache(maxsize=128) + async def download_thumbnail_url(self, url: str, spider: scrapy.Spider): + """ + Download a thumbnail URL and **caches** the result. + + The cache works similarly to Python's built-in `functools.lru_cache`-decorator and discards the + least recently used items first. + (see: https://github.com/aio-libs/async-lru) + + Typical use-case: + Some webhosters serve generic placeholder images as their default thumbnail. + By caching the response of such URLs, we can save a significant amount of time and traffic. + + :param spider: The spider process that collected the URL. + :param url: URL of a thumbnail/image. 
+ :return: Response or None + """ + try: + request = scrapy.Request(url=url, callback=NO_CALLBACK) + response: Deferred | Future = await maybe_deferred_to_future( + spider.crawler.engine.download(request) + ) + return response + except ValueError: + log.debug(f"Thumbnail-Pipeline received an invalid URL: {url}") + # override the project settings with the given ones from the current spider # see PR 56 for details @@ -498,7 +555,7 @@ def create_thumbnails_from_image_bytes(self, image, item, settings): ).decode() -def get_settings_for_crawler(spider): +def get_settings_for_crawler(spider) -> scrapy.settings.Settings: all_settings = get_project_settings() crawler_settings = settings.BaseSettings(getattr(spider, "custom_settings") or {}, 'spider') if type(crawler_settings) == dict: From 0aafa967395398e1d355171ce326fde5a8780244 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 6 Dec 2023 12:23:00 +0100 Subject: [PATCH 392/590] feat: control async WebTools with Semaphores - by limiting concurrent access to 'getUrlData'-method by using two semaphores (one for Splash, one for Playwright), we can increase crawler performance without running into PoolTimeout Exceptions (which typically happen when too many requests are fired at once) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 30 ++++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index 571ebbfb..b1e599d8 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -1,4 +1,3 @@ -import asyncio import json from asyncio import Semaphore from enum import Enum @@ -19,16 +18,27 @@ class WebEngine(Enum): class WebTools: - @staticmethod - async def getUrlData(url: str, engine=WebEngine.Splash): - sem: Semaphore = asyncio.Semaphore(value=10) - # the headless browser can only handle 5 concurrent sessions and 5 items in the queue by default - async with sem: - if engine == WebEngine.Splash: - return await WebTools.__getUrlDataSplash(url) - elif engine == WebEngine.Playwright: - return await WebTools.__getUrlDataPlaywright(url) + _sem_splash: Semaphore = Semaphore(10) + _sem_playwright: Semaphore = Semaphore(10) + + @classmethod + async def __safely_get_splash_response(cls, url: str): + # ToDo: Docs + async with cls._sem_splash: + return await WebTools.__getUrlDataSplash(url) + + @classmethod + async def __safely_get_playwright_response(cls, url: str): + # ToDo: Docs + async with cls._sem_playwright: + return await WebTools.__getUrlDataPlaywright(url) + @classmethod + async def getUrlData(cls, url: str, engine: WebEngine = WebEngine.Splash): + if engine == WebEngine.Splash: + return await cls.__safely_get_splash_response(url) + elif engine == WebEngine.Playwright: + return await cls.__safely_get_playwright_response(url) raise Exception("Invalid engine") @staticmethod From be2e6d781b5e1fa6ee066cbba80d44875d6932d7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 6 Dec 2023 13:51:47 +0100 Subject: [PATCH 393/590] change: disable timeouts, use Semaphore - work-in-progress: for now, 25 seems to be a reasonable limit Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 70e096c0..02150a45 100644 --- a/converter/es_connector.py +++ 
b/converter/es_connector.py @@ -1,8 +1,10 @@ +import asyncio import base64 import json import logging import time import uuid +from asyncio import Semaphore from enum import Enum from typing import List @@ -107,7 +109,8 @@ class CreateGroupType(Enum): nodeApi: NODEV1Api groupCache: List[str] enabled: bool - client_async = httpx.AsyncClient() + _client_async = httpx.AsyncClient() + _sem: Semaphore = asyncio.Semaphore(25) def __init__(self): cookie_threshold = env.get("EDU_SHARING_COOKIE_REBUILD_THRESHOLD", True) @@ -162,14 +165,14 @@ def sync_node(self, spider, type, properties): async def set_node_text(self, uuid, item) -> bool: if "fulltext" in item: - response = await self.client_async.post( + response = await self._client_async.post( get_project_settings().get("EDU_SHARING_BASE_URL") + "rest/node/v1/nodes/-home-/" + uuid + "/textContent?mimetype=text/plain", headers=self.get_headers("multipart/form-data"), data=item["fulltext"].encode("utf-8"), - timeout=30, + timeout=None, ) return response.status_code == 200 # does currently not store data @@ -203,7 +206,7 @@ async def set_node_binary_data(self, uuid, item) -> bool: + item["lom"]["technical"]["format"] ) files = {"file": item["binary"]} - response = await self.client_async.post( + response = await self._client_async.post( get_project_settings().get("EDU_SHARING_BASE_URL") + "rest/node/v1/nodes/-home-/" + uuid @@ -211,7 +214,7 @@ async def set_node_binary_data(self, uuid, item) -> bool: + item["lom"]["technical"]["format"], headers=self.get_headers(None), files=files, - timeout=30, + timeout=None, ) return response.status_code == 200 else: @@ -222,7 +225,7 @@ async def set_node_preview(self, uuid, item) -> bool: key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None if key: files = {"image": base64.b64decode(item["thumbnail"][key])} - response = await self.client_async.post( + response = await self._client_async.post( get_project_settings().get("EDU_SHARING_BASE_URL") + "rest/node/v1/nodes/-home-/" + uuid @@ -230,7 +233,7 @@ async def set_node_preview(self, uuid, item) -> bool: + item["thumbnail"]["mimetype"], headers=self.get_headers(None), files=files, - timeout=30, + timeout=None, ) return response.status_code == 200 else: @@ -620,11 +623,14 @@ def set_node_permissions(self, uuid, item): logging.error(item["permissions"]) async def insert_item(self, spider, uuid, item): - node = self.sync_node(spider, "ccm:io", self.transform_item(uuid, spider, item)) - self.set_node_permissions(node["ref"]["id"], item) - await self.set_node_preview(node["ref"]["id"], item) - if not await self.set_node_binary_data(node["ref"]["id"], item): - await self.set_node_text(node["ref"]["id"], item) + async with self._sem: + # inserting items is controlled with a Semaphore, otherwise we'd get PoolTimeout Exceptions when there's a + # temporary burst of items that need to be inserted + node = self.sync_node(spider, "ccm:io", self.transform_item(uuid, spider, item)) + self.set_node_permissions(node["ref"]["id"], item) + await self.set_node_preview(node["ref"]["id"], item) + if not await self.set_node_binary_data(node["ref"]["id"], item): + await self.set_node_text(node["ref"]["id"], item) async def update_item(self, spider, uuid, item): await self.insert_item(spider, uuid, item) From 04b173924c958966e25d0a2c16a88433aacaacde Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 6 Dec 2023 16:03:24 +0100 Subject: [PATCH 394/590] fix: processThumbnailPipeline returned coroutine 
instead of item - fix: during SVG handling the "Content-Type"-field of "response.body" is a bytes-object, but we compared it to a string value -- feat: made sure that we also typecheck the "mimetype" (and convert it if needed) before trying to save it to its 'thumbnail.mimetype'-field - change: initiate "splash"-success-flag with None instead of True -- this should give a better indication if the Thumbnail-Pipeline tried to use Splash up until a specific point in time --- converter/pipelines.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 9aefc484..a815204b 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -372,7 +372,7 @@ async def process_item(self, raw_item, spider): settings_crawler = get_settings_for_crawler(spider) # checking if the (optional) attribute WEB_TOOLS exists: web_tools = settings_crawler.get("WEB_TOOLS", default=WebEngine.Splash) - _splash_success: bool = True # control flag flips to False if Splash can't handle a URL + _splash_success: bool | None = None # control flag flips to False if Splash can't handle a URL # if screenshot_bytes is provided (the crawler has already a binary representation of the image # the pipeline will convert/scale the given image if "screenshot_bytes" in item: @@ -386,12 +386,10 @@ async def process_item(self, raw_item, spider): elif "thumbnail" in item: # a thumbnail (url) is given - we will try to fetch it from the url url: str = item["thumbnail"] - # ToDo: Log time before the request time_start = datetime.datetime.now() response: scrapy.http.Response = await self.download_thumbnail_url(url, spider) time_end = datetime.datetime.now() log.debug(f"Loading thumbnail from {url} took {time_end - time_start}.") - # ToDo: log time after response if response.status != 200: log.debug(f"Thumbnail-Pipeline received unexpected response (status: {response.status}) from {url}") # ToDo: Error-handling necessary @@ -428,24 +426,22 @@ async def process_item(self, raw_item, spider): log.debug(f"SPLASH could not handle the requested website. " f"(Splash returned HTTP Status {splash_response.status} for {target_url} !)") _splash_success = False - # ToDo: Error-Handling for unsupported URLs + # ToDo (optional): more granular Error-Handling for unsupported URLs? if splash_response.status == 415: log.debug(f"SPLASH (HTTP Status {splash_response.status} -> Unsupported Media Type): " f"Could not render target url {target_url}") elif splash_response: response: scrapy.http.Response = splash_response else: - # ToDo: if Splash error's out -> Fallback to Playwright? - log.debug(f"SPLASH returned {splash_response.status} for {target_url} ") + log.debug(f"SPLASH returned HTTP Status {splash_response.status} for {target_url} ") - if (_splash_success is False and env.get("PLAYWRIGHT_WS_ENDPOINT") + if (_splash_success and _splash_success is False and env.get("PLAYWRIGHT_WS_ENDPOINT") or env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright): # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, # it will default back to "splash" # this edge-case is necessary for spiders that only need playwright to gather a screenshot, # but don't use playwright within the spider itself (e.g. serlo_spider) - # ToDo: change to scrapy.FormRequest? 
target_url: str = item["lom"]["technical"]["location"][0] playwright_dict = await WebTools.getUrlData(url=target_url, engine=WebEngine.Playwright) @@ -465,7 +461,7 @@ async def process_item(self, raw_item, spider): ) else: try: - if response.headers["Content-Type"] == "image/svg+xml": + if response.headers["Content-Type"] == b"image/svg+xml": if len(response.body) > settings_crawler.get("THUMBNAIL_MAX_SIZE"): raise Exception( "SVG images can't be converted, and the given image exceeds the maximum allowed size (" @@ -475,7 +471,11 @@ async def process_item(self, raw_item, spider): + ")" ) item["thumbnail"] = {} - item["thumbnail"]["mimetype"] = response.headers["Content-Type"] + _mimetype: bytes = response.headers["Content-Type"] + if _mimetype and isinstance(_mimetype, bytes): + item["thumbnail"]["mimetype"] = _mimetype.decode() + elif _mimetype and isinstance(_mimetype, str): + item["thumbnail"]["mimetype"] = _mimetype item["thumbnail"]["small"] = base64.b64encode( response.body ).decode() @@ -493,7 +493,7 @@ async def process_item(self, raw_item, spider): ) if "thumbnail" in item: del item["thumbnail"] - return self.process_item(raw_item, spider) + return await self.process_item(raw_item, spider) else: # item['thumbnail']={} raise DropItem( From 80fb90c6af56dcd4721c1d80b3ddc689bcb87bfc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 6 Dec 2023 17:19:03 +0100 Subject: [PATCH 395/590] build: upgrade to browserless v2 - change: observed connection timeouts in both v1 and v2, therefore changed the timeout setting to 60s (from default: 30s) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 8827408a..fce6841d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -19,10 +19,10 @@ services: retries: 3 start_period: 40s headless_chrome: - image: browserless/chrome + image: ghcr.io/browserless/chrome restart: always environment: - - "DEFAULT_LAUNCH_ARGS:[\"--disable-dev-shm-usage\"]" + - CONNECTION_TIMEOUT=60000 ports: - "127.0.0.1:3000:3000" networks: From f9ec308f6010384f5ac59ad6540d06c9a025cf87 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 6 Dec 2023 19:09:56 +0100 Subject: [PATCH 396/590] change: WebTools use 'Playwright' by default - feat: set WebTools (Playwright) default to "trafilatura" -- when extracting data with Playwright, we'll try to use "trafilatura" for fulltext extraction first and only fall back to html2text if trafilatura returned None - docs: clean up ToDos, add TypeHints --- converter/web_tools.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index b1e599d8..ea691bae 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -4,6 +4,7 @@ import html2text import httpx +import trafilatura from playwright.async_api import async_playwright from scrapy.utils.project import get_project_settings @@ -34,7 +35,7 @@ async def __safely_get_playwright_response(cls, url: str): return await WebTools.__getUrlDataPlaywright(url) @classmethod - async def getUrlData(cls, url: str, engine: WebEngine = WebEngine.Splash): + async def getUrlData(cls, url: str, engine: WebEngine = WebEngine.Playwright): if engine == WebEngine.Splash: return await cls.__safely_get_splash_response(url) elif engine == WebEngine.Playwright: @@ -44,10 +45,17 @@ async def 
getUrlData(cls, url: str, engine: WebEngine = WebEngine.Splash): @staticmethod async def __getUrlDataPlaywright(url: str): playwright_dict = await WebTools.fetchDataPlaywright(url) - html = playwright_dict.get("content") - screenshot_bytes = playwright_dict.get("screenshot_bytes") + html: str = playwright_dict.get("content") + screenshot_bytes: bytes = playwright_dict.get("screenshot_bytes") + fulltext: str = WebTools.html2Text(html) + if html and isinstance(html, str): + html_bytes: bytes = html.encode() + trafilatura_text: str | None = trafilatura.extract(html_bytes) + if trafilatura_text: + # trafilatura text extraction is (in general) more precise than html2Text, so we'll use it if available + fulltext = trafilatura_text return {"html": html, - "text": WebTools.html2Text(html), + "text": fulltext, "cookies": None, "har": None, "screenshot_bytes": screenshot_bytes} @@ -99,10 +107,10 @@ async def fetchDataPlaywright(url: str): # since waiting for 'networkidle' seems to cause timeouts content = await page.content() screenshot_bytes = await page.screenshot() - # ToDo: HAR / text / cookies - # if we are able to replicate the Splash response with all its fields, we could save traffic/Requests - # that are currently still being handled by Splash - # await page.close() + # ToDo: HAR / cookies + # if we are able to replicate the Splash response with all its fields, + # we could save traffic/requests that are currently still being handled by Splash + # see: https://playwright.dev/python/docs/api/class-browsercontext#browser-context-cookies return { "content": content, "screenshot_bytes": screenshot_bytes From 7dcf2a74e804530ed770cf1bb2a7a209beed43c9 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Dec 2023 15:13:42 +0100 Subject: [PATCH 397/590] feat: fallback to Playwright screenshot on failed Splash or Thumbnail URL - if Splash fails to render a website or a thumbnail URL couldn't be downloaded due to unexpected HTTP Status Codes, try to grab a screenshot of the website as a last resort / fallback Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index a815204b..59a01e6c 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -373,7 +373,9 @@ async def process_item(self, raw_item, spider): # checking if the (optional) attribute WEB_TOOLS exists: web_tools = settings_crawler.get("WEB_TOOLS", default=WebEngine.Splash) _splash_success: bool | None = None # control flag flips to False if Splash can't handle a URL - # if screenshot_bytes is provided (the crawler has already a binary representation of the image + _thumbnail_url_success: bool | None = None # flips to False if there was an error during thumbnail download + + # if screenshot_bytes is provided (the crawler has already a binary representation of the image, # the pipeline will convert/scale the given image if "screenshot_bytes" in item: # in case we are already using playwright in a spider, we can skip one additional HTTP Request by @@ -392,9 +394,9 @@ async def process_item(self, raw_item, spider): log.debug(f"Loading thumbnail from {url} took {time_end - time_start}.") if response.status != 200: log.debug(f"Thumbnail-Pipeline received unexpected response (status: {response.status}) from {url}") - # ToDo: Error-handling necessary - pass - log.debug(f"Thumbnail-URL-Cache after trying to query {url}: 
{self.download_thumbnail_url.cache_info()}") + _thumbnail_url_success = False + # flipping the thumbnail flag to False triggers a website screenshot by Playwright (fallback) + log.debug(f"Thumbnail-URL-Cache: {self.download_thumbnail_url.cache_info()} after trying to query {url} ") # nothing was given, we try to screenshot the page either via Splash or Playwright elif ( "location" in item["lom"]["technical"] @@ -435,13 +437,17 @@ async def process_item(self, raw_item, spider): else: log.debug(f"SPLASH returned HTTP Status {splash_response.status} for {target_url} ") - if (_splash_success and _splash_success is False and env.get("PLAYWRIGHT_WS_ENDPOINT") - or env.get("PLAYWRIGHT_WS_ENDPOINT") and web_tools == WebEngine.Playwright): - # if the attribute "WEB_TOOLS" doesn't exist as an attribute within a specific spider, - # it will default back to "splash" + playwright_websocket_endpoint: str | None = env.get("PLAYWRIGHT_WS_ENDPOINT") + if (not bool(_splash_success) and playwright_websocket_endpoint + or not bool(_thumbnail_url_success) and playwright_websocket_endpoint + or playwright_websocket_endpoint and web_tools == WebEngine.Playwright): + # we're using Playwright to take a website screenshot if: + # - the spider explicitly defined Playwright in its 'custom_settings'-dict + # - or: Splash failed to render a website (= fallback) + # - or: the thumbnail URL could not be downloaded (= fallback) # this edge-case is necessary for spiders that only need playwright to gather a screenshot, - # but don't use playwright within the spider itself (e.g. serlo_spider) + # but don't use playwright within the spider itself target_url: str = item["lom"]["technical"]["location"][0] playwright_dict = await WebTools.getUrlData(url=target_url, engine=WebEngine.Playwright) From 4bae38f66f021bd39c5c14529254137d87f08e77 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Dec 2023 16:13:04 +0100 Subject: [PATCH 398/590] change: browserless/chrome 'timeout'-setting to 120s - during testing/debugging it was observed that even the lenient timeout of 60s (default: 30s) per job is oftentimes not enough time for some website responses Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index fce6841d..bc11f76e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,7 +22,7 @@ services: image: ghcr.io/browserless/chrome restart: always environment: - - CONNECTION_TIMEOUT=60000 + - TIMEOUT=120000 ports: - "127.0.0.1:3000:3000" networks: From 8b7541fc8313c06ce68b8160c12a3698e23878e5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Dec 2023 16:24:53 +0100 Subject: [PATCH 399/590] change: LomBase parse / mapResponse / getUrlData methods to async - change methods to async where necessary to await the coroutines of WebTools and be able to work with their data - fix/optimize imports Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/lom_base.py | 29 ++++++++++++---------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 545e05a0..9ee6a105 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -1,8 +1,6 @@ -import html2text import logging from scrapy import settings -from 
scrapy.utils.project import get_project_settings

from converter.constants import Constants
from converter.es_connector import EduSharing
@@ -87,7 +85,7 @@ def hasChanged(self, response=None) -> bool:
    def shouldImport(self, response=None) -> bool:
        return True

-    def parse(self, response):
+    async def parse(self, response):
        if self.shouldImport(response) is False:
            logging.debug(
                "Skipping entry {} because shouldImport() returned false".format(str(self.getId(response)))
@@ -102,7 +100,8 @@ def parse(self, response):
        main.add_value("license", self.getLicense(response).load_item())
        main.add_value("permissions", self.getPermissions(response).load_item())
        # logging.debug(main.load_item())
-        main.add_value("response", self.mapResponse(response).load_item())
+        response_itemloader = await self.mapResponse(response)
+        main.add_value("response", response_itemloader.load_item())
        return main.load_item()

    # @deprecated
@@ -112,21 +111,25 @@ def html2Text(self, html):

    # @deprecated
    # directly use WebTools instead
-    def getUrlData(self, url):
-        return WebTools.getUrlData(url)
+    async def getUrlData(self, url):
+        return await WebTools.getUrlData(url)

-    def mapResponse(self, response, fetchData=True):
+    async def mapResponse(self, response, fetchData=True):
        r = ResponseItemLoader(response=response)
        r.add_value("status", response.status)
        # r.add_value('body',response.body.decode('utf-8'))
-        # render via splash to also get the full javascript rendered content.
        if fetchData:
-            data = self.getUrlData(response.url)
-            r.add_value("html", data["html"])
-            r.add_value("text", data["text"])
-            r.add_value("cookies", data["cookies"])
-            r.add_value("har", data["har"])
+            # render via splash or playwright to also get the full javascript rendered content.
+            data = await self.getUrlData(response.url)
+            if "html" in data:
+                r.add_value("html", data["html"])
+            if "text" in data:
+                r.add_value("text", data["text"])
+            if "cookies" in data:
+                r.add_value("cookies", data["cookies"])
+            if "har" in data:
+                r.add_value("har", data["har"])
        r.add_value("headers", response.headers)
        r.add_value("url", self.getUri(response))
        return r

From e5ede09ccb627dd196b2ace1cbedc67fb5cc77b3 Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Thu, 7 Dec 2023 16:34:52 +0100
Subject: [PATCH 400/590] fobizz_spider v0.0.5

- change: set default WebEngine to Playwright (Splash isn't able to render most webpages anymore)
- change: make 'parse'-method async
- fix: drop items where 'jslde' cannot extract a JSON-LD (because it doesn't exist)
-- this typically happens on "overview"-pages

Signed-off-by: criamos <981166+Criamos@users.noreply.github.com>
---
converter/spiders/fobizz_spider.py | 48 +++++++++++++++++++++---------
1 file changed, 34 insertions(+), 14 deletions(-)

diff --git a/converter/spiders/fobizz_spider.py b/converter/spiders/fobizz_spider.py
index a1688c9b..9032d6ae 100644
--- a/converter/spiders/fobizz_spider.py
+++ b/converter/spiders/fobizz_spider.py
@@ -1,16 +1,25 @@
from __future__ import annotations

+import logging
from urllib import parse

import scrapy
from extruct.jsonld import JsonLdExtractor

from converter.constants import Constants
-from converter.items import LomGeneralItemloader, LomBaseItemloader, LomTechnicalItemLoader, \
-    LicenseItemLoader, ResponseItemLoader, LomEducationalItemLoader, ValuespaceItemLoader, \
-    LomLifecycleItemloader
+from converter.items import (
+    LomGeneralItemloader,
+    LomBaseItemloader,
+    LomTechnicalItemLoader,
+    LicenseItemLoader,
+    ResponseItemLoader,
+
LomEducationalItemLoader, + ValuespaceItemLoader, + LomLifecycleItemloader, +) from converter.spiders.base_classes import LomBase from converter.util.sitemap import SitemapEntry, from_xml_response +from converter.web_tools import WebEngine jslde = JsonLdExtractor() @@ -21,9 +30,10 @@ class FobizzSpider(scrapy.Spider, LomBase): https://plattform.fobizz.com/sitemap """ - start_urls = ['https://plattform.fobizz.com/sitemap'] - name = 'fobizz_spider' - version = '0.0.4' # last update: 2023-08-12 + start_urls = ["https://plattform.fobizz.com/sitemap"] + name = "fobizz_spider" + version = "0.0.5" # last update: 2023-12-06 + custom_settings = {"WEB_TOOLS": WebEngine.Playwright} overview_pages_without_a_json_ld = [ "https://plattform.fobizz.com/unterrichtsmaterialien/faecher/Religion", @@ -81,7 +91,7 @@ def getId(self, response: scrapy.http.Response = None) -> str: def getHash(self, response: scrapy.http.Response = None) -> str: return response.meta["sitemap_entry"].lastmod + self.version - def parse(self, response: scrapy.http.XmlResponse, **kwargs): + async def parse(self, response: scrapy.http.XmlResponse, **kwargs): """ one url element usually looks like this: @@ -106,15 +116,25 @@ def parse(self, response: scrapy.http.XmlResponse, **kwargs): if self.hasChanged: yield response.follow(item.loc, callback=self.parse_site, cb_kwargs={'sitemap_entry': item}) - def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None): - # extract the jsonld - data = jslde.extract(response.text)[0] - response.meta['sitemap_entry'] = sitemap_entry + async def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None): + # extract the JSON-LD + json_ld_extract: list[dict] = jslde.extract(response.text) + if json_ld_extract and isinstance(json_ld_extract, list): + data = json_ld_extract[0] + else: + logging.warning(f"'jslde' could not parse JSON-LD for item {response.url} . Dropping Item.") + return + + response.meta["sitemap_entry"] = sitemap_entry base = super().getBase(response=response) - base.add_value("response", super().mapResponse(response).load_item()) + response_itemloader: ResponseItemLoader = await super().mapResponse(response) + base.add_value("response", response_itemloader.load_item()) # we assume that content is imported. Please use replace_value if you import something different - base.add_value('thumbnail', data.get("thumbnailUrl", None)) - base.add_value('lastModified', data.get("dateModified", None)) + thumbnail_url: str | None = data.get("thumbnailUrl", None) + if thumbnail_url and isinstance(thumbnail_url, str): + # do not fill the 'thumbnail'-field with None -> this would cause unnecessary Splash/Playwright requests + base.add_value("thumbnail", thumbnail_url) + base.add_value("lastModified", data.get("dateModified", None)) for publisher in data.get("publisher", []): # TODO add type, e.g. 
organization
            base.add_value("publisher", publisher.get("name"))

From 630c934bc2a09f11d13b89588af470e589aed958 Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Tue, 5 Dec 2023 13:30:34 +0100
Subject: [PATCH 401/590] rpi_virtuell_spider v0.0.7

- fix: "license.url" sometimes contained values intended for the "license.internal"-field
- cleanup: since v0.0.6 replaced the method by which licenses are detected, removed all old/obsolete code fragments
-- removed RegEx from imports
- cleanup: optimize imports and fix weak warnings
-- fix: TypeHint Warning
---
converter/spiders/rpi_virtuell_spider.py | 27 ++++++------------------
1 file changed, 7 insertions(+), 20 deletions(-)

diff --git a/converter/spiders/rpi_virtuell_spider.py b/converter/spiders/rpi_virtuell_spider.py
index d0c20908..a94a5eb2 100644
--- a/converter/spiders/rpi_virtuell_spider.py
+++ b/converter/spiders/rpi_virtuell_spider.py
@@ -1,4 +1,3 @@
-import html
import re
from typing import Optional
@@ -22,7 +21,7 @@ class RpiVirtuellSpider(CrawlSpider, LomBase):
    friendlyName = "rpi-virtuell"
    start_urls = ['https://material.rpi-virtuell.de/wp-json/mymaterial/v1/material/']
-    version = "0.0.6"
+    version = "0.0.7"

    custom_settings = {
        'ROBOTSTXT_OBEY': False,
        # 'AUTOTHROTTLE_ENABLED': False,
@@ -68,12 +67,6 @@ class RpiVirtuellSpider(CrawlSpider, LomBase):
    # rpi-virtuell has clarified their license-description:
    # 'Zur Wiederverwendung und Veränderung gekennzeichnet' can be both CC_BY and CC_BY_SA
    # since CC_BY_SA is slightly more restricting, we choose this mapping rather than the more liberal CC_BY
-    mapping_copyright = {
-        'Zur Wiederverwendung und Veränderung gekennzeichnet': Constants.LICENSE_CC_BY_SA_40,
-        'Zur nicht kommerziellen Wiederverwendung gekennzeichnet': Constants.LICENSE_CC_BY_NC_ND_40,
-        'Zur nicht kommerziellen Wiederverwendung und Veränderung gekennzeichnet': Constants.LICENSE_CC_BY_NC_SA_30,
-    }
-
    mapping_copyright_url = {
        '?fwp_lizenz=non-commercial-remixable': Constants.LICENSE_CC_BY_NC_SA_30,
        '?fwp_lizenz=non-commercial-copyable': Constants.LICENSE_CC_BY_NC_ND_40,
@@ -148,7 +141,7 @@ def start_requests(self):
            elif (url.split('/')[-2] == 'material') and (url.split('/') != ''):
                yield scrapy.Request(url=url, callback=self.parse)

-    def parse(self, response, **kwargs):
+    def parse(self, response: scrapy.http.TextResponse, **kwargs):
        """
        Checks how many pages need to be parsed with the currently set parameters (per_page items) first
        then yields all following scrapy.http.Requests that are needed to iterate through all wp_json pages.
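The fix named in the commit message above boils down to routing each mapped value to the field edu-sharing expects: internal license keys go to 'license.internal', real URLs go to 'license.url' (the actual check appears in the get_metadata_from_review_url hunk further below). A minimal sketch of that routing idea; the helper name and the exact set of internal keys are invented for illustration and are not part of the patch:

from converter.constants import Constants

# illustrative set: mapped values that are edu-sharing-internal keys rather than URLs
INTERNAL_LICENSE_KEYS = {Constants.LICENSE_COPYRIGHT_LAW, Constants.LICENSE_CUSTOM}

def route_license_value(license_loader, mapped_value: str) -> None:
    # hypothetical helper: internal keys go to 'internal', everything else to 'url'
    if mapped_value in INTERNAL_LICENSE_KEYS:
        license_loader.add_value("internal", mapped_value)
    else:
        license_loader.add_value("url", mapped_value)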
@@ -274,7 +267,6 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) # logging.debug("DEBUG inside get_metadata_from_review_url: response type = ", type(response), # "url =", response.url) - base = BaseItemLoader() base.add_value("sourceId", response.url) date_modified: str = response.xpath('//meta[@property="og:article:modified_time"]/@content').get() @@ -371,26 +363,21 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) lic = LicenseItemLoader() - license_regex_nc_reuse = re.compile(r'Zur nicht kommerziellen Wiederverwendung gekennzeichnet') - license_regex_nc_reuse_and_change = re.compile( - r'Zur nicht kommerziellen Wiederverwendung und Veränderung gekennzeichnet') - # important clarification from rpi-virtuell: # 'frei zugänglich' describes 'ungeklärte Lizenz' / 'volles Urheberrecht' # CC licenses > 'frei zugänglich' if both values are found in the license description - license_regex_free_access = re.compile(r'frei zugänglich') - license_regex_free_after_signup = re.compile(r'kostenfrei nach Anmeldung') - license_regex_with_costs = re.compile(r'kostenpflichtig') for key in self.mapping_copyright_url: if response.xpath('//a[contains(@href,"' + key + '")]').get(): - lic.add_value("url", self.mapping_copyright_url[key]) + # the mapping table holds "INTERNAL"-constants, which need to be saved to another field than urls: + if self.mapping_copyright_url[key] == Constants.LICENSE_COPYRIGHT_LAW: + lic.add_value("internal", self.mapping_copyright_url[key]) + else: + lic.add_value("url", self.mapping_copyright_url[key]) break # by default, all materials should be CC_BY_SA - according to the rpi-virtuell ToS # changed/decided on 2022-10-13: We can't assume that this license is correct and will not set any license - #lic.replace_value("url", Constants.LICENSE_CC_BY_SA_40) - if response.xpath('//a[contains(@href,"' + "?fwp_verfuegbarkeit=kostenpflichtig" + '")]').get(): vs.add_value("price", "yes") From 9715f1aec4b03414d4bbc9aa20cef1f933c8a5ce Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 5 Dec 2023 13:37:26 +0100 Subject: [PATCH 402/590] fix: mapping for COPYRIGHT_LAW / COPYRIGHT_FREE ('license.internal') - docs: add explanations with regard to how these 'internal'-values are displayed in the edu-sharing frontend Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/constants.py | 3 ++- converter/es_connector.py | 2 +- converter/util/test_license_mapper.py | 1 + 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/converter/constants.py b/converter/constants.py index b4ec9dce..0c9cbe30 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -88,7 +88,8 @@ class Constants: "PDM": [LICENSE_PDM], } - LICENSE_COPYRIGHT_LAW: Final[str] = "COPYRIGHT_FREE" + LICENSE_COPYRIGHT_FREE: Final[str] = "COPYRIGHT_FREE" # edu-sharing Frontend: "Copyright, freier Zugang" + LICENSE_COPYRIGHT_LAW: Final[str] = "COPYRIGHT_LICENSE" # edu-sharing Frontend: "Copyright, lizenzpflichtig" LICENSE_CUSTOM: Final[str] = "CUSTOM" # Custom License, use the license description field for arbitrary values LICENSE_NONPUBLIC: Final[str] = "NONPUBLIC" LICENSE_SCHULFUNK: Final[str] = "SCHULFUNK" # "Schulfunk (§47 UrhG)" diff --git a/converter/es_connector.py b/converter/es_connector.py index 02150a45..27d4f97b 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -319,7 +319,7 @@ def map_license(self, spaces, license): ) if "internal" in license: match 
license["internal"]: - case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM" | Constants.LICENSE_COPYRIGHT_LAW | Constants.LICENSE_SCHULFUNK | Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN: + case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM" | Constants.LICENSE_COPYRIGHT_FREE | Constants.LICENSE_COPYRIGHT_LAW | Constants.LICENSE_SCHULFUNK | Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN: spaces["ccm:commonlicense_key"] = license["internal"] case Constants.LICENSE_CUSTOM: spaces["ccm:commonlicense_key"] = "CUSTOM" diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py index 54ddefc1..4263a895 100644 --- a/converter/util/test_license_mapper.py +++ b/converter/util/test_license_mapper.py @@ -54,6 +54,7 @@ def test_get_license_url(self, test_input, expected_result): [ ("Copyright Zweites Deutsches Fernsehen, ZDF", Constants.LICENSE_COPYRIGHT_LAW), (" © ", Constants.LICENSE_COPYRIGHT_LAW), + # ToDo: find valid test-cases for LICENSE.COPYRIGHT_FREE # ToDo: regularly check if new enums for the 'internal' field need to be added here or in Constants.py ("jemand erwähnt CC0 in einem Freitext", "CC_0"), ("CC-0", "CC_0"), From 8bd4ae7143de368c4ee1017b816d2b71de596d38 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Dec 2023 19:05:12 +0100 Subject: [PATCH 403/590] change: async-await "mapResponse"- and "parse"-methods - if necessary, also change 'parse'-method to async Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/edu_sharing_base.py | 8 ++++---- .../base_classes/lernprogramme_spider_base.py | 4 ++-- converter/spiders/base_classes/lrmi_base.py | 4 ++-- converter/spiders/base_classes/mediawiki_base.py | 8 ++++---- converter/spiders/base_classes/oai_base.py | 4 ++-- converter/spiders/base_classes/rss_base.py | 8 ++++---- converter/spiders/digitallearninglab_spider.py | 12 +++++++----- converter/spiders/dilertube_spider.py | 7 +++---- converter/spiders/dwu_spider.py | 7 ++++--- converter/spiders/edulabs_spider.py | 8 ++++---- converter/spiders/geogebra_spider.py | 4 ++-- converter/spiders/ginkgomaps_spider.py | 9 +++++---- converter/spiders/grundschulkoenig_spider.py | 4 ++-- converter/spiders/kindoergarten_spider.py | 5 +++-- converter/spiders/kmap_spider.py | 5 +++-- converter/spiders/learning_apps_spider.py | 4 ++-- converter/spiders/leifi_spider.py | 4 ++-- converter/spiders/mediothek_pixiothek_spider.py | 4 ++-- converter/spiders/memucho_spider.py | 8 ++++---- converter/spiders/merlin_spider.py | 8 ++++---- converter/spiders/planet_schule_spider.py | 8 ++++---- converter/spiders/sample_spider.py | 4 ++-- converter/spiders/schule_im_aufbruch_spider.py | 4 ++-- converter/spiders/science_in_school_spider.py | 4 ++-- converter/spiders/segu_spider.py | 4 ++-- converter/spiders/tutory_spider.py | 9 +++++---- converter/spiders/umwelt_im_unterricht_spider.py | 10 +++++----- converter/spiders/youtube_spider.py | 8 ++++---- converter/spiders/zum_mathe_apps_spider.py | 6 +++--- converter/spiders/zum_physik_apps_spider.py | 7 ++++--- 30 files changed, 98 insertions(+), 91 deletions(-) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 42243b21..80a595f6 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -85,14 +85,14 @@ def getProperty(self, 
name, response): def start_requests(self): yield self.search() - def parse(self, response): + async def parse(self, response): data = json.loads(response.text) if len(data["nodes"]) > 0: for item in data["nodes"]: copyResponse = response.replace(url=item["content"]["url"]) copyResponse.meta["item"] = item if self.hasChanged(copyResponse): - yield LomBase.parse(self, copyResponse) + yield await LomBase.parse(self, copyResponse) yield self.search(data["pagination"]["from"] + data["pagination"]["count"]) def getBase(self, response): @@ -137,8 +137,8 @@ def getBase(self, response): return base # fulltext is handled in base, response is not necessary - def mapResponse(self, response, fetchData=True): - return LomBase.mapResponse(self, response, False) + async def mapResponse(self, response, fetchData=True): + return await LomBase.mapResponse(self, response, False) def getId(self, response=None) -> str: return response.meta["item"]["ref"]["id"] diff --git a/converter/spiders/base_classes/lernprogramme_spider_base.py b/converter/spiders/base_classes/lernprogramme_spider_base.py index 514e672b..f59e3848 100644 --- a/converter/spiders/base_classes/lernprogramme_spider_base.py +++ b/converter/spiders/base_classes/lernprogramme_spider_base.py @@ -65,7 +65,7 @@ def __init__(self, **kwargs): else None ) - def parse(self, response): + async def parse(self, response): reader = csv.DictReader( StringIO(response.text), # DictReader expects a file handle ["title", "description", "keywords", "thumbnail", "url", "width", "height"], @@ -76,7 +76,7 @@ def parse(self, response): row = self.map_row(row) response_copy = response.replace(url=row["url"]) response_copy.meta["row"] = row - yield self.loader.parse(response_copy) + yield await self.loader.parse(response_copy) if self.exercise_loader is not None: yield self.request_exercise(row) diff --git a/converter/spiders/base_classes/lrmi_base.py b/converter/spiders/base_classes/lrmi_base.py index 245e1e22..c281900c 100644 --- a/converter/spiders/base_classes/lrmi_base.py +++ b/converter/spiders/base_classes/lrmi_base.py @@ -39,8 +39,8 @@ def getLRMI(self, *params, response): return html.unescape(value) return None - def parse(self, response): - return LomBase.parse(self, response) + async def parse(self, response): + return await LomBase.parse(self, response) def getId(self, response): return self.getLRMI("identifier", "url", "name", response=response) diff --git a/converter/spiders/base_classes/mediawiki_base.py b/converter/spiders/base_classes/mediawiki_base.py index 9afa743e..782f621a 100644 --- a/converter/spiders/base_classes/mediawiki_base.py +++ b/converter/spiders/base_classes/mediawiki_base.py @@ -182,7 +182,7 @@ def parse_page_query(self, response: scrapy.http.Response): return yield self.query_for_pages(jmes_continue.search(data)) - def parse_page_data(self, response: scrapy.http.Response, extra=None): + async def parse_page_data(self, response: scrapy.http.Response, extra=None): data = json.loads(response.body) response.meta["item"] = data response.meta["item_extra"] = extra @@ -196,7 +196,7 @@ def parse_page_data(self, response: scrapy.http.Response, extra=None): ) return None - return super().parse(response) + return await super().parse(response) def getId(self, response=None): data = response.meta["item"] @@ -205,8 +205,8 @@ def getId(self, response=None): def getHash(self, response=None): return str(jmes_revid.search(response.meta["item"])) + self.version - def mapResponse(self, response, fetchData=True): - mr = super().mapResponse(response, 
fetchData=False) + async def mapResponse(self, response, fetchData=True): + mr = await super().mapResponse(response, fetchData=False) data = json.loads(response.body) title: str = jmes_title.search(data) title_underscored: str = title.replace(" ", "_") diff --git a/converter/spiders/base_classes/oai_base.py b/converter/spiders/base_classes/oai_base.py index 7b38d0b7..29dacb68 100644 --- a/converter/spiders/base_classes/oai_base.py +++ b/converter/spiders/base_classes/oai_base.py @@ -77,8 +77,8 @@ def parse(self, response): ) yield scrapy.Request(url=nextUrl, callback=self.parse) - def parseRecord(self, response): - lom = LomBase.parse(self, response) + async def parseRecord(self, response): + lom = await LomBase.parse(self, response) return lom def getBase(self, response): diff --git a/converter/spiders/base_classes/rss_base.py b/converter/spiders/base_classes/rss_base.py index 8d2a32db..df1b379d 100644 --- a/converter/spiders/base_classes/rss_base.py +++ b/converter/spiders/base_classes/rss_base.py @@ -24,11 +24,11 @@ def parse(self, response): self.response = response return self.startHandler(response) - def startHandler(self, response): + async def startHandler(self, response): for item in response.xpath("//rss/channel/item"): responseCopy = response.replace(url=item.xpath("link//text()").get()) responseCopy.meta["item"] = item - yield LomBase.parse(self, responseCopy) + yield await LomBase.parse(self, responseCopy) def getId(self, response): return response.meta["item"].xpath("link//text()").get() @@ -36,8 +36,8 @@ def getId(self, response): def getHash(self, response): return self.version + str(response.meta["item"].xpath("pubDate//text()").get()) - def mapResponse(self, response): - r = LomBase.mapResponse(self, response) + async def mapResponse(self, response): + r = await LomBase.mapResponse(self, response) return r def getBase(self, response): diff --git a/converter/spiders/digitallearninglab_spider.py b/converter/spiders/digitallearninglab_spider.py index 762ae2c0..cde295ce 100644 --- a/converter/spiders/digitallearninglab_spider.py +++ b/converter/spiders/digitallearninglab_spider.py @@ -8,7 +8,7 @@ from converter.constants import Constants from converter.valuespace_helper import ValuespaceHelper from .base_classes import LrmiBase, LomBase -from ..items import LicenseItemLoader, LomLifecycleItemloader +from ..items import LicenseItemLoader, LomLifecycleItemloader, ResponseItemLoader from ..util.license_mapper import LicenseMapper @@ -36,8 +36,8 @@ class DigitallearninglabSpider(CrawlSpider, LrmiBase): def __init__(self, **kwargs): LrmiBase.__init__(self, **kwargs) - def mapResponse(self, response, **kwargs): - return LrmiBase.mapResponse(self, response) + async def mapResponse(self, response, **kwargs): + return await LrmiBase.mapResponse(self, response) def getId(self, response): return response.meta["item"].get("id") @@ -236,7 +236,7 @@ def getValuespaces(self, response): pass return valuespaces - def parse(self, response, **kwargs): + async def parse(self, response, **kwargs): if self.shouldImport(response) is False: logging.debug( "Skipping entry {} because shouldImport() returned false".format(str(self.getId(response))) @@ -268,7 +268,9 @@ def parse(self, response, **kwargs): base.add_value("lom", lom.load_item()) base.add_value("license", self.getLicense(response).load_item()) base.add_value("permissions", self.getPermissions(response).load_item()) - base.add_value("response", self.mapResponse(response).load_item()) + + response_itemloader: ResponseItemLoader = await 
self.mapResponse(response) + base.add_value("response", response_itemloader.load_item()) base.add_value("valuespaces", self.getValuespaces(response).load_item()) return base.load_item() diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py index 3e426ff9..43b023a0 100644 --- a/converter/spiders/dilertube_spider.py +++ b/converter/spiders/dilertube_spider.py @@ -5,10 +5,9 @@ import w3lib.html from scrapy.spiders import CrawlSpider -from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ LomLifecycleItemloader, LomEducationalItemLoader, LomClassificationItemLoader, ValuespaceItemLoader, \ - LicenseItemLoader + LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase from converter.util.sitemap import from_xml_response @@ -126,7 +125,7 @@ def getId(self, response=None) -> str: def getHash(self, response=None) -> str: pass - def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: """ Gathers metadata from a video-url, nests the metadata within a BaseItemLoader and yields a complete BaseItem by calling the .load_item()-method. @@ -330,7 +329,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - response_loader = super().mapResponse(response) + response_loader: ResponseItemLoader = await super().mapResponse(response) base.add_value('response', response_loader.load_item()) yield base.load_item() diff --git a/converter/spiders/dwu_spider.py b/converter/spiders/dwu_spider.py index 16b24011..94f65d83 100644 --- a/converter/spiders/dwu_spider.py +++ b/converter/spiders/dwu_spider.py @@ -7,7 +7,7 @@ from converter.constants import Constants from converter.items import LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, LomLifecycleItemloader, \ - LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader + LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase @@ -118,7 +118,7 @@ def parse_topic_overview(self, response: scrapy.http.Response): # making sure that we don't crawl the overview-page more than once: self.parsed_urls.add(response.url) - def parse(self, response: scrapy.http.Response, **kwargs): + async def parse(self, response: scrapy.http.Response, **kwargs): base = super().getBase(response=response) lom = LomBaseItemloader() general = LomGeneralItemloader(response=response) @@ -273,7 +273,8 @@ def parse(self, response: scrapy.http.Response, **kwargs): permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - base.add_value('response', super().mapResponse(response).load_item()) + response_itemloader: ResponseItemLoader = await super().mapResponse(response) + base.add_value('response', response_itemloader.load_item()) # print(self.parsed_urls) # print("debug_url_set length:", len(self.parsed_urls)) diff --git a/converter/spiders/edulabs_spider.py b/converter/spiders/edulabs_spider.py index 5c1d336f..5ac27a2d 100644 --- a/converter/spiders/edulabs_spider.py +++ b/converter/spiders/edulabs_spider.py @@ -7,7 +7,7 @@ from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, 
LomTechnicalItemLoader, \ LomLifecycleItemloader, LomEducationalItemLoader, LomClassificationItemLoader, ValuespaceItemLoader, \ - LicenseItemLoader + LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase @@ -56,7 +56,7 @@ def getId(self, response=None) -> str: def getHash(self, response=None) -> str: pass - def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: """ Scrapy Contracts: @@ -133,7 +133,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: json_ld: str = response.xpath('//script[@type="application/ld+json"]/text()').get() json_ld: dict = json.loads(json_ld) - type_str: str = response.xpath('//head/meta[@property="og:type"]/@content').get() + # og_type: str = response.xpath('//head/meta[@property="og:type"]/@content').get() date_published: str = response.xpath('//head/meta[@property="article:published_time"]/@content').get() language: str = response.xpath('//head/meta[@property="og:locale"]/@content').get() @@ -257,7 +257,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - response_loader = super().mapResponse(response) + response_loader: ResponseItemLoader = await super().mapResponse(response) base.add_value('response', response_loader.load_item()) yield base.load_item() diff --git a/converter/spiders/geogebra_spider.py b/converter/spiders/geogebra_spider.py index 930788d9..33258816 100644 --- a/converter/spiders/geogebra_spider.py +++ b/converter/spiders/geogebra_spider.py @@ -52,9 +52,9 @@ def parse(self, response): ) i += 1 - def parseEntry(self, response): + async def parseEntry(self, response): if self.get("language", response=response) == "de": - return LomBase.parse(self, response) + return await LomBase.parse(self, response) logging.info( "Skpping entry with language " + self.get("language", response=response) ) diff --git a/converter/spiders/ginkgomaps_spider.py b/converter/spiders/ginkgomaps_spider.py index e58e0a63..92f518ba 100644 --- a/converter/spiders/ginkgomaps_spider.py +++ b/converter/spiders/ginkgomaps_spider.py @@ -5,7 +5,7 @@ from converter.constants import Constants from converter.items import LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, LomLifecycleItemloader, \ - LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader + LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase @@ -184,7 +184,7 @@ def get_navigation_urls_fourth_level(self, response: scrapy.http.Response): # print("fourth level Method: current url = ", str(response.url), " amount of URLs in total: ", # len(self.navigation_urls)) - def parse(self, response: scrapy.http.Response, **kwargs): + async def parse(self, response: scrapy.http.Response, **kwargs): """ Scrapy Contracts: @@ -283,7 +283,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): # "Sekundarstufe II", # "Berufliche Bildung", # "Erwachsenenbildung"]) - vs.add_value('new_lrt', [Constants.NEW_LRT_MATERIAL, 'b6ceade0-58d3-4179-af71-d53ebc6e49d4']) # karte + vs.add_value('new_lrt', [Constants.NEW_LRT_MATERIAL, 'b6ceade0-58d3-4179-af71-d53ebc6e49d4']) # karte vs.add_value('intendedEndUserRole', ["learner", "teacher", "parent"]) @@ -306,6 +306,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): permissions 
= super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - base.add_value('response', super().mapResponse(response).load_item()) + response_itemloader: ResponseItemLoader = await super().mapResponse(response) + base.add_value('response', response_itemloader.load_item()) yield base.load_item() diff --git a/converter/spiders/grundschulkoenig_spider.py b/converter/spiders/grundschulkoenig_spider.py index d7d9e1c5..d51f5bab 100644 --- a/converter/spiders/grundschulkoenig_spider.py +++ b/converter/spiders/grundschulkoenig_spider.py @@ -100,7 +100,7 @@ def parse_sitemap(self, response: scrapy.http.XmlResponse): if skip_url is False: yield response.follow(item.loc, callback=self.parse, cb_kwargs={'sitemap_entry': item}) - def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None): + async def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None): title = response.xpath('//span[@class="nav__crumb nav__crumb--current"]/span/text()').get() # content = response.xpath('//div[@class="page__content"]') # Worksheets are grouped, sometimes several worksheet-containers per page exist @@ -196,7 +196,7 @@ def parse(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) - response_loader = super().mapResponse(response) + response_loader = await super().mapResponse(response) base.add_value('response', response_loader.load_item()) yield base.load_item() diff --git a/converter/spiders/kindoergarten_spider.py b/converter/spiders/kindoergarten_spider.py index bad34cc8..b76fc362 100644 --- a/converter/spiders/kindoergarten_spider.py +++ b/converter/spiders/kindoergarten_spider.py @@ -72,7 +72,7 @@ def parse(self, response: scrapy.http.XmlResponse, **kwargs): if self.hasChanged(response) and skip_check is False: yield response.follow(item.loc, callback=self.parse_site, cb_kwargs={'sitemap_entry': item}) - def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None) -> BaseItem: + async def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapEntry = None) -> BaseItem: """ parses metadata from an individual item both by its HtmlResponse and its sitemap tags @@ -82,7 +82,8 @@ def parse_site(self, response: scrapy.http.HtmlResponse, sitemap_entry: SitemapE """ response.meta['sitemap_entry'] = sitemap_entry base = super().getBase(response=response) - base.add_value("response", super().mapResponse(response).load_item()) + response_itemloader: ResponseItemLoader = await super().mapResponse(response) + base.add_value("response", response_itemloader.load_item()) # we assume that content is imported. 
Please use replace_value if you import something different # thumbnail_href = response.css('.post-thumbnail img::attr(src)').get() base.add_value('thumbnail', response.css('.post-thumbnail img::attr(src)').get()) diff --git a/converter/spiders/kmap_spider.py b/converter/spiders/kmap_spider.py index c9441e39..d6cbc78c 100644 --- a/converter/spiders/kmap_spider.py +++ b/converter/spiders/kmap_spider.py @@ -7,7 +7,7 @@ from converter.constants import Constants from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader + LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase from converter.util.sitemap import from_xml_response from converter.web_tools import WebEngine, WebTools @@ -130,7 +130,8 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) - base.add_value('response', super().mapResponse(response).load_item()) + response_itemloader: ResponseItemLoader = await super().mapResponse(response) + base.add_value('response', response_itemloader.load_item()) # KMap doesn't deliver fulltext to neither splash nor playwright, the fulltext object will be showing up as # 'text': 'JavaScript wird benötigt!\n\n', # in the final "scrapy.Item". As long as KMap doesn't change the way it's delivering its JavaScript content, diff --git a/converter/spiders/learning_apps_spider.py b/converter/spiders/learning_apps_spider.py index bc1d482a..83cd4b67 100644 --- a/converter/spiders/learning_apps_spider.py +++ b/converter/spiders/learning_apps_spider.py @@ -50,8 +50,8 @@ def parseList(self, response): offset += len(response.xpath('//results/app')) yield self.startRequest(response.meta['cat'], response.meta['subcat'], offset) - def parse(self, response): - return LomBase.parse(self, response) + async def parse(self, response): + return await LomBase.parse(self, response) def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) diff --git a/converter/spiders/leifi_spider.py b/converter/spiders/leifi_spider.py index 8dc0d1eb..4b0946f7 100644 --- a/converter/spiders/leifi_spider.py +++ b/converter/spiders/leifi_spider.py @@ -50,8 +50,8 @@ def parse_xml(self, response): copy_response.meta["item"] = item yield self.parse(copy_response) - def parse(self, response): - return LomBase.parse(self, response) + async def parse(self, response): + return await LomBase.parse(self, response) def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index 310403f3..79316b46 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -32,7 +32,7 @@ def start_requests(self): for url in self.start_urls: yield Request(url=url, callback=self.parse) - def parse(self, response: scrapy.http.TextResponse, **kwargs): + async def parse(self, response: scrapy.http.TextResponse, **kwargs): data = self.getUrlData(response.url) response.meta["rendered_data"] = data # as of Scrapy 2.2 the JSON of a TextResponse can be loaded like this, @@ -42,7 +42,7 @@ def parse(self, response: scrapy.http.TextResponse, **kwargs): copy_response = 
response.copy() # Passing the dictionary for easier access to its attributes. copy_response.meta["item"] = element - yield LomBase.parse(self, response=copy_response) + yield await LomBase.parse(self, response=copy_response) # def _if_exists_add(self, edu_dict: dict, element_dict: dict, edu_attr: str, element_attr: str): # if element_attr in element_dict: diff --git a/converter/spiders/memucho_spider.py b/converter/spiders/memucho_spider.py index 1130a347..94c292b8 100644 --- a/converter/spiders/memucho_spider.py +++ b/converter/spiders/memucho_spider.py @@ -40,8 +40,8 @@ def start_requests(self): yield Request(url=url, callback=self.parse_sitemap) pass - def mapResponse(self, response): - return LomBase.mapResponse(self, response) + async def mapResponse(self, response): + return await LomBase.mapResponse(self, response) def getId(self, response): return response.meta["item"].get("TopicId") @@ -65,8 +65,8 @@ def parse_sitemap(self, response): meta={"item": item}, ) - def parse(self, response): - return LomBase.parse(self, response) + async def parse(self, response): + return await LomBase.parse(self, response) # thumbnail is always the same, do not use the one from rss def getBase(self, response): diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index e7c4afba..8c3dd16b 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -37,7 +37,7 @@ def start_requests(self): headers={"Accept": "application/xml", "Content-Type": "application/xml"}, ) - def parse(self, response: scrapy.http.Response): + async def parse(self, response: scrapy.http.Response): print("Parsing URL: " + response.url) # Call Splash only once per page (that contains multiple XML elements). @@ -78,7 +78,7 @@ def parse(self, response: scrapy.http.Response): yield self.handleEntry(copyResponse) # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. - LomBase.parse(self, copyResponse) + await LomBase.parse(self, copyResponse) # TODO: To not stress the Rest APIs. 
# time.sleep(0.1) @@ -123,8 +123,8 @@ def mapResponse(self, response): r.add_value("url", self.getUri(response)) return r - def handleEntry(self, response): - return LomBase.parse(self, response) + async def handleEntry(self, response): + return await LomBase.parse(self, response) def getBase(self, response): base = LomBase.getBase(self, response) diff --git a/converter/spiders/planet_schule_spider.py b/converter/spiders/planet_schule_spider.py index ac53bac6..7a46c3cb 100644 --- a/converter/spiders/planet_schule_spider.py +++ b/converter/spiders/planet_schule_spider.py @@ -36,8 +36,8 @@ def start_requests(self): for url in self.start_urls: yield scrapy.Request(url=url, callback=self.parse) - def mapResponse(self, response): - return LomBase.mapResponse(self, response) + async def mapResponse(self, response): + return await LomBase.mapResponse(self, response) def startHandler(self, response): for item in response.xpath("//rss/channel/item"): @@ -56,8 +56,8 @@ def startHandler(self, response): # }) ) - def handleLink(self, response): - return LomBase.parse(self, response) + async def handleLink(self, response): + return await LomBase.parse(self, response) # thumbnail is always the same, do not use the one from rss def getBase(self, response): diff --git a/converter/spiders/sample_spider.py b/converter/spiders/sample_spider.py index 9395e312..34f852f4 100644 --- a/converter/spiders/sample_spider.py +++ b/converter/spiders/sample_spider.py @@ -15,8 +15,8 @@ class SampleSpider(CrawlSpider, LomBase): def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) - def parse(self, response): - return LomBase.parse(self, response) + async def parse(self, response): + return await LomBase.parse(self, response) # return a (stable) id of the source def getId(self, response): diff --git a/converter/spiders/schule_im_aufbruch_spider.py b/converter/spiders/schule_im_aufbruch_spider.py index 53f8695f..09840abc 100755 --- a/converter/spiders/schule_im_aufbruch_spider.py +++ b/converter/spiders/schule_im_aufbruch_spider.py @@ -80,7 +80,7 @@ def get_video_urls_from_overview(self, response): # following each video_url to the dedicated video-subpage to grab metadata yield response.follow(url=video_full_url, callback=self.parse_video_page) - def parse_video_page(self, response: scrapy.http.Response = None): + async def parse_video_page(self, response: scrapy.http.Response = None): """ parses a video-page (e.g. https://vimeo.com/videoID whereby videoID is a number) for metadata (condition: only if there is a "json+ld"-script found within the video-page). @@ -101,7 +101,7 @@ def parse_video_page(self, response: scrapy.http.Response = None): # response.xpath('//*[@id="wrap"]/div[2]/script[1]/text()').get() # might have to access it and split it up with regEx - return LomBase.parse(self, response) + return await LomBase.parse(self, response) else: logging.debug("Could not find ld+json script, skipping entry: " + response.url) diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index 95749fe8..a4417640 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -109,7 +109,7 @@ def extract_and_parse_date(response): date_published = datetime.datetime.now() return date_published - def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: """ Crawls an individual article and extracts metadata. 
Afterward creates a BaseItem by filling up metadata-fields by calling .load_item() on the respective ItemLoaders. @@ -385,7 +385,7 @@ def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) - response_loader = super().mapResponse(response) + response_loader = await super().mapResponse(response) base.add_value("response", response_loader.load_item()) yield base.load_item() diff --git a/converter/spiders/segu_spider.py b/converter/spiders/segu_spider.py index 1395e8a8..55470a32 100644 --- a/converter/spiders/segu_spider.py +++ b/converter/spiders/segu_spider.py @@ -19,8 +19,8 @@ class SeguSpider(CrawlSpider, LomBase, JSONBase): def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) - def mapResponse(self, response, **kwargs): - r = LomBase.mapResponse(self, response, fetchData=False) + async def mapResponse(self, response, **kwargs): + r = await LomBase.mapResponse(self, response, fetchData=False) r.replace_value("text", "") r.replace_value("html", "") r.replace_value("url", response.meta["item"].get("link")) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index b4f214b7..7480906d 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -8,7 +8,7 @@ from scrapy.spiders import CrawlSpider from .base_classes import LomBase, JSONBase -from ..items import LomBaseItemloader, BaseItemLoader +from ..items import LomBaseItemloader, BaseItemLoader, ResponseItemLoader from ..web_tools import WebEngine, WebTools @@ -116,7 +116,7 @@ def check_if_item_should_be_dropped(self, response) -> bool: drop_item_flag = True return drop_item_flag - def parse(self, response, **kwargs): + async def parse(self, response, **kwargs): try: item_dict_from_api: dict = kwargs["item_dict"] response.meta["item"] = item_dict_from_api @@ -125,7 +125,7 @@ def parse(self, response, **kwargs): drop_item_flag: bool = self.check_if_item_should_be_dropped(response) if drop_item_flag is True: - return None + return # if we need more metadata from the DOM, this could be a suitable place to move up the call to Playwright base_loader: BaseItemLoader = self.getBase(response) lom_loader: LomBaseItemloader = self.getLOM(response) @@ -136,7 +136,8 @@ def parse(self, response, **kwargs): base_loader.add_value("valuespaces", self.getValuespaces(response).load_item()) base_loader.add_value("license", self.getLicense(response).load_item()) base_loader.add_value("permissions", self.getPermissions(response).load_item()) - base_loader.add_value("response", self.mapResponse(response, fetchData=False).load_item()) + response_itemloader: ResponseItemLoader = await self.mapResponse(response, fetchData=False) + base_loader.add_value("response", response_itemloader.load_item()) yield base_loader.load_item() def getBase(self, response=None): diff --git a/converter/spiders/umwelt_im_unterricht_spider.py b/converter/spiders/umwelt_im_unterricht_spider.py index 8c8768c7..7d0e21f4 100644 --- a/converter/spiders/umwelt_im_unterricht_spider.py +++ b/converter/spiders/umwelt_im_unterricht_spider.py @@ -107,7 +107,7 @@ def parse_category_overview_for_topics_and_subpages(self, response: scrapy.http. parsed_urls.add(url) self.topic_urls_parsed.update(parsed_urls) - def parse(self, response: scrapy.http.Response, **kwargs): + async def parse(self, response: scrapy.http.Response, **kwargs): """ Parses an individual topic url for metadata and yields a BaseItem. 
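Every spider touched by PATCH 403 follows the same pattern as the hunks above and below: the callback is declared "async def" so that the coroutine returned by mapResponse() (and, indirectly, by WebTools.getUrlData()) can be awaited before its ItemLoader is read. A minimal, self-contained sketch of that pattern; the spider name and the yielded fields are invented for illustration and are not part of the repository:

import scrapy

class AsyncCallbackExample(scrapy.Spider):
    # illustrative spider, not part of the repository
    name = "async_callback_example"
    start_urls = ["https://example.org/"]

    async def map_response_stub(self, response) -> dict:
        # stand-in for an awaitable helper such as LomBase.mapResponse()
        return {"url": response.url, "status": response.status}

    async def parse(self, response, **kwargs):
        # Scrapy 2.x accepts "async def" callbacks, so coroutines can be awaited here
        mapped = await self.map_response_stub(response)
        yield {"title": response.css("title::text").get(), "response_meta": mapped}

Because the awaited helper can issue its own Splash or Playwright requests without blocking the reactor, the change ripples through every parse()/mapResponse() pair shown in this patch.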
@@ -211,7 +211,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): if "/hintergrund/" in current_url: # vs.add_value('learningResourceType', 'Text') # ToDo vs.add_value('new_lrt', ['b98c0c8c-5696-4537-82fa-dded7236081e', '7381f17f-50a6-4ce1-b3a0-9d85a482eec0']) - # "Artikel und Einzelpublikation" , "Unterrichtsplanung" + # "Artikel und Einzelpublikation", "Unterrichtsplanung" if "/medien/dateien/" in current_url: # topics categorized as "Arbeitsmaterial" offer customizable worksheets to teachers, most of the time # consisting of both an "Unterrichtsvorschlag" and a worksheet @@ -222,13 +222,13 @@ def parse(self, response: scrapy.http.Response, **kwargs): # each video is served together with one or several "Unterrichtsvorschlag"-documents # vs.add_value('learningResourceType', 'video') # ToDo vs.add_value('new_lrt', ['7a6e9608-2554-4981-95dc-47ab9ba924de', '7381f17f-50a6-4ce1-b3a0-9d85a482eec0']) - # "Video (Material)" ,"Unterrichtsplanung" + # "Video (Material)", "Unterrichtsplanung" if "/medien/bilder/" in current_url: # topics categorized as "Bilderserie" hold several images in a gallery (with individual licenses), # they also come with one or several "Unterrichtsvorschlag"-documents that are linked to further below # vs.add_value('learningResourceType', 'image') # ToDo vs.add_value('new_lrt', ["a6d1ac52-c557-4151-bc6f-0d99b0b96fb9", "7381f17f-50a6-4ce1-b3a0-9d85a482eec0"]) - # "Bild (Material)" , "Unterrichtsplanung" + # "Bild (Material)", "Unterrichtsplanung" # ToDo: once new_lrt goes live: # - remove the old learningResourceType with the next crawler update vs.add_value('price', 'no') @@ -307,7 +307,7 @@ def parse(self, response: scrapy.http.Response, **kwargs): permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - response_loader = super().mapResponse(response) + response_loader = await super().mapResponse(response) base.add_value('response', response_loader.load_item()) yield base.load_item() diff --git a/converter/spiders/youtube_spider.py b/converter/spiders/youtube_spider.py index a2e5b0f5..82461fde 100644 --- a/converter/spiders/youtube_spider.py +++ b/converter/spiders/youtube_spider.py @@ -210,13 +210,13 @@ def request_videos(self, ids: List[str], meta: dict): ) return Request(request_url, meta=meta, callback=self.parse_videos) - def parse_videos(self, response: Response): + async def parse_videos(self, response: Response): body = json.loads(response.body) assert body["kind"] == "youtube#videoListResponse" for item in body["items"]: response_copy = response.replace(url=self.get_video_url(item)) response_copy.meta["item"] = item - yield self.lomLoader.parse(response_copy) + yield await self.lomLoader.parse(response_copy) def parse_custom_url(self, response: Response) -> Request: match = re.search('', response.text) @@ -260,8 +260,8 @@ def getHash(self, response: Response) -> str: return self.version + response.meta["item"]["snippet"]["publishedAt"] @overrides # LomBase - def mapResponse(self, response) -> items.ResponseItemLoader: - return LomBase.mapResponse(self, response, False) + async def mapResponse(self, response) -> items.ResponseItemLoader: + return await LomBase.mapResponse(self, response, False) @overrides # LomBase def getBase(self, response: Response) -> items.BaseItemLoader: diff --git a/converter/spiders/zum_mathe_apps_spider.py b/converter/spiders/zum_mathe_apps_spider.py index a669b90d..0176d632 100644 --- a/converter/spiders/zum_mathe_apps_spider.py +++ b/converter/spiders/zum_mathe_apps_spider.py 
@@ -7,7 +7,7 @@ from converter.constants import Constants from converter.items import LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, LomLifecycleItemloader, \ - LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader + LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase from converter.web_tools import WebTools, WebEngine @@ -175,7 +175,7 @@ async def parse(self, response: scrapy.http.Response, **kwargs): permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - # TODO: fix super().mapResponse - base.add_value('response', super().mapResponse(response).load_item()) + response_itemloader: ResponseItemLoader = await super().mapResponse(response) + base.add_value('response', response_itemloader.load_item()) yield base.load_item() diff --git a/converter/spiders/zum_physik_apps_spider.py b/converter/spiders/zum_physik_apps_spider.py index f136739b..3f025d44 100644 --- a/converter/spiders/zum_physik_apps_spider.py +++ b/converter/spiders/zum_physik_apps_spider.py @@ -6,7 +6,7 @@ from converter.constants import Constants from converter.items import LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, LomLifecycleItemloader, \ - LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader + LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader from converter.spiders.base_classes import LomBase from converter.web_tools import WebTools, WebEngine @@ -21,7 +21,7 @@ class ZumPhysikAppsSpider(scrapy.Spider, LomBase): # "https://www.zum.de/ma/fendt/phde/" ] version = "0.0.6" # last update: 2022-05-23 - # expected amount of items after a successful crawl: 55 + # expected number of items after a successful crawl: 55 custom_settings = { "AUTOTHROTTLE_ENABLED": True, # "AUTOTHROTTLE_DEBUG": True @@ -152,6 +152,7 @@ async def parse(self, response: scrapy.http.Response, **kwargs): permissions = super().getPermissions(response) base.add_value('permissions', permissions.load_item()) - base.add_value('response', super().mapResponse(response).load_item()) + response_itemloader: ResponseItemLoader = await super().mapResponse(response) + base.add_value('response', response_itemloader.load_item()) yield base.load_item() From 37f13797ee7e7ad55f11833a0b61b08eb8dcad5d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 7 Dec 2023 21:01:01 +0100 Subject: [PATCH 404/590] change: drop Semaphore from serlo_spider - since WebTools is controlled by Semaphores, the Semaphore (that was used for debugging) is no longer needed within the serlo_spider class Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/serlo_spider.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index 74811f59..bfe47bb3 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -1,4 +1,3 @@ -import asyncio import datetime import json import logging @@ -39,7 +38,6 @@ class SerloSpider(scrapy.Spider, LomBase): } GRAPHQL_MODIFIED_AFTER_PARAMETER: str = "" GRAPHQL_INSTANCE_PARAMETER: str = "" - sem = asyncio.Semaphore(value=10) # used to control the amount of concurrent requests in "parse"-method graphql_items = list() # Mapping from EducationalAudienceRole (LRMI) to IntendedEndUserRole(LOM), see: @@ -330,8 +328,7 @@ async def parse(self, response, **kwargs): 
json_ld = response.xpath('//*[@type="application/ld+json"]/text()').get() json_ld = json.loads(json_ld) - async with self.sem: - playwright_dict = await WebTools.getUrlData(response.url, WebEngine.Playwright) + playwright_dict = await WebTools.getUrlData(response.url, WebEngine.Playwright) html_body = playwright_dict.get("html") screenshot_bytes = playwright_dict.get("screenshot_bytes") html_text = playwright_dict.get("text") From 342159b8d74bf349d14a0e12d9d651db761417cf Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Dec 2023 14:21:54 +0100 Subject: [PATCH 405/590] change: enable Autothrottle / Playwright for rpi_virtuell - change: since Splash can't handle RPI-virtuell anymore (lots of time is wasted with HTTP Status 415 Responses), switching to Playwright - change: enable Scrapy Autothrottle to fix/reduce observed TCPTimedOutErrors during thumbnail downloads -- thumbnail downloads were sometimes sent in bursts, which caused "TCPTimedOutError"s to appear, especially on off-site URLs (= thumbnails which aren't hosted on rpi-virtuell.de servers) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/rpi_virtuell_spider.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/converter/spiders/rpi_virtuell_spider.py b/converter/spiders/rpi_virtuell_spider.py index a94a5eb2..c3588cc7 100644 --- a/converter/spiders/rpi_virtuell_spider.py +++ b/converter/spiders/rpi_virtuell_spider.py @@ -10,6 +10,7 @@ LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, \ BaseItemLoader, LomAgeRangeItemLoader from converter.spiders.base_classes import LomBase +from converter.web_tools import WebEngine class RpiVirtuellSpider(CrawlSpider, LomBase): @@ -21,11 +22,15 @@ class RpiVirtuellSpider(CrawlSpider, LomBase): friendlyName = "rpi-virtuell" start_urls = ['https://material.rpi-virtuell.de/wp-json/mymaterial/v1/material/'] - version = "0.0.7" + version = "0.0.8" # last update: 2023-12-08 custom_settings = { 'ROBOTSTXT_OBEY': False, - # 'AUTOTHROTTLE_ENABLED': False, + 'AUTOTHROTTLE_ENABLED': True, + 'AUTOTHROTTLE_DEBUG': True, + 'AUTOTHROTTLE_TARGET_CONCURRENCY': 12, + 'CONCURRENT_REQUESTS_PER_DOMAIN': 6, + 'WEB_TOOLS': WebEngine.Playwright, # 'DUPEFILTER_DEBUG': True } wp_json_pagination_parameters = { From 79f48d760719ddf4bf905c0b1dee3b4f03092d6a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Dec 2023 19:02:55 +0100 Subject: [PATCH 406/590] feat: additional (MIME-Type) checks for thumbnail URLs - during debugging we observed web-servers 'lying' in their response when queried for thumbnail URLs, which made debugging failed Items increasingly frustrating -- web-servers would serve HTTP Status Code '200', but actually forward our request to a 404-placeholder-html site instead - the thumbnail pipeline now checks the header for 'Content-Type' first before trying to transform an image -- unexpected MIME-types will fall back to a Playwright screenshot - fix: some logging calls accidentally used the root logger Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 51 +++++++++++++++++++++++++++++++----------- 1 file changed, 38 insertions(+), 13 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 59a01e6c..144b579b 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -293,7 +293,7 @@ def process_item(self, raw_item, spider): duration 
= int(duration) except: duration = None - logging.warning("duration {} could not be normalized to seconds".format(raw_duration)) + log.warning("duration {} could not be normalized to seconds".format(raw_duration)) item["lom"]["technical"]["duration"] = duration return raw_item @@ -389,13 +389,38 @@ async def process_item(self, raw_item, spider): # a thumbnail (url) is given - we will try to fetch it from the url url: str = item["thumbnail"] time_start = datetime.datetime.now() - response: scrapy.http.Response = await self.download_thumbnail_url(url, spider) + thumbnail_response: scrapy.http.Response = await self.download_thumbnail_url(url, spider) time_end = datetime.datetime.now() log.debug(f"Loading thumbnail from {url} took {time_end - time_start}.") - if response.status != 200: - log.debug(f"Thumbnail-Pipeline received unexpected response (status: {response.status}) from {url}") + + if thumbnail_response.status != 200: + log.debug(f"Thumbnail-Pipeline received a unexpected response (status: {thumbnail_response.status}) " + f"from {url} (-> resolved URL: {thumbnail_response.url}") _thumbnail_url_success = False # flipping the thumbnail flag to False triggers a website screenshot by Playwright (fallback) + else: + # Some web-servers 'lie' in regard to their HTTP status, e.g., they forward to a 404 HTML page and still + # respond with a '200' code. + try: + # We need to do additional checks before accepting the response object as a valid candidate for the + # image transformation + _mimetype: bytes = thumbnail_response.headers["Content-Type"] + _mimetype: str = _mimetype.decode() + if _mimetype.startswith("image/"): + # we expect thumbnail URLs to be of MIME-Type 'image/...' + # see: https://www.iana.org/assignments/media-types/media-types.xhtml#image + response = thumbnail_response + _thumbnail_url_success = True + else: + log.warning(f"Thumbnail URL {url} does not seem to be an image! " + f"Header contained Content-Type '{_mimetype}' instead.") + _thumbnail_url_success = False + except KeyError: + log.warning(f"Thumbnail URL response did not contain a Content-Type / MIME-Type! " + f"Thumbnail URL queried: {url} " + f"-> resolved URL: {thumbnail_response.url} " + f"(HTTP Status: {thumbnail_response.status}") + _thumbnail_url_success = False log.debug(f"Thumbnail-URL-Cache: {self.download_thumbnail_url.cache_info()} after trying to query {url} ") # nothing was given, we try to screenshot the page either via Splash or Playwright elif ( @@ -695,7 +720,7 @@ async def process_item(self, raw_item, spider): title = str(item["lom"]["general"]["title"]) entryUUID = EduSharing.build_uuid(item["response"]["url"] if "url" in item["response"] else item["hash"]) await self.insert_item(spider, entryUUID, item) - logging.info("item " + entryUUID + " inserted/updated") + log.info("item " + entryUUID + " inserted/updated") # @TODO: We may need to handle Collections # if 'collection' in item: @@ -910,7 +935,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy case _: # due to having the 'custom'-field as a (raw) list of all eafCodes, this mainly serves # the purpose of reminding us if a 'discipline'-value couldn't be mapped to Lisum - logging.debug(f"LisumPipeline failed to map from eafCode {discipline_eaf_code} " + log.debug(f"LisumPipeline failed to map from eafCode {discipline_eaf_code} " f"to its corresponding 'ccm:taxonid' short-handle. 
Trying Fallback...") match discipline_eaf_code: # catching edge-cases where OEH 'discipline'-vocab-keys don't line up with eafsys.txt values @@ -924,24 +949,24 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy discipline_eafcodes.add("2600103") # Körperpflege if eaf_code_digits_only_regex.search(discipline_eaf_code): # each numerical eafCode must have a length of (minimum) 3 digits to be considered valid - logging.debug(f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. (Wil be " + log.debug(f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. (Wil be " f"used later for 'ccm:taxonentry').") if discipline_eaf_code not in self.EAFCODE_EXCLUSIONS: # making sure to only save eafCodes that are part of the standard eafsys.txt discipline_eafcodes.add(discipline_eaf_code) else: - logging.debug(f"LisumPipeline: eafCode {discipline_eaf_code} is not part of 'EAF " + log.debug(f"LisumPipeline: eafCode {discipline_eaf_code} is not part of 'EAF " f"Sachgebietssystematik' (see: eafsys.txt), therefore skipping this " f"value.") else: # our 'discipline.ttl'-vocab holds custom keys (e.g. 'niederdeutsch', 'oeh04010') which # shouldn't be saved into 'ccm:taxonentry' (since they are not part of the regular # "EAF Sachgebietssystematik" - logging.debug(f"LisumPipeline eafCode fallback for {discipline_eaf_code} to " + log.debug(f"LisumPipeline eafCode fallback for {discipline_eaf_code} to " f"'ccm:taxonentry' was not possible. Only eafCodes with a minimum length " f"of 3+ digits are valid. (Please confirm if the provided value is part of " f"the 'EAF Sachgebietssystematik' (see: eafsys.txt))") - logging.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " + log.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") valuespaces["discipline"] = list() # clearing 'discipline'-field, so we don't accidentally write the # remaining OEH w3id-URLs to Lisum's 'ccm:taxonid'-field @@ -962,7 +987,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy educational_context_w3id_key) educational_context_lisum_keys.add(educational_context_w3id_key) case _: - logging.debug(f"LisumPipeline: educationalContext {educational_context_w3id_key} " + log.debug(f"LisumPipeline: educationalContext {educational_context_w3id_key} " f"not found in mapping table.") educational_context_list = list(educational_context_lisum_keys) educational_context_list.sort() @@ -1049,14 +1074,14 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy taxon_set = set(taxon_entries) taxon_set.update(discipline_eafcodes) taxon_entries = list(taxon_set) - logging.debug(f"LisumPipeline: Saving eafCodes {taxon_entries} to 'ccm:taxonentry'.") + log.debug(f"LisumPipeline: Saving eafCodes {taxon_entries} to 'ccm:taxonentry'.") base_item_adapter["custom"]["ccm:taxonentry"] = taxon_entries else: # oeh_spider typically won't have neither the 'custom'-field nor the 'ccm:taxonentry'-field # Therefore we have to create and fill it with the eafCodes that we gathered from our # 'discipline'-vocabulary-keys. 
discipline_eafcodes_list = list(discipline_eafcodes) - logging.debug(f"LisumPipeline: Saving eafCodes {discipline_eafcodes_list} to 'ccm:taxonentry'.") + log.debug(f"LisumPipeline: Saving eafCodes {discipline_eafcodes_list} to 'ccm:taxonentry'.") base_item_adapter.update( {'custom': { 'ccm:taxonentry': discipline_eafcodes_list}}) From b1cfc45f30947dbb4100792256e4a8f2383c1420 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 8 Dec 2023 20:10:38 +0100 Subject: [PATCH 407/590] change: increase priority of thumbnail downloads - intention behind this change: while watching a crawler "fill up" items in the front-end, this should reduce the delay between the creation of the learning object and the thumbnail data appearing Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 144b579b..2fe0f1f6 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -444,7 +444,8 @@ async def process_item(self, raw_item, spider): request_splash = scrapy.FormRequest( url=_splash_url, formdata=_splash_dict, - callback=NO_CALLBACK + callback=NO_CALLBACK, + priority=1 ) splash_response: scrapy.http.Response = await maybe_deferred_to_future( spider.crawler.engine.download(request_splash) @@ -550,7 +551,9 @@ async def download_thumbnail_url(self, url: str, spider: scrapy.Spider): :return: Response or None """ try: - request = scrapy.Request(url=url, callback=NO_CALLBACK) + request = scrapy.Request(url=url, callback=NO_CALLBACK, priority=1) + # Thumbnail downloads will be executed with a slightly higher priority (default: 0), so there's less delay + # between metadata processing and thumbnail retrieval steps in the pipelines response: Deferred | Future = await maybe_deferred_to_future( spider.crawler.engine.download(request) ) From 3a3334e7c25455be953525236129e18534649ee7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sat, 9 Dec 2023 01:14:21 +0100 Subject: [PATCH 408/590] fix: fallback to Playwright on failed thumbnail download Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 40 +++++++++++++++++----------------------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 2fe0f1f6..f30dd5cf 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -373,7 +373,6 @@ async def process_item(self, raw_item, spider): # checking if the (optional) attribute WEB_TOOLS exists: web_tools = settings_crawler.get("WEB_TOOLS", default=WebEngine.Splash) _splash_success: bool | None = None # control flag flips to False if Splash can't handle a URL - _thumbnail_url_success: bool | None = None # flips to False if there was an error during thumbnail download # if screenshot_bytes is provided (the crawler has already a binary representation of the image, # the pipeline will convert/scale the given image @@ -386,18 +385,19 @@ async def process_item(self, raw_item, spider): # Therefore, we delete it after we're done with processing it del item["screenshot_bytes"] elif "thumbnail" in item: - # a thumbnail (url) is given - we will try to fetch it from the url + # a thumbnail (url) was provided within the item -> we will try to fetch it from the url url: str = item["thumbnail"] - time_start = datetime.datetime.now() + time_start: datetime = datetime.datetime.now() 
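            # note: download_thumbnail_url() is an async-LRU-cached coroutine (cf. the async_lru import and the
            # cache_info() debug log below); its request is sent with priority=1 (added in the previous commit),
            # so recurring thumbnail URLs are served from the cache instead of being downloaded again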
thumbnail_response: scrapy.http.Response = await self.download_thumbnail_url(url, spider) - time_end = datetime.datetime.now() - log.debug(f"Loading thumbnail from {url} took {time_end - time_start}.") - + time_end: datetime = datetime.datetime.now() + log.debug(f"Loading thumbnail from {url} took {time_end - time_start} (incl. awaiting).") + log.debug(f"Thumbnail-URL-Cache: {self.download_thumbnail_url.cache_info()} after trying to query {url} ") if thumbnail_response.status != 200: log.debug(f"Thumbnail-Pipeline received a unexpected response (status: {thumbnail_response.status}) " f"from {url} (-> resolved URL: {thumbnail_response.url}") - _thumbnail_url_success = False - # flipping the thumbnail flag to False triggers a website screenshot by Playwright (fallback) + # fall back to website screenshot + del item["thumbnail"] + return await self.process_item(raw_item, spider) else: # Some web-servers 'lie' in regard to their HTTP status, e.g., they forward to a 404 HTML page and still # respond with a '200' code. @@ -410,19 +410,20 @@ async def process_item(self, raw_item, spider): # we expect thumbnail URLs to be of MIME-Type 'image/...' # see: https://www.iana.org/assignments/media-types/media-types.xhtml#image response = thumbnail_response - _thumbnail_url_success = True + # only set the response if thumbnail retrieval was successful! else: log.warning(f"Thumbnail URL {url} does not seem to be an image! " - f"Header contained Content-Type '{_mimetype}' instead.") - _thumbnail_url_success = False + f"Header contained Content-Type '{_mimetype}' instead. " + f"(Falling back to screenshot)") + del item["thumbnail"] + return await self.process_item(raw_item, spider) except KeyError: log.warning(f"Thumbnail URL response did not contain a Content-Type / MIME-Type! " f"Thumbnail URL queried: {url} " f"-> resolved URL: {thumbnail_response.url} " f"(HTTP Status: {thumbnail_response.status}") - _thumbnail_url_success = False - log.debug(f"Thumbnail-URL-Cache: {self.download_thumbnail_url.cache_info()} after trying to query {url} ") - # nothing was given, we try to screenshot the page either via Splash or Playwright + del item["thumbnail"] + return await self.process_item(raw_item, spider) elif ( "location" in item["lom"]["technical"] and len(item["lom"]["technical"]["location"]) > 0 @@ -465,7 +466,6 @@ async def process_item(self, raw_item, spider): playwright_websocket_endpoint: str | None = env.get("PLAYWRIGHT_WS_ENDPOINT") if (not bool(_splash_success) and playwright_websocket_endpoint - or not bool(_thumbnail_url_success) and playwright_websocket_endpoint or playwright_websocket_endpoint and web_tools == WebEngine.Playwright): # we're using Playwright to take a website screenshot if: # - the spider explicitly defined Playwright in its 'custom_settings'-dict @@ -483,7 +483,7 @@ async def process_item(self, raw_item, spider): else: if settings_crawler.get("DISABLE_SPLASH") is False: log.warning( - "No thumbnail provided and SPLASH_URL was not configured for screenshots!" 
+ "No thumbnail provided (and .env variable 'SPLASH_URL' was not configured for screenshots!)" ) if response is None: if settings_crawler.get("DISABLE_SPLASH") is False: @@ -516,13 +516,7 @@ async def process_item(self, raw_item, spider): self.create_thumbnails_from_image_bytes(img, item, settings_crawler) except Exception as e: if url is not None: - log.warning( - "Could not read thumbnail at " - + url - + ": " - + str(e) - + " (falling back to screenshot)" - ) + log.warning(f"Could not read thumbnail at {url}: {str(e)} (falling back to screenshot)") if "thumbnail" in item: del item["thumbnail"] return await self.process_item(raw_item, spider) From 989fe8a565dd70d05dfdc5cb3eea6a092edb7eac Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sat, 9 Dec 2023 03:05:26 +0100 Subject: [PATCH 409/590] change: decrease WebTools 'trafilatura' logging verbosity Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/converter/web_tools.py b/converter/web_tools.py index ea691bae..9fcd1dd6 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -1,4 +1,5 @@ import json +import logging from asyncio import Semaphore from enum import Enum @@ -10,6 +11,9 @@ from converter import env +log = logging.getLogger(__name__) +logging.getLogger('trafilatura').setLevel(logging.INFO) # trafilatura is quite spammy + class WebEngine(Enum): # Splash (default engine) From 3e7d799b00c59e6dec04b928483397b560b2d6d2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sat, 9 Dec 2023 03:06:26 +0100 Subject: [PATCH 410/590] change: increase autothrottle concurrency settings Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 6b39cc32..c86cbf1b 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -44,8 +44,8 @@ class OersiSpider(scrapy.Spider, LomBase): custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 20, - "CONCURRENT_REQUESTS_PER_DOMAIN": 4, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 60, + "CONCURRENT_REQUESTS_PER_DOMAIN": 6, "WEB_TOOLS": WebEngine.Playwright, } From 8baf28db3542d849a6f962e0575152d103c99fa4 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sun, 10 Dec 2023 22:04:25 +0100 Subject: [PATCH 411/590] feat: Exception Handling for failed Thumbnail downloads - since we cannot trust that the provided thubmnail URLs are available and correct, trying to catch the most common Exceptions when the image download failed -- TCPTimedOutError and DNSLookupErrors were observed during test-crawls of RPI-virtuell, where the image URLs themselves were either pointing to: --- off-site images within 3rd-party image caches --- images that were no longer available --- images that were hosted on a (completely offline) server, where no DNS records could be fetched Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index f30dd5cf..3b35cb66 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -22,6 +22,7 @@ import isodate import scrapy import scrapy.crawler 
+import twisted.internet.error from PIL import Image from async_lru import alru_cache from itemadapter import ItemAdapter @@ -388,7 +389,21 @@ async def process_item(self, raw_item, spider): # a thumbnail (url) was provided within the item -> we will try to fetch it from the url url: str = item["thumbnail"] time_start: datetime = datetime.datetime.now() - thumbnail_response: scrapy.http.Response = await self.download_thumbnail_url(url, spider) + try: + thumbnail_response: scrapy.http.Response = await self.download_thumbnail_url(url, spider) + # we expect that some thumbnail URLs will be wrong, outdated or already offline, which is why we catch + # the most common Exceptions while trying to dwonload the image. + except twisted.internet.error.TCPTimedOutError: + log.warning(f"Thumbnail download of URL {url} failed due to TCPTimedOutError. " + f"(You might see this error if the image is unavailable under that specific URL.) " + f"Falling back to website screenshot.") + del item["thumbnail"] + return await self.process_item(raw_item, spider) + except twisted.internet.error.DNSLookupError: + log.warning(f"Thumbnail download of URL {url} failed due to DNSLookupError. " + f"(The webserver might be offline.) Falling back to website screenshot.") + del item["thumbnail"] + return await self.process_item(raw_item, spider) time_end: datetime = datetime.datetime.now() log.debug(f"Loading thumbnail from {url} took {time_end - time_start} (incl. awaiting).") log.debug(f"Thumbnail-URL-Cache: {self.download_thumbnail_url.cache_info()} after trying to query {url} ") From 8052ac7a53140e65ffa9a504708ebe50dac942ef Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 13 Dec 2023 15:08:17 +0100 Subject: [PATCH 412/590] oersi_spider v0.1.6 feat: save "sourceOrganization" values in "lifecycle.publisher" (WLO-BIRD-Connector v2 requirement) - as discussed on 2023-12-13 with Frank/Manuel/Wolli, we're saving "sourceOrganization" values to "ccm:lifecyclecontributer_publisher" -- to avoid duplicate entries from the AMB field "publisher", a set of publisher names is kept by the get_lifecycle_publisher method Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 50 ++++++++++++++++++++++++++----- 1 file changed, 42 insertions(+), 8 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index c86cbf1b..092d8f2d 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -39,7 +39,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.5" # last update: 2023-08-12 + version = "0.1.6" # last update: 2023-12-13 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -634,11 +634,12 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer lifecycle_metadata_provider.add_value("url", metadata_provider_url) lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) - def get_lifecycle_publisher( - self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, date_published: Optional[str] = None - ): + def get_lifecycle_publisher(self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, + organizations_from_publisher_fields: set[str], date_published: Optional[str] = None): """ - Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. 
+ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. Successfully + collected 'publisher.name'-strings are added to an organizations set for duplicate detection in the + 'sourceOrganization' field. """ if "publisher" in elastic_item_source: # see: https://dini-ag-kim.github.io/amb/draft/#publisher @@ -651,6 +652,10 @@ def get_lifecycle_publisher( publisher_name: str = publisher_item.get("name") if publisher_type == "Organization": lifecycle_publisher.add_value("organization", publisher_name) + # to avoid duplicate entries in 'publisher'-lifecycle items, we need to keep a set of previously + # collected publisher names to compare them later in the 'sourceOrganization'-method for the + # WLO-BIRD-Connector v2 + organizations_from_publisher_fields.add(publisher_name) elif publisher_type == "Person": self.split_names_if_possible_and_add_to_lifecycle( name_string=publisher_name, @@ -706,6 +711,26 @@ def get_lifecycle_organization_from_source_organization_fallback( lifecycle_org.add_value("url", org_url) lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) + def get_lifecycle_publisher_from_source_organization( + self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, previously_collected_publishers: set[str] + ): + source_organizations: list[dict] = elastic_item_source.get("sourceOrganization") + for so in source_organizations: + if "name" in so and "name" not in previously_collected_publishers: + source_org_name: str = so.get("name") + lifecycle_org = LomLifecycleItemloader() + lifecycle_org.add_value("role", "publisher") + lifecycle_org.add_value("organization", source_org_name) + if "id" in so: + self.lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary=so, lifecycle_item_loader=lifecycle_org + ) + if "url" in so: + org_url: str = so.get("url") + if org_url: + lifecycle_org.add_value("url", org_url) + lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) + @staticmethod def lifecycle_determine_type_of_identifier_and_save_uri( item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader @@ -875,16 +900,25 @@ async def parse(self, response: scrapy.http.Response, **kwargs): author_list=authors, ) - self.get_lifecycle_publisher( - lom_base_item_loader=lom, elastic_item_source=elastic_item_source, date_published=date_published - ) + organizations_from_publisher_fields: set[str] = set() + self.get_lifecycle_publisher(lom_base_item_loader=lom, elastic_item_source=elastic_item_source, + organizations_from_publisher_fields=organizations_from_publisher_fields, + date_published=date_published) if "sourceOrganization" in elastic_item_source: + # ToDo: this fallback might no longer be necessary: self.get_lifecycle_organization_from_source_organization_fallback( elastic_item_source=elastic_item_source, lom_item_loader=lom, organization_fallback=organizations_from_affiliation_fields, ) + # ToDo: WLO-BIRD-Connector v2 REQUIREMENT: + # 'sourceOrganization' -> 'ccm:lifecyclecontributer_publisher' + self.get_lifecycle_publisher_from_source_organization( + lom_item_loader=lom, + elastic_item_source=elastic_item_source, + previously_collected_publishers=organizations_from_publisher_fields, + ) educational = LomEducationalItemLoader() if in_languages: From e171470382a039efe386ef9f32b1def739deee0a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 13 Dec 2023 15:45:25 +0100 Subject: [PATCH 413/590] change: allow MIME-Type 'application/octet-stream' in 
Thumbnail-Pipeline - a large amount of ORCA.nrw thumbnail URLs (from OERSI) appear to be of type 'application/octet-stream', which caused the Pipeline to fall back to taking a website screenshot instead -- ToDo: octet streams might need special Handling in the future Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 3b35cb66..cbde4a49 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -426,6 +426,13 @@ async def process_item(self, raw_item, spider): # see: https://www.iana.org/assignments/media-types/media-types.xhtml#image response = thumbnail_response # only set the response if thumbnail retrieval was successful! + elif _mimetype == "application/octet-stream": + # ToDo: special handling for 'application/octet-stream' necessary? + log.debug(f"Thumbnail URL of MIME-Type 'image/...' expected, " + f"but received '{_mimetype}' instead. " + f"(If thumbnail conversion throws unexpected errors further down the line, " + f"the Thumbnail-Pipeline needs to be re-visited! URL: {url} )") + response = thumbnail_response else: log.warning(f"Thumbnail URL {url} does not seem to be an image! " f"Header contained Content-Type '{_mimetype}' instead. " From 383179800da6b307bc8e41e819028972fad81b21 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 14 Dec 2023 13:13:54 +0100 Subject: [PATCH 414/590] build: version pin "browserless v2" docker image to 2023-12-13 build - as suggested by Torsten, we're version-pinning the browserless v2 container to the recent build instead of using "latest" by default -- browserless v2 currently ONLY tags their latest version, therefore we have to use a sha256 hash of specific daily builds at the moment --- ToDo: regularly check the newest builds and inform Torsten if the screenshotter-service in our Kubernetes cluster needs to be updated with the same version! - for details about this version, see: https://github.com/browserless/browserless/pkgs/container/chrome/157666785 Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index bc11f76e..966e6c04 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -19,7 +19,7 @@ services: retries: 3 start_period: 40s headless_chrome: - image: ghcr.io/browserless/chrome + image: ghcr.io/browserless/chrome@sha256:f27f9fa0d9c2344180c0fc5af7c6ea4a1df6f2a7a3efc555de876dbea6ded7a1 restart: always environment: - TIMEOUT=120000 From a00f37bf4ceab9f11949690d652ba19825667403 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 14 Dec 2023 14:46:47 +0100 Subject: [PATCH 415/590] oersi_spider v0.1.7 - change: get_item_url method (fallback no longer necessary) -- the previous fallback with 'mainEntityOfPage.id' is no longer necessary, therefore get_item_url either retrieves a valid URL or drops the item completely from now on - fix: 'sourceOrganization' comparison with previously collected publisher names - change: disable DupeFilter for initial requests and enable DupeFilter log -- during previous crawls we observed that the DupeFilter "swallows" URLs: --- example 1: some courses redirect to their most-current / up-to-date course (e.g. 
if a course was held in 2020, the URL automatically redirects to the 2023 URl) -> this would cause the item to not be crawled because Scrapy's Dupefilter detects the same resolved URL target --- example 2: some URLs for learning materials or books automatically redirect Scrapy to their newest edition (e.g. if the OERSI item points to Edition 1 of a book, the scrapy Request would automatically get redirected to Edition 3 of said book -> scrapy would detect that the resolved URL is the same for both books and the "faster" book in the Scrapy scheduler queue would "win" - update: list of Metadata-Providers as of 2023-12-14 --- converter/spiders/oersi_spider.py | 78 +++++++++++++++++-------------- 1 file changed, 42 insertions(+), 36 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 092d8f2d..42eaaecd 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -39,13 +39,14 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.6" # last update: 2023-12-13 + version = "0.1.7" # last update: 2023-12-14 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, "AUTOTHROTTLE_TARGET_CONCURRENCY": 60, "CONCURRENT_REQUESTS_PER_DOMAIN": 6, + "DUPEFILTER_DEBUG": True, "WEB_TOOLS": WebEngine.Playwright, } @@ -60,6 +61,7 @@ class OersiSpider(scrapy.Spider, LomBase): # ToDo: regularly check if new providers need to be added to the list below (and insert/sort them alphabetically!) ELASTIC_PROVIDERS_TO_CRAWL: list = [ # "BC Campus", # ToDo: BC Campus website cannot be crawled at the moment, needs further investigation + # "ComeIn", # should not be crawled, datasets were exported to OERSI from WLO "detmoldMusicTools", "digiLL", "DuEPublico", @@ -75,11 +77,12 @@ class OersiSpider(scrapy.Spider, LomBase): "HOOU", "iMoox", "KI Campus", - # "langSci Press", # new provider as of 2023-04-27 - disappeared on 2023-05-04 + "langSci Press", # new provider as of 2023-04-27 + "lecture2go (Hamburg)", # new provider as of 2023-12-14 "MIT OpenCourseWare", "OEPMS", # new provider as of 2023-04-27 "OER Portal Uni Graz", - "oncampus", + "oncampus", # (temporarily) not available? (2023-12-14) "Open Music Academy", "Open Textbook Library", "Opencast Universität Osnabrück", @@ -88,6 +91,7 @@ class OersiSpider(scrapy.Spider, LomBase): "OpenRub", "ORCA.nrw", "Phaidra Uni Wien", + "Pressbooks Directory", # new provider as of 2023-12-14 "RWTH Aachen GitLab", "TIB AV-Portal", "TU Delft OpenCourseWare", @@ -177,7 +181,7 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req if not self.hasChanged(None, elastic_item=elastic_item): return None # by omitting the callback parameter, individual requests are yielded to the parse-method - yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}) + yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}, dont_filter=True) def elastic_pit_create(self) -> dict: """ @@ -347,24 +351,17 @@ def get_uuid(elastic_item: dict): return EduSharing.build_uuid(item_url) @staticmethod - def get_item_url(elastic_item) -> str: + def get_item_url(elastic_item: dict) -> str | None: """ - Tries to gather the to-be-parsed URL from OERSI's 'MainEntityOfPage'-field and if that field is not available, - falls back to the '_source.id'-field. Returns an URL-string. + Retrieves the to-be-parsed URL from OERSI's '_source.id'-field. 
+ If that (REQUIRED) field was not available, returns None. """ - main_entity_of_page: list[dict] = elastic_item["_source"]["mainEntityOfPage"] - if main_entity_of_page: - item_url: str = main_entity_of_page[0]["id"] - # "id" is a REQUIRED sub-field of MainEntityOfPage and will always contain more stable URLs than - # '_source.id' + item_url: str = elastic_item["_source"]["id"] + if item_url: return item_url else: - item_url: str = elastic_item["_source"]["id"] - logging.debug( - f"get_uuid fallback activated: The field 'MainEntityOfPage.id' for '{elastic_item['_id']}' was not " - f"available. Using fallback value '_source.id': {item_url} instead." - ) - return item_url + logging.warning(f"OERSI Item {elastic_item['_id']} did not provide a URL string. Dropping item.") + return None def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: elastic_item = elastic_item @@ -634,8 +631,13 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer lifecycle_metadata_provider.add_value("url", metadata_provider_url) lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) - def get_lifecycle_publisher(self, lom_base_item_loader: LomBaseItemloader, elastic_item_source: dict, - organizations_from_publisher_fields: set[str], date_published: Optional[str] = None): + def get_lifecycle_publisher( + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organizations_from_publisher_fields: set[str], + date_published: Optional[str] = None, + ): """ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. Successfully collected 'publisher.name'-strings are added to an organizations set for duplicate detection in the @@ -716,20 +718,21 @@ def get_lifecycle_publisher_from_source_organization( ): source_organizations: list[dict] = elastic_item_source.get("sourceOrganization") for so in source_organizations: - if "name" in so and "name" not in previously_collected_publishers: + if "name" in so: source_org_name: str = so.get("name") - lifecycle_org = LomLifecycleItemloader() - lifecycle_org.add_value("role", "publisher") - lifecycle_org.add_value("organization", source_org_name) - if "id" in so: - self.lifecycle_determine_type_of_identifier_and_save_uri( - item_dictionary=so, lifecycle_item_loader=lifecycle_org - ) - if "url" in so: - org_url: str = so.get("url") - if org_url: - lifecycle_org.add_value("url", org_url) - lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) + if source_org_name not in previously_collected_publishers: + lifecycle_org = LomLifecycleItemloader() + lifecycle_org.add_value("role", "publisher") + lifecycle_org.add_value("organization", source_org_name) + if "id" in so: + self.lifecycle_determine_type_of_identifier_and_save_uri( + item_dictionary=so, lifecycle_item_loader=lifecycle_org + ) + if "url" in so: + org_url: str = so.get("url") + if org_url: + lifecycle_org.add_value("url", org_url) + lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) @staticmethod def lifecycle_determine_type_of_identifier_and_save_uri( @@ -901,9 +904,12 @@ async def parse(self, response: scrapy.http.Response, **kwargs): ) organizations_from_publisher_fields: set[str] = set() - self.get_lifecycle_publisher(lom_base_item_loader=lom, elastic_item_source=elastic_item_source, - organizations_from_publisher_fields=organizations_from_publisher_fields, - date_published=date_published) + self.get_lifecycle_publisher( + lom_base_item_loader=lom, + 
elastic_item_source=elastic_item_source, + organizations_from_publisher_fields=organizations_from_publisher_fields, + date_published=date_published, + ) if "sourceOrganization" in elastic_item_source: # ToDo: this fallback might no longer be necessary: From 398d48bc0de314b4a44efa26f6ea4b5b075ef95c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 15 Dec 2023 18:37:34 +0100 Subject: [PATCH 416/590] feat: clean up lifecycle 'name' strings before trying to split them into firstName / lastName - remove unnecessary debug message - docs: OEPMS cannot currently be crawled (requests end up in a '403' response) - fix: split names into firstName and lastName only if they're valid strings (and ignore whitespace-only raw strings) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 42eaaecd..db3e0225 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -80,7 +80,7 @@ class OersiSpider(scrapy.Spider, LomBase): "langSci Press", # new provider as of 2023-04-27 "lecture2go (Hamburg)", # new provider as of 2023-12-14 "MIT OpenCourseWare", - "OEPMS", # new provider as of 2023-04-27 + # "OEPMS", # new provider as of 2023-04-27 # ToDo: cannot be crawled "OER Portal Uni Graz", "oncampus", # (temporarily) not available? (2023-12-14) "Open Music Academy", @@ -297,7 +297,6 @@ def elastic_fetch_all_provider_pages(self): if "sort" in last_entry: last_sort_result: list = last_entry.get("sort") if last_sort_result: - logging.info(f"The last_sort_result is {last_sort_result}") has_next_page = True pagination_parameter = last_sort_result else: @@ -775,13 +774,16 @@ def split_names_if_possible_and_add_to_lifecycle(name_string: str, lifecycle_ite into two parts: first_name and last_name. Afterward saves the split values to their respective 'lifecycle'-fields or saves the string as a whole. """ + if " " in name_string: + # clean up empty / erroneous whitespace-only strings before trying to split the string + name_string = name_string.strip() if " " in name_string: name_parts = name_string.split(maxsplit=1) first_name = name_parts[0] last_name = name_parts[1] lifecycle_item_loader.add_value("firstName", first_name) lifecycle_item_loader.add_value("lastName", last_name) - else: + elif name_string: lifecycle_item_loader.add_value("firstName", name_string) async def parse(self, response: scrapy.http.Response, **kwargs): From 117a69a6e4062616eb6a6e129e64626df05b8bb1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 15 Dec 2023 21:42:03 +0100 Subject: [PATCH 417/590] fix: thumbnails URLs fail to download when obeying robots.txt directive - we need to explicitly disregard robots.txt files for some specific metadata providers (e.g. 
DuEPublico), otherwise thumbnail downloads would throw exceptions Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index db3e0225..e69e33f4 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -48,7 +48,9 @@ class OersiSpider(scrapy.Spider, LomBase): "CONCURRENT_REQUESTS_PER_DOMAIN": 6, "DUPEFILTER_DEBUG": True, "WEB_TOOLS": WebEngine.Playwright, + "ROBOTSTXT_OBEY": False, } + # if robots.txt is obeyed, the thumbnail downloads fail on some metadata-providers (e.g., DuEPublico) ELASTIC_PARAMETER_KEEP_ALIVE: str = "1m" # for reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#time-units From 8e8f150a29d112f5adc50d287d381a225732866b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 15 Dec 2023 22:02:01 +0100 Subject: [PATCH 418/590] change: disable crawling of "OpenRub" (because all URLs are 404) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index e69e33f4..ab82faa0 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -90,7 +90,7 @@ class OersiSpider(scrapy.Spider, LomBase): "Opencast Universität Osnabrück", "openHPI", "OpenLearnWare", - "OpenRub", + # "OpenRub", # all OpenRub URLs are deadlinks (as of 2023-12-15) "ORCA.nrw", "Phaidra Uni Wien", "Pressbooks Directory", # new provider as of 2023-12-14 From 2e45dcb20fef81496d99b7c847eee6b627b9beb2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Sun, 17 Dec 2023 16:31:42 +0100 Subject: [PATCH 419/590] change: enable two metadata providers (BC Campus / Finnish Library of OER) - URLs from these providers previously caused problems with Scrapy, but should be less problematic with the recent upgrade to Scrapy v2.11 and oersi_spider v0.1.7 Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index ab82faa0..b1dde823 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -39,7 +39,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.7" # last update: 2023-12-14 + version = "0.1.7" # last update: 2023-12-17 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -62,7 +62,7 @@ class OersiSpider(scrapy.Spider, LomBase): # the provider-filter at https://oersi.org/resources/ shows you which String values can be used as a provider-name # ToDo: regularly check if new providers need to be added to the list below (and insert/sort them alphabetically!) 
ELASTIC_PROVIDERS_TO_CRAWL: list = [ - # "BC Campus", # ToDo: BC Campus website cannot be crawled at the moment, needs further investigation + "BC Campus", # BC Campus website cannot be crawled at the moment, needs further investigation # "ComeIn", # should not be crawled, datasets were exported to OERSI from WLO "detmoldMusicTools", "digiLL", @@ -70,7 +70,7 @@ class OersiSpider(scrapy.Spider, LomBase): "eaDNURT", "eCampusOntario", "eGov-Campus", - # "Finnish Library of Open Educational Resources", # ToDo: URLs of this metadata-provider cannot be resolved + "Finnish Library of Open Educational Resources", # URLs of this metadata-provider cannot be resolved "GitHub", "GitLab", "Helmholtz Codebase", @@ -306,7 +306,7 @@ def elastic_fetch_all_provider_pages(self): break else: logging.info( - f"reached the end of the ElasticSearch results for '{provider_name}' // " + f"Reached the end of the ElasticSearch results for '{provider_name}' // " f"Total amount of items collected (across all metadata-providers): {len(all_items)}" ) break From c0eb051cef55fdc6049aa324e7973105ad04d644 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 20 Dec 2023 17:55:09 +0100 Subject: [PATCH 420/590] oersi_spider v0.1.8 ("offline"-import-mode) - change/perf: the crawler does not make individual requests to websites anymore, which should increase performance and completeness of crawls -- this also means that we cannot detect duplicate URLs during the crawl process anymore: --- if two metadata-providers within OERSI's dataset reference the same item (= same item url), both will be scraped - remove: "continue crawl"-setting (at least until a stable solution can be tested/implemented in the future) - remove: remove crawler-specific fallback to website screenshot if AMB 'image' was not found -- the Thumbnail-Pipeline handles this by itself - code cleanup in regards to "response"-object -- since the crawler does not have a Response to parse, removed all obsolete code fragments that handled 'response.url's etc. 
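The core of the "offline"-import mode is visible in the diff below: a single dummy request keeps Scrapy's engine running, and every item is then built directly from the ElasticSearch documents that were fetched up front, without requesting the individual item URLs anymore. Reduced to the bare pattern (class name, attribute name and dummy URL below are illustrative placeholders, not the crawler's actual values):

    import scrapy


    class OfflineImportSpider(scrapy.Spider):
        name = "offline_import_example"
        # stands in for ELASTIC_ITEMS_ALL, which the real crawler fills from the
        # ElasticSearch API before the crawl starts
        prefetched_items: list[dict] = []

        def start_requests(self):
            # Scrapy needs at least one Request to start its engine;
            # the response body of this dummy request is never parsed for metadata
            yield scrapy.Request(url="https://example.org", callback=self.handle_prefetched_items)

        def handle_prefetched_items(self, response: scrapy.http.Response):
            for elastic_item in self.prefetched_items:
                # no per-item scrapy.Request anymore: the item is built
                # straight from the stored ElasticSearch document
                yield from self.parse(elastic_item=elastic_item)

        def parse(self, response=None, **kwargs):
            elastic_item: dict = kwargs["elastic_item"]
            # ... build and yield the BaseItem from elastic_item["_source"] here ...
            yield {"sourceId": elastic_item.get("_id")}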
Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/oersi_spider.py | 111 +++++++++++------------------- 1 file changed, 41 insertions(+), 70 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index b1dde823..68e57094 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -22,9 +22,8 @@ ResponseItemLoader, ) from converter.spiders.base_classes import LomBase -from converter.util.edu_sharing_precheck import EduSharingPreCheck from converter.util.license_mapper import LicenseMapper -from converter.web_tools import WebEngine, WebTools +from converter.web_tools import WebEngine class OersiSpider(scrapy.Spider, LomBase): @@ -39,7 +38,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.7" # last update: 2023-12-17 + version = "0.1.8" # last update: 2023-12-20 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -90,7 +89,7 @@ class OersiSpider(scrapy.Spider, LomBase): "Opencast Universität Osnabrück", "openHPI", "OpenLearnWare", - # "OpenRub", # all OpenRub URLs are deadlinks (as of 2023-12-15) + "OpenRub", "ORCA.nrw", "Phaidra Uni Wien", "Pressbooks Directory", # new provider as of 2023-12-14 @@ -137,29 +136,15 @@ def __init__(self, **kwargs): logging.info(f"ElasticSearch API response (upon PIT delete): {json_response}") def start_requests(self): + # yield dummy request, so that Scrapy's start_item method requirement is satisfied, + # then use callback method to crawl all items + yield scrapy.Request(url="https://oersi.org", callback=self.handle_collected_elastic_items) + + def handle_collected_elastic_items(self, response: scrapy.http.Response): random.shuffle(self.ELASTIC_ITEMS_ALL) # shuffling the list of ElasticSearch items to improve concurrency and # distribute the load between several target domains. - continue_from_previous_crawl = env.get_bool("CONTINUE_CRAWL", True, False) - # checking if a previously aborted crawl should be completed (by skipping updates of previously collected items) - if continue_from_previous_crawl: - # ToDo: for time-stable results this feature needs to be reworked: uuids need to be used to keep consistent - # results across longer crawling processes - es_id_collector = EduSharingPreCheck() - previously_crawled_replication_source_ids: list[str] = es_id_collector.get_replication_source_id_list() - for elastic_item in self.ELASTIC_ITEMS_ALL: - elastic_item_identifier: str = elastic_item["_id"] - if elastic_item_identifier in previously_crawled_replication_source_ids: - logging.debug( - f"Found Elastic item '_id': {elastic_item_identifier} within previously crawled " - f"results in the edu-sharing repository. Skipping item because '.env'-setting " - f"'CONTINUE_CRAWL' is enabled." - ) - continue - else: - yield from self.check_item_and_yield_to_parse_method(elastic_item) - else: - for elastic_item in self.ELASTIC_ITEMS_ALL: - yield from self.check_item_and_yield_to_parse_method(elastic_item) + for elastic_item in self.ELASTIC_ITEMS_ALL: + yield from self.check_item_and_yield_to_parse_method(elastic_item) def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Request | None: """ @@ -182,8 +167,11 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req ): if not self.hasChanged(None, elastic_item=elastic_item): return None + # ToDo: implement crawling mode toggle? 
+ # (online) crawl vs. "offline"-import (without making requests to the item urls) # by omitting the callback parameter, individual requests are yielded to the parse-method - yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}, dont_filter=True) + # yield scrapy.Request(url=item_url, cb_kwargs={"elastic_item": elastic_item}, dont_filter=True) + yield from self.parse(elastic_item=elastic_item) def elastic_pit_create(self) -> dict: """ @@ -788,7 +776,7 @@ def split_names_if_possible_and_add_to_lifecycle(name_string: str, lifecycle_ite elif name_string: lifecycle_item_loader.add_value("firstName", name_string) - async def parse(self, response: scrapy.http.Response, **kwargs): + def parse(self, response=None, **kwargs): elastic_item: dict = kwargs.get("elastic_item") elastic_item_source: dict = elastic_item.get("_source") # _source is the original JSON body passed for the document at index time @@ -839,7 +827,14 @@ async def parse(self, response: scrapy.http.Response, **kwargs): base.add_value("sourceId", self.getId(response, elastic_item=elastic_item)) base.add_value("hash", self.getHash(response, elastic_item_source=elastic_item_source)) - thumbnail_url = str() + try: + thumbnail_url: str = elastic_item_source.get("image") + # see: https://dini-ag-kim.github.io/amb/draft/#image + if thumbnail_url: + base.add_value("thumbnail", thumbnail_url) + except KeyError: + logging.debug(f"OERSI Item {elastic_item['_id']} " + f"(name: {elastic_item_source['name']}) did not provide a thumbnail.") if "image" in elastic_item_source: thumbnail_url = elastic_item_source.get("image") # thumbnail if thumbnail_url: @@ -848,7 +843,6 @@ async def parse(self, response: scrapy.http.Response, **kwargs): # every item gets sorted into a //-subfolder to make QA more feasable base.add_value("origin", provider_name) - general.add_value("identifier", response.url) if "keywords" in elastic_item_source: keywords: list = elastic_item_source.get("keywords") if keywords: @@ -871,19 +865,16 @@ async def parse(self, response: scrapy.http.Response, **kwargs): lom.add_value("general", general.load_item()) technical = LomTechnicalItemLoader() - identifier_url: str = str() - if "id" in elastic_item_source: - identifier_url: str = elastic_item_source.get("id") # this URL is REQUIRED and should always be available + try: + identifier_url: str = self.get_item_url(elastic_item=elastic_item) + # this URL is REQUIRED and should always be available # see https://dini-ag-kim.github.io/amb/draft/#id - if identifier_url: - general.replace_value("identifier", identifier_url) - technical.add_value("location", identifier_url) - if identifier_url != response.url: - # the identifier_url should be more stable/robust than the (resolved) response.url in the long run, - # so we will save both URLs in case the resolved URL is different - technical.add_value("location", response.url) - elif not identifier_url: - technical.add_value("location", response.url) + except KeyError: + logging.warning(f"Item {elastic_item['_id']} did not have an item URL (AMB 'id' was missing)!") + return + if identifier_url: + general.replace_value("identifier", identifier_url) + technical.add_value("location", identifier_url) lom.add_value("technical", technical.load_item()) organizations_from_affiliation_fields: set[str] = set() @@ -916,13 +907,14 @@ async def parse(self, response: scrapy.http.Response, **kwargs): ) if "sourceOrganization" in elastic_item_source: - # ToDo: this fallback might no longer be necessary: - 
self.get_lifecycle_organization_from_source_organization_fallback( - elastic_item_source=elastic_item_source, - lom_item_loader=lom, - organization_fallback=organizations_from_affiliation_fields, - ) - # ToDo: WLO-BIRD-Connector v2 REQUIREMENT: + # # ToDo: this fallback might no longer be necessary: + # self.get_lifecycle_organization_from_source_organization_fallback( + # elastic_item_source=elastic_item_source, + # lom_item_loader=lom, + # organization_fallback=organizations_from_affiliation_fields, + # ) + + # WLO-BIRD-Connector v2 REQUIREMENT: # 'sourceOrganization' -> 'ccm:lifecyclecontributer_publisher' self.get_lifecycle_publisher_from_source_organization( lom_item_loader=lom, @@ -1119,28 +1111,7 @@ async def parse(self, response: scrapy.http.Response, **kwargs): base.add_value("permissions", permissions.load_item()) response_loader = ResponseItemLoader() - # ToDo: skip the scrapy.Request altogether? (-> would be a huge time benefit) - response_loader.add_value("status", response.status) - if not thumbnail_url: - # only use the headless browser if we need to take a website screenshot, otherwise skip this (expensive) - # part of the program flow completely - url_data = await WebTools.getUrlData(url=response.url, engine=WebEngine.Playwright) - if "html" in url_data: - response_loader.add_value("html", url_data["html"]) - if "text" in url_data: - response_loader.add_value("text", url_data["text"]) - if "cookies" in url_data: - response_loader.add_value("cookies", url_data["cookies"]) - if "har" in url_data: - response_loader.add_value("har", url_data["har"]) - if not thumbnail_url and "screenshot_bytes" in url_data: - # if a thumbnail was provided, use that first - otherwise try to use Playwright website screenshot - # ToDo: optional feature - control which thumbnail is used, depending on the metadata-provider? 
- # metadata-provider 'Open Music Academy' serves generic thumbnails, which is why a screenshot of the - # website will always be more interesting to users than the same generic image across ~650 materials - base.add_value("screenshot_bytes", url_data["screenshot_bytes"]) - response_loader.add_value("headers", response.headers) - response_loader.add_value("url", response.url) + response_loader.add_value("url", identifier_url) base.add_value("response", response_loader.load_item()) yield base.load_item() From 49b9cd0bb63a57f1c4edff42dc6086e587715645 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 21 Dec 2023 14:57:44 +0100 Subject: [PATCH 421/590] debug: use class-based logger instead of 'root'-logger - to increase readability of debug logs Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 64 ++++++++++++++++++++------------------- 1 file changed, 33 insertions(+), 31 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 27d4f97b..8875277b 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -26,6 +26,8 @@ from edu_sharing_client.configuration import Configuration from edu_sharing_client.rest import ApiException +log = logging.getLogger(__name__) + class EduSharingConstants: HOME = "-home-" @@ -115,7 +117,7 @@ class CreateGroupType(Enum): def __init__(self): cookie_threshold = env.get("EDU_SHARING_COOKIE_REBUILD_THRESHOLD", True) if cookie_threshold: - logging.info("Setting COOKIE_REBUILD_THRESHOLD to " + str(cookie_threshold) + " seconds") + log.info("Setting COOKIE_REBUILD_THRESHOLD to " + str(cookie_threshold) + " seconds") self.COOKIE_REBUILD_THRESHOLD = cookie_threshold self.enabled = env.get("MODE", default="edu-sharing") == "edu-sharing" if self.enabled: @@ -149,12 +151,12 @@ def sync_node(self, spider, type, properties): try: json_error: dict = json.loads(e.body) if json_error["error"] == "java.lang.IllegalStateException": - logging.warning( + log.warning( "Node '" + properties["cm:name"][0] + "' probably blocked for sync: " + json_error["message"] ) return None except json.JSONDecodeError: - logging.error( + log.error( f"ES_CONNECTOR: edu-sharing ApiException 'body'-attribute was't a deserializable JSON " f"String for item '{properties['cm:name'][0]}' " f"(replicationsourceid: '{properties['ccm:replicationsourceid']}'). " @@ -198,7 +200,7 @@ def set_permissions(self, uuid, permissions) -> bool: async def set_node_binary_data(self, uuid, item) -> bool: if "binary" in item: - logging.info( + log.info( get_project_settings().get("EDU_SHARING_BASE_URL") + "rest/node/v1/nodes/-home-/" + uuid @@ -237,7 +239,7 @@ async def set_node_preview(self, uuid, item) -> bool: ) return response.status_code == 200 else: - logging.warning("No thumbnail provided for " + uuid) + log.warning("No thumbnail provided for " + uuid) def map_license(self, spaces, license): if "url" in license: @@ -312,7 +314,7 @@ def map_license(self, spaces, license): case Constants.LICENSE_PDM: spaces["ccm:commonlicense_key"] = "PDM" case _: - logging.warning( + log.warning( f"License.url {license['url']} could not be mapped to a license from Constants.\n" f"If you are sure that you provided a correct URL to a license, " f"please check if the license-mapping within es_connector.py is up-to-date." 
@@ -326,7 +328,7 @@ def map_license(self, spaces, license): if "description" in license: spaces["cclom:rights_description"] = license["description"] case _: - logging.warning( + log.warning( f"Received a value for license['internal'] that is not recognized by es_connector. " f"Please double-check if the provided value {license['internal']} is correctly " f"mapped within Constants AND es_connector." @@ -383,7 +385,7 @@ def transform_item(self, uuid, spider, item): # edusharing requires milliseconds duration = int(float(duration) * 1000) except: - logging.debug( + log.debug( f"The supplied 'technical.duration'-value {duration} could not be converted from " f"seconds to milliseconds. ('cclom:duration' expects ms)" ) @@ -395,7 +397,7 @@ def transform_item(self, uuid, spider, item): if "role" not in person: continue if not person["role"].lower() in EduSharingConstants.LIFECYCLE_ROLES_MAPPING: - logging.warning( + log.warning( "The lifecycle role " + person["role"] + " is currently not supported by the edu-sharing connector" @@ -498,9 +500,9 @@ def transform_item(self, uuid, spider, item): if mdsId != "default": spaces["cm:edu_metadataset"] = mdsId spaces["cm:edu_forcemetadataset"] = "true" - logging.debug("Using metadataset " + mdsId) + log.debug("Using metadataset " + mdsId) else: - logging.debug("Using default metadataset") + log.debug("Using default metadataset") for key in spaces: if type(spaces[key]) is tuple: @@ -517,16 +519,16 @@ def create_groups_if_not_exists(self, groups, type: CreateGroupType): else: uuid = EduSharingConstants.GROUP_PREFIX + group if uuid in EduSharing.groupCache: - logging.debug("Group " + uuid + " is existing in cache, no need to create") + log.debug("Group " + uuid + " is existing in cache, no need to create") continue - logging.debug("Group " + uuid + " is not in cache, checking consistency...") + log.debug("Group " + uuid + " is not in cache, checking consistency...") try: group = EduSharing.iamApi.get_group(EduSharingConstants.HOME, uuid) - logging.info("Group " + uuid + " was found in edu-sharing (cache inconsistency), no need to create") + log.info("Group " + uuid + " was found in edu-sharing (cache inconsistency), no need to create") EduSharing.groupCache.append(uuid) continue except ApiException as e: - logging.info("Group " + uuid + " was not found in edu-sharing, creating it") + log.info("Group " + uuid + " was not found in edu-sharing, creating it") pass if type == EduSharing.CreateGroupType.MediaCenter: @@ -542,7 +544,7 @@ def create_groups_if_not_exists(self, groups, type: CreateGroupType): def set_node_permissions(self, uuid, item): if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is False: - logging.debug("Skipping permissions, EDU_SHARING_PERMISSION_CONTROL is set to false") + log.debug("Skipping permissions, EDU_SHARING_PERMISSION_CONTROL is set to false") return if "permissions" in item: permissions = { @@ -552,7 +554,7 @@ def set_node_permissions(self, uuid, item): public = item["permissions"]["public"] if public is True: if "groups" in item["permissions"] or "mediacenters" in item["permissions"]: - logging.error( + log.error( "Invalid state detected: Permissions public is set to true but groups or mediacenters are also set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!" 
) return @@ -571,7 +573,7 @@ def set_node_permissions(self, uuid, item): else: # Makes not much sense, may no permissions at all should be set # if not 'groups' in item['permissions'] and not 'mediacenters' in item['permissions']: - # logging.error('Invalid state detected: Permissions public is set to false but neither groups or mediacenters are set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!') + # log.error('Invalid state detected: Permissions public is set to false but neither groups or mediacenters are set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!') # return mergedGroups = [] if "groups" in item["permissions"]: @@ -617,10 +619,10 @@ def set_node_permissions(self, uuid, item): } ) if not self.set_permissions(uuid, permissions): - logging.error( + log.error( "Failed to set permissions, please check that the given groups/mediacenters are existing in the repository or set the autoCreate mode to true" ) - logging.error(item["permissions"]) + log.error(item["permissions"]) async def insert_item(self, spider, uuid, item): async with self._sem: @@ -637,7 +639,7 @@ async def update_item(self, spider, uuid, item): @staticmethod def init_cookie(): - logging.debug("Init edu sharing cookie...") + log.debug("Init edu sharing cookie...") settings = get_project_settings() auth = requests.get( settings.get("EDU_SHARING_BASE_URL") + "rest/authentication/v1/validateSession", @@ -648,7 +650,7 @@ def init_cookie(): headers={"Accept": "application/json"}, ) isAdmin = json.loads(auth.text)["isAdmin"] - logging.info("Got edu sharing cookie, admin status: " + str(isAdmin)) + log.info("Got edu sharing cookie, admin status: " + str(isAdmin)) if isAdmin: cookies = [] for cookie in auth.headers["SET-COOKIE"].split(","): @@ -695,7 +697,7 @@ def init_api_client(self): ): raise Exception(f"Given repository api version is unsupported: " + version_str) else: - logging.info("Detected edu-sharing bulk api with version " + version_str) + log.info("Detected edu-sharing bulk api with version " + version_str) if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is True: EduSharing.groupCache = list( map( @@ -703,11 +705,11 @@ def init_api_client(self): EduSharing.iamApi.search_groups(EduSharingConstants.HOME, "", max_items=1000000)["groups"], ) ) - logging.debug("Built up edu-sharing group cache: {}".format(EduSharing.groupCache)) + log.debug("Built up edu-sharing group cache: {}".format(EduSharing.groupCache)) return else: return - logging.warning(auth.text) + log.warning(auth.text) raise Exception( "Could not authentify as admin at edu-sharing. Please check your settings for repository " + settings.get("EDU_SHARING_BASE_URL") @@ -741,12 +743,12 @@ def find_item(self, id, spider): if e.status == 401: # Typically happens when the edu-sharing session cookie is lost and needs to be renegotiated. # (edu-sharing error-message: "Admin rights are required for this endpoint") - logging.info( + log.info( f"ES_CONNECTOR: edu-sharing returned HTTP-statuscode {e.status} for (replicationsourceid " f"'{id}')." 
) - logging.debug(f"(HTTP-Body: '{e.body}\n')" f"Reason: {e.reason}\n" f"HTTP Headers: {e.headers}") - logging.info("ES_CONNECTOR: Re-initializing edu-sharing API Client...") + log.debug(f"(HTTP-Body: '{e.body}\n')" f"Reason: {e.reason}\n" f"HTTP Headers: {e.headers}") + log.info("ES_CONNECTOR: Re-initializing edu-sharing API Client...") self.init_api_client() return None if e.status == 404: @@ -757,12 +759,12 @@ def find_item(self, id, spider): # when there is no already existing node in the edu-sharing repository, edu-sharing returns # a "DAOMissingException". The following debug message is commented out to reduce log-spam: # error_message: str = error_dict["message"] - # logging.debug(f"ES_CONNECTOR 'find_item': edu-sharing returned HTTP-statuscode 404 " + # log.debug(f"ES_CONNECTOR 'find_item': edu-sharing returned HTTP-statuscode 404 " # f"('{error_message}') for\n '{id}'. \n(This typically means that there was no " # f"existing node in the edu-sharing repository. Continuing...)") return None else: - logging.debug( + log.debug( f"ES_CONNECTOR 'find_item': edu-sharing returned HTTP-statuscode {e.status} " f"(replicationsourceid '{id}'):\n" f"HTTP Body: {e.body}\n" @@ -770,7 +772,7 @@ def find_item(self, id, spider): ) return None except json.JSONDecodeError: - logging.debug( + log.debug( f"ES_CONNECTOR 'find_item': edu-sharing returned HTTP-statuscode {e.status} " f"(replicationsourceid '{id}'):\n" f"HTTP Body: {e.body}\n" From d40cab97f4ecccd6486d1d5de56583d2b9b68c36 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 21 Dec 2023 22:06:18 +0100 Subject: [PATCH 422/590] build: update Pillow to 10.1.0 --- poetry.lock | 114 +++++++++++++++++++++++------------------------ pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 58 insertions(+), 60 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9dddab5a..e8e2dd26 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1224,67 +1224,65 @@ files = [ [[package]] name = "pillow" -version = "10.0.0" +version = "10.1.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f62406a884ae75fb2f818694469519fb685cc7eaff05d3451a9ebe55c646891"}, - {file = "Pillow-10.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5db32e2a6ccbb3d34d87c87b432959e0db29755727afb37290e10f6e8e62614"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf4392b77bdc81f36e92d3a07a5cd072f90253197f4a52a55a8cec48a12483b"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:520f2a520dc040512699f20fa1c363eed506e94248d71f85412b625026f6142c"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:8c11160913e3dd06c8ffdb5f233a4f254cb449f4dfc0f8f4549eda9e542c93d1"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a74ba0c356aaa3bb8e3eb79606a87669e7ec6444be352870623025d75a14a2bf"}, - {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d0dae4cfd56969d23d94dc8e89fb6a217be461c69090768227beb8ed28c0a3"}, - {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22c10cc517668d44b211717fd9775799ccec4124b9a7f7b3635fc5386e584992"}, - {file = "Pillow-10.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:dffe31a7f47b603318c609f378ebcd57f1554a3a6a8effbc59c3c69f804296de"}, - {file = 
"Pillow-10.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:9fb218c8a12e51d7ead2a7c9e101a04982237d4855716af2e9499306728fb485"}, - {file = "Pillow-10.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d35e3c8d9b1268cbf5d3670285feb3528f6680420eafe35cccc686b73c1e330f"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ed64f9ca2f0a95411e88a4efbd7a29e5ce2cea36072c53dd9d26d9c76f753b3"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6eb5502f45a60a3f411c63187db83a3d3107887ad0d036c13ce836f8a36f1d"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c1fbe7621c167ecaa38ad29643d77a9ce7311583761abf7836e1510c580bf3dd"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cd25d2a9d2b36fcb318882481367956d2cf91329f6892fe5d385c346c0649629"}, - {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, - {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, - {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, - {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, - {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, - {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce543ed15570eedbb85df19b0a1a7314a9c8141a36ce089c0a894adbfccb4568"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:685ac03cc4ed5ebc15ad5c23bc555d68a87777586d970c2c3e216619a5476223"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d72e2ecc68a942e8cf9739619b7f408cc7b272b279b56b2c83c6123fcfa5cdff"}, - {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, - {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, - {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, - {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, - {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, - {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3f07ea8d2f827d7d2a49ecf1639ec02d75ffd1b88dcc5b3a61bbb37a8759ad8d"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:040586f7d37b34547153fa383f7f9aed68b738992380ac911447bb78f2abe530"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f88a0b92277de8e3ca715a0d79d68dc82807457dae3ab8699c758f07c20b3c51"}, - {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c7cf14a27b0d6adfaebb3ae4153f1e516df54e47e42dcc073d7b3d76111a8d86"}, - {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3400aae60685b06bb96f99a21e1ada7bc7a413d5f49bce739828ecd9391bb8f7"}, - {file = "Pillow-10.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbc02381779d412145331789b40cc7b11fdf449e5d94f6bc0b080db0a56ea3f0"}, - {file = "Pillow-10.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9211e7ad69d7c9401cfc0e23d49b69ca65ddd898976d660a2fa5904e3d7a9baa"}, - {file = "Pillow-10.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:faaf07ea35355b01a35cb442dd950d8f1bb5b040a7787791a535de13db15ed90"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f72a021fbb792ce98306ffb0c348b3c9cb967dce0f12a49aa4c3d3fdefa967"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f7c16705f44e0504a3a2a14197c1f0b32a95731d251777dcb060aa83022cb2d"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:76edb0a1fa2b4745fb0c99fb9fb98f8b180a1bbceb8be49b087e0b21867e77d3"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:368ab3dfb5f49e312231b6f27b8820c823652b7cd29cfbd34090565a015e99ba"}, - {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:608bfdee0d57cf297d32bcbb3c728dc1da0907519d1784962c5f0c68bb93e5a3"}, - {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5c6e3df6bdd396749bafd45314871b3d0af81ff935b2d188385e970052091017"}, - {file = "Pillow-10.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:7be600823e4c8631b74e4a0d38384c73f680e6105a7d3c6824fcf226c178c7e6"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:92be919bbc9f7d09f7ae343c38f5bb21c973d2576c1d45600fce4b74bafa7ac0"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8182b523b2289f7c415f589118228d30ac8c355baa2f3194ced084dac2dbba"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:38250a349b6b390ee6047a62c086d3817ac69022c127f8a5dc058c31ccef17f3"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88af2003543cc40c80f6fca01411892ec52b11021b3dc22ec3bc9d5afd1c5334"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c189af0545965fa8d3b9613cfdb0cd37f9d71349e0f7750e1fd704648d475ed2"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce7b031a6fc11365970e6a5686d7ba8c63e4c1cf1ea143811acbb524295eabed"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db24668940f82321e746773a4bc617bfac06ec831e5c88b643f91f122a785684"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:efe8c0681042536e0d06c11f48cebe759707c9e9abf880ee213541c5b46c5bf3"}, - {file = "Pillow-10.0.0.tar.gz", hash = "sha256:9c82b5b3e043c7af0d95792d0d20ccf68f61a1fec6b3530e718b688422727396"}, + {file = 
"Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, + {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, + {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, + {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash 
= "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, + {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, + {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, + {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, + {file = 
"Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, + {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, ] [package.extras] @@ -2182,4 +2180,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "bfab694eeb1b66b9383643eabb65947057108c2c884e73bf01f210fdf54e784e" +content-hash = "706d900dfbb99c202dd614add1dc3a5898d3da3af3d5e602660f2dd25e3a041e" diff --git a/pyproject.toml b/pyproject.toml index 61904fe4..2f3e5c8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,7 +73,7 @@ itemloaders="1.1.0" isodate="0.6.1" lxml="4.9.3" overrides="3.1.0" -Pillow="10.0.0" +Pillow="10.1.0" playwright="1.40" pyOpenSSL="23.3.0" pytest="^7.4.3" diff --git a/requirements.txt b/requirements.txt index d9db2647..d121d20e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -52,7 +52,7 @@ overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" packaging==23.2 ; python_version >= "3.10" and python_version < "4.0" parsel==1.8.1 ; python_version >= "3.10" and python_version < "4.0" pathspec==0.11.2 ; python_version >= "3.10" and python_version < "4.0" -pillow==10.0.0 ; python_version >= "3.10" and python_version < "4.0" +pillow==10.1.0 ; python_version >= "3.10" and python_version < "4.0" platformdirs==4.0.0 ; python_version >= "3.10" and python_version < "4.0" playwright==1.40.0 ; python_version >= "3.10" and python_version < "4.0" pluggy==1.3.0 ; python_version >= "3.10" and python_version < "4.0" From 32e154259d006e58f11b9f6f2783a2d9471c3778 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 21 Dec 2023 22:39:12 +0100 Subject: [PATCH 423/590] change: Thumbnail fallback, Thumbnail URL handling for PNGs - fix: falling back to Splash/Playwright Thumnbnails failed when items did not have "technical.format" information -- in case of OERSI and SODIX items we might not have a valid "technical.format" value readily available for every item and hard-coding "text/html" for each item would make problems worse (causing Exceptions and items to be dropped) -- by moving the responsibility of URL-checking to WebTools, we don't have to hard-code "text/html" to each item anymore - change/fix: Warnings caused by PNG files with "RGBA"-palette present -- PNGs with "RGBA"-palettes present caused thumbnails to not be rendered correctly, leading to Exceptions and dropped items -- change: if a PNG image is detected after downloading, it will not be converted to JPEG anymore (instead we stick to the PNG format for these files) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> 
---
 converter/pipelines.py | 66 ++++++++++++++++++++++++++----------------
 1 file changed, 41 insertions(+), 25 deletions(-)

diff --git a/converter/pipelines.py b/converter/pipelines.py
index cbde4a49..60958c4d 100644
--- a/converter/pipelines.py
+++ b/converter/pipelines.py
@@ -449,8 +449,6 @@ async def process_item(self, raw_item, spider):
         elif (
             "location" in item["lom"]["technical"]
             and len(item["lom"]["technical"]["location"]) > 0
-            and "format" in item["lom"]["technical"]
-            and item["lom"]["technical"]["format"] == "text/html"
         ):
             if settings_crawler.get("SPLASH_URL") and web_tools == WebEngine.Splash:
                 target_url: str = item["lom"]["technical"]["location"][0]
@@ -539,6 +537,7 @@ async def process_item(self, raw_item, spider):
         except Exception as e:
             if url is not None:
                 log.warning(f"Could not read thumbnail at {url}: {str(e)} (falling back to screenshot)")
+            raise e
             if "thumbnail" in item:
                 del item["thumbnail"]
             return await self.process_item(raw_item, spider)
@@ -580,29 +579,46 @@ async def download_thumbnail_url(self, url: str, spider: scrapy.Spider):
     # override the project settings with the given ones from the current spider
     # see PR 56 for details
-    def create_thumbnails_from_image_bytes(self, image, item, settings):
-        small = BytesIO()
-        self.scale_image(image, settings.get("THUMBNAIL_SMALL_SIZE")).save(
-            small,
-            "JPEG",
-            mode="RGB",
-            quality=settings.get("THUMBNAIL_SMALL_QUALITY"),
-        )
-        large = BytesIO()
-        self.scale_image(image, settings.get("THUMBNAIL_LARGE_SIZE")).save(
-            large,
-            "JPEG",
-            mode="RGB",
-            quality=settings.get("THUMBNAIL_LARGE_QUALITY"),
-        )
-        item["thumbnail"] = {}
-        item["thumbnail"]["mimetype"] = "image/jpeg"
-        item["thumbnail"]["small"] = base64.b64encode(
-            small.getvalue()
-        ).decode()
-        item["thumbnail"]["large"] = base64.b64encode(
-            large.getvalue()
-        ).decode()
+    def create_thumbnails_from_image_bytes(self, image: Image.Image, item, settings):
+        small_buffer: BytesIO = BytesIO()
+        large_buffer: BytesIO = BytesIO()
+        if image.format == "PNG":
+            # PNG images with image.mode == "RGBA" cannot be converted cleanly to JPEG,
+            # which is why we're handling PNGs separately
+            small_copy = image.copy()
+            large_copy = image.copy()
+            # Pillow modifies the image object in place -> remember to use the correct copy
+            small_copy.thumbnail(size=(250, 250))
+            large_copy.thumbnail(size=(800, 800))
+            # ToDo:
+            #  Rework settings.py thumbnail config to retrieve values as width & height instead of sum(int)
+            small_copy.save(small_buffer, format="PNG")
+            large_copy.save(large_buffer, format="PNG")
+            item["thumbnail"] = {}
+            item["thumbnail"]["mimetype"] = "image/png"
+            item["thumbnail"]["small"] = base64.b64encode(small_buffer.getvalue()).decode()
+            item["thumbnail"]["large"] = base64.b64encode(large_buffer.getvalue()).decode()
+        else:
+            self.scale_image(image, settings.get("THUMBNAIL_SMALL_SIZE")).save(
+                small_buffer,
+                "JPEG",
+                mode="RGB",
+                quality=settings.get("THUMBNAIL_SMALL_QUALITY"),
+            )
+            self.scale_image(image, settings.get("THUMBNAIL_LARGE_SIZE")).save(
+                large_buffer,
+                "JPEG",
+                mode="RGB",
+                quality=settings.get("THUMBNAIL_LARGE_QUALITY"),
+            )
+            item["thumbnail"] = {}
+            item["thumbnail"]["mimetype"] = "image/jpeg"
+            item["thumbnail"]["small"] = base64.b64encode(
+                small_buffer.getvalue()
+            ).decode()
+            item["thumbnail"]["large"] = base64.b64encode(
+                large_buffer.getvalue()
+            ).decode()


 def get_settings_for_crawler(spider) -> scrapy.settings.Settings:

From 45dda08f14000ca11314abc91fc48161eeec23a2 Mon Sep 17 00:00:00 2001
From: criamos
<981166+Criamos@users.noreply.github.com> Date: Thu, 21 Dec 2023 22:44:03 +0100 Subject: [PATCH 424/590] feat: WebTools file extension check before parsing - the Thumbnail Pipeline uses "technical.location" URLs as a fallback for website screenshots, but oftentimes (especially in the case of OERSI and SODIX) URLs are sent to the headless browser which cannot be rendered, either causing long timeouts or Exceptions - a filetype excluson list is used to (rudimentarily) check URLs for problematic file types -- ToDo: checking for these types needs to be improved and fine-tuned, either via RegExp or preliminary HTTP Requests (method: HEADER only) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 85 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 82 insertions(+), 3 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index 9fcd1dd6..fe97bdc7 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -12,14 +12,63 @@ from converter import env log = logging.getLogger(__name__) -logging.getLogger('trafilatura').setLevel(logging.INFO) # trafilatura is quite spammy +logging.getLogger("trafilatura").setLevel(logging.INFO) # trafilatura is quite spammy + +ignored_file_extensions: list[str] = [ + # file extensions that cause unexpected behavior when trying to render them with a headless browser + ".aac", + ".avi", + ".bin", + ".bmp", + ".bz", + ".cda", + ".csv", + ".doc", + ".docx", + ".epub", + ".gz", + ".mbz", + ".mid", + ".midi", + ".mp3", + ".mp4", + ".mpeg", + ".mpkg", + ".odp", + ".ods", + ".odt", + ".oga", + ".ogx", + ".opus", + ".otf", + ".pdf", + ".pptx", + ".rar", + ".rtf", + ".sh", + ".tar", + ".ts", + ".ttf", + ".txt", + ".vsd", + ".wav", + ".weba", + ".webm", + ".webp", + ".xls", + ".xlsx", + ".zip", + ".3gp", + ".3g2", + ".7z", +] class WebEngine(Enum): # Splash (default engine) - Splash = 'splash', + Splash = "splash" # Playwright is controlling a headless Chrome browser - Playwright = 'playwright' + Playwright = "playwright" class WebTools: @@ -38,8 +87,38 @@ async def __safely_get_playwright_response(cls, url: str): async with cls._sem_playwright: return await WebTools.__getUrlDataPlaywright(url) + @classmethod + def url_cant_be_rendered_by_headless_browsers(cls, url: str) -> bool: + # ToDo: + # - extend the list of problematic file extensions as they occur during debugging + # - implement check for parametrized URLs (e.g. "/image.png?token=..." and other edge-cases + if isinstance(url, str) and url: + # checking if the provided URL is actually a string + for file_extension in ignored_file_extensions: + if url.endswith(file_extension): + log.warning( + f"Problematic file extension {file_extension} detected in URL {url} ! " + f"Headless browsers can't render this file type." + ) + return True + else: + log.debug(f"URL {url} does not appear to be a string value. WebTools REQUIRE an URL string.") + return False + @classmethod async def getUrlData(cls, url: str, engine: WebEngine = WebEngine.Playwright): + url_contains_problematic_file_extension: bool = cls.url_cant_be_rendered_by_headless_browsers(url=url) + if url_contains_problematic_file_extension: + # most binary files cannot be rendered by Playwright or Splash and would cause unexpected behavior in the + # Thumbnail Pipeline + # ToDo: handle websites that redirect to binary downloads gracefully + # - maybe by checking the MIME-Type in response headers first? 
+ log.warning( + f"File extension in URL {url} detected which cannot be rendered by headless browsers. " + f"Skipping WebTools rendering for this url..." + ) + return + if engine == WebEngine.Splash: return await cls.__safely_get_splash_response(url) elif engine == WebEngine.Playwright: From c20c333d0245bc34c91589552feceff9eef49964 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 22 Dec 2023 14:49:56 +0100 Subject: [PATCH 425/590] change: use Playwright in LomBase as default Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/lom_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 9ee6a105..e9e036bb 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -23,7 +23,7 @@ class LomBase: # you can specify custom settings which will later influence the behaviour of the pipelines for your crawler custom_settings = settings.BaseSettings({ # web tools to use, relevant for screenshots/thumbnails - "WEB_TOOLS": WebEngine.Splash, + "WEB_TOOLS": WebEngine.Playwright, }, 'spider') def __init__(self, **kwargs): From d705cc785e83e41919ef55d96b749518d25d3222 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 16 Jan 2024 14:38:23 +0100 Subject: [PATCH 426/590] docs: DocStrings, explanations regarding Semaphore usage Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index fe97bdc7..e2c1da29 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -74,21 +74,43 @@ class WebEngine(Enum): class WebTools: _sem_splash: Semaphore = Semaphore(10) _sem_playwright: Semaphore = Semaphore(10) + # reminder: if you increase this Semaphore value, you NEED to change the "browserless v2"-docker-container + # configuration accordingly! (e.g., by increasing the MAX_CONCURRENT_SESSIONS and MAX_QUEUE_LENGTH configuration + # settings, see: https://www.browserless.io/docs/docker) @classmethod async def __safely_get_splash_response(cls, url: str): - # ToDo: Docs + """Send a URL string to the Splash container for HTTP / Screenshot rendering if a Semaphore can be acquired. + + (The Semaphore is used to control / throttle the number of concurrent pending requests to the Splash container, + which is necessary because Splash can only handle a specific number of connections at the same time.) + """ async with cls._sem_splash: return await WebTools.__getUrlDataSplash(url) @classmethod async def __safely_get_playwright_response(cls, url: str): - # ToDo: Docs + """Send a URL string to the Playwright container ("browserless v2") for HTTP / Screenshot rendering if a + Semaphore can be acquired. + + (The Semaphore is used to control / throttle the number of concurrent pending requests to the Playwright + container, which is necessary because Playwright only allows a specific number of connections / requests in the + queue at the same time. + browserless v2 defaults to: 5 concurrent requests // 5 requests in the queue + => Semaphore value of 10 should guarantee that neither the crawler nor the pipelines make more requests than the + container is able to handle.) 
+ + For details, see: + https://www.browserless.io/docs/docker#max-concurrent-sessions + https://www.browserless.io/docs/docker#max-queue-length + """ async with cls._sem_playwright: return await WebTools.__getUrlDataPlaywright(url) @classmethod def url_cant_be_rendered_by_headless_browsers(cls, url: str) -> bool: + """Rudimentary check for problematic file extensions within a provided URL string. + Returns True if a problematic extension was detected.""" # ToDo: # - extend the list of problematic file extensions as they occur during debugging # - implement check for parametrized URLs (e.g. "/image.png?token=..." and other edge-cases @@ -149,7 +171,6 @@ async def __getUrlDataSplash(url: str): # html = None if settings.get("SPLASH_URL") and not url.endswith(".pdf") and not url.endswith(".docx"): # Splash can't handle some binary direct-links (Splash will throw "LUA Error 400: Bad Request" as a result) - # ToDo: which additional filetypes need to be added to the exclusion list? - media files (.mp3, mp4 etc.?) async with httpx.AsyncClient() as client: result = await client.post( settings.get("SPLASH_URL") + "/render.json", From 2b885a1bbf5038d7414d0285914bfb0ce3665f92 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Jan 2024 19:32:30 +0100 Subject: [PATCH 427/590] feat: improve Error-Handling for broken image files ("PIL.UnidentifiedImageError") - during debugging it was observed that RPI-virtuell served over 700 image files which could not be read / opened by the Thumbnail-Pipeline, thereby causing Exceptions and dropped items -- if an "UnidentifiedImageError" occurs during thumbnail downloads, fallback to a website-screenshot and log a warning Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 60958c4d..43d15684 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -17,6 +17,7 @@ from io import BytesIO from typing import BinaryIO, TextIO, Optional +import PIL import dateparser import dateutil.parser import isodate @@ -534,6 +535,13 @@ async def process_item(self, raw_item, spider): else: img = Image.open(BytesIO(response.body)) self.create_thumbnails_from_image_bytes(img, item, settings_crawler) + except PIL.UnidentifiedImageError: + # this error can be observed when a website serves broken / malformed images + if url: + log.warning(f"Thumbnail download of image file {url} failed: image file could not be identified " + f"(Image might be broken or corrupt). 
Falling back to website-screenshot.") + del item["thumbnail"] + return await self.process_item(raw_item, spider) except Exception as e: if url is not None: log.warning(f"Could not read thumbnail at {url}: {str(e)} (falling back to screenshot)") From 566ea4830bab9a61952b1211235492bc09f03e2a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 24 Jan 2024 11:56:49 +0100 Subject: [PATCH 428/590] rpi_virtuell_spider v0.0.9 - fix: edge-case (AttributeError) handling for 'educational.typicalAgeRange' when encountering empty / non-valid strings in "material_altersstufe" - fix: getId() and getHash() implementation -- the old implementation did not properly skip items which didn't warrant an update - style: code formatting via black Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/rpi_virtuell_spider.py | 220 ++++++++++++----------- 1 file changed, 120 insertions(+), 100 deletions(-) diff --git a/converter/spiders/rpi_virtuell_spider.py b/converter/spiders/rpi_virtuell_spider.py index c3588cc7..7eda8ee0 100644 --- a/converter/spiders/rpi_virtuell_spider.py +++ b/converter/spiders/rpi_virtuell_spider.py @@ -1,3 +1,4 @@ +import logging import re from typing import Optional @@ -6,124 +7,144 @@ from scrapy.spiders import CrawlSpider from converter.constants import Constants -from converter.items import LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, \ - BaseItemLoader, LomAgeRangeItemLoader +from converter.items import ( + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, + ResponseItemLoader, + BaseItemLoader, + LomAgeRangeItemLoader, +) from converter.spiders.base_classes import LomBase from converter.web_tools import WebEngine +logger = logging.getLogger(__name__) + class RpiVirtuellSpider(CrawlSpider, LomBase): """ scrapes materials from https://material.rpi-virtuell.de via wp-json API: https://material.rpi-virtuell.de/wp-json/ """ + name = "rpi_virtuell_spider" friendlyName = "rpi-virtuell" - start_urls = ['https://material.rpi-virtuell.de/wp-json/mymaterial/v1/material/'] + start_urls = ["https://material.rpi-virtuell.de/wp-json/mymaterial/v1/material/"] - version = "0.0.8" # last update: 2023-12-08 + version = "0.0.9" # last update: 2024-01-24 custom_settings = { - 'ROBOTSTXT_OBEY': False, - 'AUTOTHROTTLE_ENABLED': True, - 'AUTOTHROTTLE_DEBUG': True, - 'AUTOTHROTTLE_TARGET_CONCURRENCY': 12, - 'CONCURRENT_REQUESTS_PER_DOMAIN': 6, - 'WEB_TOOLS': WebEngine.Playwright, + "ROBOTSTXT_OBEY": False, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 12, + "CONCURRENT_REQUESTS_PER_DOMAIN": 6, + "WEB_TOOLS": WebEngine.Playwright, # 'DUPEFILTER_DEBUG': True } wp_json_pagination_parameters = { # wp-json API returns up to 100 records per request, with the amount of pages in total depending on the chosen # pagination parameters, see https://developer.wordpress.org/rest-api/using-the-rest-api/pagination/ - 'start_page_number': 0, + "start_page_number": 0, # number of records that should be returned per request: - 'per_page_elements': 100 + "per_page_elements": 100, } # Mapping "material_bildungsstufe" -> SkoHub: # see https://vocabs.openeduhub.de/w3id.org/openeduhub/vocabs/educationalContext/index.html mapping_edu_context = { - 'Arbeit mit 
Jugendlichen': "", 'Arbeit mit Kindern': "", - 'Ausbildung': "http://w3id.org/openeduhub/vocabs/educationalContext/berufliche_bildung", - 'Berufsschule': "http://w3id.org/openeduhub/vocabs/educationalContext/berufliche_bildung", - 'Elementarbereich': "http://w3id.org/openeduhub/vocabs/educationalContext/elementarbereich", - 'Erwachsenenbildung': "http://w3id.org/openeduhub/vocabs/educationalContext/erwachsenenbildung", 'Gemeinde': "", - 'Grundschule': "http://w3id.org/openeduhub/vocabs/educationalContext/grundschule", 'Kindergottesdienst': "", - 'Konfirmandenarbeit': "", - 'Oberstufe': "http://w3id.org/openeduhub/vocabs/educationalContext/sekundarstufe_2", - 'Schulstufen': "", # alle Schulstufen? age range? - 'Sekundarstufe': "http://w3id.org/openeduhub/vocabs/educationalContext/sekundarstufe_1", 'Unterrichtende': "" + "Arbeit mit Jugendlichen": "", + "Arbeit mit Kindern": "", + "Ausbildung": "http://w3id.org/openeduhub/vocabs/educationalContext/berufliche_bildung", + "Berufsschule": "http://w3id.org/openeduhub/vocabs/educationalContext/berufliche_bildung", + "Elementarbereich": "http://w3id.org/openeduhub/vocabs/educationalContext/elementarbereich", + "Erwachsenenbildung": "http://w3id.org/openeduhub/vocabs/educationalContext/erwachsenenbildung", + "Gemeinde": "", + "Grundschule": "http://w3id.org/openeduhub/vocabs/educationalContext/grundschule", + "Kindergottesdienst": "", + "Konfirmandenarbeit": "", + "Oberstufe": "http://w3id.org/openeduhub/vocabs/educationalContext/sekundarstufe_2", + "Schulstufen": "", # alle Schulstufen? age range? + "Sekundarstufe": "http://w3id.org/openeduhub/vocabs/educationalContext/sekundarstufe_1", + "Unterrichtende": "", } # copyright is only available as a String (description) on the material_review_url itself, this debug list could be # deleted once it's confirmed with rpi-virtuell which OER model they actually use here: copyright_debug_list = { - 'Zur Wiederverwendung und Veränderung gekennzeichnet': "", - 'Zur Wiederverwendung und Veränderung gekennzeichnet\t \t \t\t frei zugänglich': "", - 'Zur nicht kommerziellen Wiederverwendung gekennzeichnet': "", - 'Zur nicht kommerziellen Wiederverwendung gekennzeichnet\t \t \t\t frei zugänglich': "", - 'Zur nicht kommerziellen Wiederverwendung und Veränderung gekennzeichnet': "", - 'Zur nicht kommerziellen Wiederverwendung und Veränderung gekennzeichnet' - '\t \t \t\t frei zugänglich': "", - 'frei zugänglich': "", - 'kostenfrei nach Anmeldung': "", - 'kostenpflichtig': "" + "Zur Wiederverwendung und Veränderung gekennzeichnet": "", + "Zur Wiederverwendung und Veränderung gekennzeichnet\t \t \t\t frei zugänglich": "", + "Zur nicht kommerziellen Wiederverwendung gekennzeichnet": "", + "Zur nicht kommerziellen Wiederverwendung gekennzeichnet\t \t \t\t frei zugänglich": "", + "Zur nicht kommerziellen Wiederverwendung und Veränderung gekennzeichnet": "", + "Zur nicht kommerziellen Wiederverwendung und Veränderung gekennzeichnet" + "\t \t \t\t frei zugänglich": "", + "frei zugänglich": "", + "kostenfrei nach Anmeldung": "", + "kostenpflichtig": "", } # rpi-virtuell has clarified their license-description: # 'Zur Wiederverwendung und Veränderung gekennzeichnet' can be both CC_BY and CC_BY_SA # since CC_BY_SA is slightly more restricting, we choose this mapping rather than the more liberal CC_BY mapping_copyright_url = { - '?fwp_lizenz=non-commercial-remixable': Constants.LICENSE_CC_BY_NC_SA_30, - '?fwp_lizenz=non-commercial-copyable': Constants.LICENSE_CC_BY_NC_ND_40, - '?fwp_lizenz=remixable': 
Constants.LICENSE_CC_BY_SA_40, - '?fwp_verfuegbarkeit=kostenpflichtig': Constants.LICENSE_COPYRIGHT_LAW + "?fwp_lizenz=non-commercial-remixable": Constants.LICENSE_CC_BY_NC_SA_30, + "?fwp_lizenz=non-commercial-copyable": Constants.LICENSE_CC_BY_NC_ND_40, + "?fwp_lizenz=remixable": Constants.LICENSE_CC_BY_SA_40, + "?fwp_verfuegbarkeit=kostenpflichtig": Constants.LICENSE_COPYRIGHT_LAW # unclear to map to anything # '?fwp_lizenz=copyable': Constants. } - mapping_media_types = {'Anforderungssituation': "", - 'Arbeitsblatt': "worksheet", - 'Audio': "audio", - 'Aufgabenstellung': "", - 'Bild': "image", - 'Dossier': "", - 'E-Learning': "", - 'Erzählung': "", - 'Fachinformation': "", # reference (Primärquelle?) - 'Gamification': "", # maybe map to educational game ? - 'Gebet/Lied': "", - 'Gottesdienstentwurf': "", - 'Internetportal': "web page", - 'Lernorte': "", - 'Lernstationen': "", - 'Lokale Einrichtung': "", - 'Medien': "audiovisual medium", - 'Online Lesson': "", - 'Praxishilfen': "", - 'Projektplanung': "", - 'Präsentation': "presentation", - 'Text/Aufsatz': "text", - 'Unterrichtsentwurf': "lesson plan", - 'Video': "video", - 'Video im Medienportal': "video", - 'Virtueller Lernort': "", - 'Vorbereitung': "lesson plan", - 'Zeitschrift/Buch': "text"} + mapping_media_types = { + "Anforderungssituation": "", + "Arbeitsblatt": "worksheet", + "Audio": "audio", + "Aufgabenstellung": "", + "Bild": "image", + "Dossier": "", + "E-Learning": "", + "Erzählung": "", + "Fachinformation": "", # reference (Primärquelle?) + "Gamification": "", # maybe map to educational game ? + "Gebet/Lied": "", + "Gottesdienstentwurf": "", + "Internetportal": "web page", + "Lernorte": "", + "Lernstationen": "", + "Lokale Einrichtung": "", + "Medien": "audiovisual medium", + "Online Lesson": "", + "Praxishilfen": "", + "Projektplanung": "", + "Präsentation": "presentation", + "Text/Aufsatz": "text", + "Unterrichtsentwurf": "lesson plan", + "Video": "video", + "Video im Medienportal": "video", + "Virtueller Lernort": "", + "Vorbereitung": "lesson plan", + "Zeitschrift/Buch": "text", + } def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) def getId(self, response=None) -> str: """ - returns the review_url of the element + Return the current item URL (resolved "review_url") as a string. """ - pass + return response.url def getHash(self, response=None) -> Optional[str]: """ - returns a string of the date + version of the crawler + Return a string of the last modified date and the version of the crawler as a string. 
""" - pass + date_modified: str = response.xpath('//meta[@property="og:article:modified_time"]/@content').get() + hash_temp: str = f"{date_modified}v{self.version}" + return hash_temp def start_requests(self): """ @@ -137,13 +158,13 @@ def start_requests(self): # or # with [...]/material/?parameters for url in self.start_urls: - if (url.split('/')[-2] == 'material') and (url.split('/')[-1] == ''): + if (url.split("/")[-2] == "material") and (url.split("/")[-1] == ""): # making sure that the crawler is at the correct url and starting at whatever page we choose: first_page_number = self.get_first_page_parameter() per_page = self.get_per_page_parameter() - first_url = url + f'?page={first_page_number}&per_page={per_page}' + first_url = url + f"?page={first_page_number}&per_page={per_page}" yield scrapy.Request(url=first_url, callback=self.parse) - elif (url.split('/')[-2] == 'material') and (url.split('/') != ''): + elif (url.split("/")[-2] == "material") and (url.split("/") != ""): yield scrapy.Request(url=url, callback=self.parse) def parse(self, response: scrapy.http.TextResponse, **kwargs): @@ -168,8 +189,7 @@ def parse(self, response: scrapy.http.TextResponse, **kwargs): i += 1 yield from self.parse_page(response) else: - url_temp = response.urljoin( - f'?page={i}&per_page={self.get_per_page_parameter()}') + url_temp = response.urljoin(f"?page={i}&per_page={self.get_per_page_parameter()}") yield response.follow(url=url_temp, callback=self.parse_page) # only use this iteration method if you want to (slowly) go through pages one-by-one: @@ -183,7 +203,7 @@ def iterate_through_pages_slowly(self, current_url, response): next_page_number = current_page_number + 1 if current_page_number < last_page: # logging.debug(f"Next Page #: {next_page_number}") - next_url = response.urljoin(f'?page={next_page_number}&per_page={self.get_per_page_parameter()}') + next_url = response.urljoin(f"?page={next_page_number}&per_page={self.get_per_page_parameter()}") # logging.debug(f"Next URL will be: {next_url}") yield response.follow(next_url, callback=self.parse) @@ -212,8 +232,8 @@ def get_current_page_number(response) -> int: :return: number of the current "wp_json"-page as Integer """ # last part of the current url will look like this: '?page=1&per_page=10' - last_part_of_url = response.url.split('/')[-1] - page_regex = re.compile(r'(\?page=)(\d+)') + last_part_of_url = response.url.split("/")[-1] + page_regex = re.compile(r"(\?page=)(\d+)") current_page_number = int(page_regex.search(last_part_of_url).group(2)) print("Current Page #: ", current_page_number) return current_page_number @@ -250,14 +270,9 @@ def parse_page(self, response: scrapy.http.TextResponse = None): for item in current_page_json: item_copy: dict = item - wp_json_item = { - "id": item_copy.get("material_review_url"), - "item": item_copy - } + wp_json_item = {"id": item_copy.get("material_review_url"), "item": item_copy} review_url = item_copy.get("material_review_url") - yield scrapy.Request(url=review_url, - callback=self.get_metadata_from_review_url, - cb_kwargs=wp_json_item) + yield scrapy.Request(url=review_url, callback=self.get_metadata_from_review_url, cb_kwargs=wp_json_item) def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs): """ @@ -269,18 +284,19 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) """ # logging.debug("DEBUG inside get_metadata_from_review_url: wp_json_item id", kwargs.get("id")) wp_json_item = kwargs.get("item") - # logging.debug("DEBUG inside 
get_metadata_from_review_url: response type = ", type(response), - # "url =", response.url) + + if self.shouldImport(response) is False: + logger.debug(f"Skipping entry {response.url} because shouldImport() returned False") + return None + if self.getId(response) is not None and self.getHash(response) is not None: + if not self.hasChanged(response): + return None base = BaseItemLoader() - base.add_value("sourceId", response.url) + base.add_value("sourceId", self.getId(response=response)) date_modified: str = response.xpath('//meta[@property="og:article:modified_time"]/@content').get() - hash_temp = date_modified + self.version - base.add_value("hash", hash_temp) - - # base.add_value("response", super().mapResponse(response).load_item()) + base.add_value("hash", self.getHash(response=response)) - # base.add_value("type", Constants.TYPE_MATERIAL) base.add_value("thumbnail", wp_json_item.get("material_screenshot")) # base.add_value("lastModified", wp_json_item.get("date")) # is "date" from wp_json for lastModified correct? base.add_value("lastModified", date_modified) # or is this one better (grabbed from material_review_url)? @@ -319,16 +335,19 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) if wp_json_item.get("material_altersstufe") is not None: # age range is returned as a list of --Strings, possible return values are: # e.g. "01-05", "05-10", "10-13", "13-15", "15-19" and "18-99" - age_regex = re.compile(r'(\d{1,2})-(\d{1,2})') + age_regex = re.compile(r"(\d{1,2})-(\d{1,2})") age_range = set() age_range_item_loader = LomAgeRangeItemLoader() for item in wp_json_item.get("material_altersstufe"): - age_range_temp = item.get("name") - age_from = str(age_regex.search(age_range_temp).group(1)) - age_to = str(age_regex.search(age_range_temp).group(2)) - age_range.add(age_from) - age_range.add(age_to) - # print("FINAL AGE_RANGE: min = ", min(age_range), " max = ", max(age_range)) + age_range_raw: str = item.get("name") + match_age_from: re.Match[str] | None = age_regex.search(age_range_raw) + match_age_to: re.Match[str] | None = age_regex.search(age_range_raw) + if match_age_from: + age_from: str = match_age_from.group(1) + age_range.add(age_from) + if match_age_to: + age_to: str = match_age_to.group(2) + age_range.add(age_to) if len(age_range) != 0: age_range_item_loader.add_value("fromRange", min(age_range)) age_range_item_loader.add_value("toRange", max(age_range)) @@ -381,7 +400,8 @@ def get_metadata_from_review_url(self, response: scrapy.http.Response, **kwargs) lic.add_value("url", self.mapping_copyright_url[key]) break - # by default, all materials should be CC_BY_SA - according to the rpi-virtuell ToS + # (original assumption during the initial development of this crawler in regard to licenses: + # by default, all materials should be CC_BY_SA - according to the rpi-virtuell ToS) # changed/decided on 2022-10-13: We can't assume that this license is correct and will not set any license if response.xpath('//a[contains(@href,"' + "?fwp_verfuegbarkeit=kostenpflichtig" + '")]').get(): From 457351c2e45974ce03e0e53d16149fbf4f7edd5d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 17 Nov 2023 01:17:21 +0100 Subject: [PATCH 429/590] feat: EduSharingSourceTemplateHelper utility (squashed) - first version of the "Quellendatensatz"-Template feature - when the .env setting for edu-sharing source templates is explicitly enabled, we do expect whitelisted metadata properties to be available in the edu-sharing API response and 
DO NOT want to crawl items without them -- raises ValueError when 'source template'-setting is active, but whitelisted metadata property retrieval was not successful --- .../edu_sharing_source_template_helper.py | 317 ++++++++++++++++++ 1 file changed, 317 insertions(+) create mode 100644 converter/util/edu_sharing_source_template_helper.py diff --git a/converter/util/edu_sharing_source_template_helper.py b/converter/util/edu_sharing_source_template_helper.py new file mode 100644 index 00000000..60611687 --- /dev/null +++ b/converter/util/edu_sharing_source_template_helper.py @@ -0,0 +1,317 @@ +import logging +from pprint import pp + +import requests + +from converter import env + +logging.basicConfig(level=logging.DEBUG) +log = logging.getLogger(__name__) + + +class EduSharingSourceTemplateHelper: + """ + Helper class for retrieving (whitelisted) metadata properties from an edu-sharing crawler "source template" + (= "Quellen-Datensatz"-Template) from a specified edu-sharing repository. + The retrieved metadata properties will later be used as a fallback for crawler items when a crawler couldn't + scrape a specific metadata property by itself. + + This feature REQUIRES an API endpoint in the edu-sharing repository (available in v8.1 or higher)! + """ + + _edu_sharing_base_url: str = "https://localhost:8000/edu-sharing/" + _api_path: str = "rest/search/v1/queries/" + _repository: str = "-home-" + _meta_data_set: str = "mds_oeh" + _api_endpoint: str = "wlo_crawler_element" + _api_endpoint_params: str = "propertyFilter=-all-" + _url: str = ( + f"{_edu_sharing_base_url}{_api_path}{_repository}" + f"/{_meta_data_set}" + f"/{_api_endpoint}?{_api_endpoint_params}" + ) + + _headers: dict = { + "accept": "application/json", + "Content-Type": "application/json", + } + _crawler_name: str = None + _payload: dict = dict() + + # ToDo: + # - code cleanup (improve readability of logging messages) + # - implement pytest test-scenarios + + def __init__(self, crawler_name: str = None): + """ + Initialize the 'source template'-helper class with the provided settings from the '.env'-file and prepare the + API queryy. + + After initiating the EduSharingSourceTemplateHelper class, + call "get_whitelisted_metadata_properties()" on its instance. + Example: + + >>> esth = EduSharingSourceTemplateHelper(crawler_name="zum_klexikon_spider") + >>> whitelisted_properties: dict = esth.get_whitelisted_metadata_properties() + + :param crawler_name: the spider_name ('spider.friendlyName'), e.g. 
"zum_klexikon_spider" + """ + if crawler_name: + self._set_crawler_name_for_payload(crawler_name=crawler_name) + self._initiate_from_dotenv() + self._build_payload() + + def _initiate_from_dotenv(self): + edu_sharing_source_template_repository_from_dotenv: str = env.get( + key="EDU_SHARING_SOURCE_TEMPLATE_BASE_URL", allow_null=True, default=None + ) + edu_sharing_base_url_from_dot_env: str = env.get(key="EDU_SHARING_BASE_URL", allow_null=True, default=None) + if edu_sharing_source_template_repository_from_dotenv: + # explicitly specify from which edu-sharing repository a "Quellen-Datensatz"-Template should be retrieved + # (e.g., if you're crawling against pre-Staging, but want to fetch the template from either Prod or Staging) + self._set_edu_sharing_url(edu_sharing_source_template_repository_from_dotenv) + elif edu_sharing_base_url_from_dot_env: + # fallback for convenience: if no repository was explicitly set in the .env, we assume that the crawler + # source template shall be fetched from the same edu-sharing repository that is used for storing the items + # (e.g., crawling against production) + self._set_edu_sharing_url(edu_sharing_base_url_from_dot_env) + else: + log.info( + f"Could not read '.env'-Setting 'EDU_SHARING_BASE_URL'. Please check your '.env'-file! " + f"(For additional help, see: oeh-search-etl/converter/.env.example )." + ) + pass + + def _set_edu_sharing_url(self, edu_sharing_source_template_repository: str): + self._edu_sharing_base_url = edu_sharing_source_template_repository + self._url = ( + f"{self._edu_sharing_base_url}{self._api_path}" + f"{self._repository}/" + f"{self._meta_data_set}/" + f"{self._api_endpoint}?{self._api_endpoint_params}" + ) + + def _set_crawler_name_for_payload(self, crawler_name: str): + self._crawler_name = crawler_name + + def _build_payload(self) -> dict | None: + """ + Build JSON payload object. Class variable 'crawler_name' needs to be set beforehand. + + :return: payload object as 'dict' or None. + """ + if self._crawler_name: + payload: dict = { + "criteria": [ + { + "property": "ccm:general_identifier", + "values": [f"{self._crawler_name}"], + } + ], + } + self._payload = payload + return payload + else: + log.error( + f"Cannot build query payload without valid crawler_name. Please make sure that you instantiate " + f"EduSharingTemplateHelper with a valid 'crawler_name'-parameter!" + ) + return None + + def _retrieve_whitelisted_metadata_properties(self) -> dict | None: + """ + Query the edu-sharing repository for a crawler 'source dataset'-template (= "Quellen-Datensatz"-Template) and + return the whitelisted metadata properties as a dict. + If the response was invalid for whatever reason, return None. + + :return: whitelisted metadata properties as dict or None. + """ + response: requests.Response = requests.request("POST", url=self._url, json=self._payload, headers=self._headers) + + status_code: int = response.status_code + if status_code == 200: + # ToDo: even if the crawler_name doesn't exist, the edu-sharing response will be HTTP-Status-Code 200: + # - ALWAYS check validity of 'nodes' and 'pagination'! (-> 'nodes' is empty & 'pagination.count' == 0) + try: + result_dict = response.json() + except requests.exceptions.JSONDecodeError as jde: + log.error(f"The edu-sharing response could not be parsed as JSON. 
Response:\n" f"{response.text}") + raise jde + + try: + pagination: dict = result_dict["pagination"] + pagination_total = pagination["total"] + pagination_count = pagination["count"] + except KeyError: + log.error( + f"Missing 'pagination'-object in edu-sharing response. " + f"Aborting EduSharingSourceTemplateHelper process..." + ) + raise KeyError + + if pagination_count and pagination_total and pagination_count == 1 and pagination_total == 1: + # this is our happy case: + # 'count' and 'total' should BOTH be 1 if there is a (valid) crawler source dataset + pass + else: + # unexpected API behavior -> abort here by returning None + log.error( + f"The edu-sharing API returned an unexpected number of crawler 'source template' results:\n" + f"Expected 'pagination.count': 1 (received: {pagination_count} ) // " + f"expected 'pagination.total': 1 (received: {pagination_total} )" + ) + if pagination_count == 0 and pagination_total == 0: + log.error( + f"Please make sure that a 'source template' ('Quellen-Datensatz'-template) for crawler " + f"'{self._crawler_name}' exists within the specified edu-sharing repository " + f"{self._edu_sharing_base_url} !" + ) + if pagination_count > 1 or pagination_total > 1: + log.error( + f"edu-sharing returned more than one 'crawler source template' for the specified " + f"crawler '{self._crawler_name}'. " + ) + return None + + nodes_list: list[dict] = result_dict["nodes"] + if nodes_list and isinstance(nodes_list, list) and len(nodes_list) == 1: + # 'nodes' should contain exactly 1 dict -> if more are returned, the API shows unexpected behaviour + nodes: dict = nodes_list[0] + nodes_properties: dict = nodes["properties"] + _whitelisted_properties: dict = dict() + _oeh_cdi: str = "ccm:oeh_crawler_data_inherit" + """The property 'ccm:oeh_crawler_data_inherit' contains all whitelisted property keys, but NOT their + values!""" + try: + if _oeh_cdi in nodes_properties: + # checking if "ccm:oeh_crawler_data_inherit" is part of the API response + # the whitelist-property should be available within 'nodes[0].properties' and might look like: + # "ccm:oeh_crawler_data_inherit": [ + # "ccm:containsAdvertisement", + # "ccm:oeh_quality_login", + # "ccm:oeh_languageTarget", + # "ccm:oeh_languageLevel" + # ] + whitelist_keys: list[str] = nodes_properties[_oeh_cdi] + log.info(f"'{_oeh_cdi}' contains the following properties: \n" f"{whitelist_keys}") + if whitelist_keys and isinstance(whitelist_keys, list): + for whitelist_key in whitelist_keys: + # the values for each property need to be looked up separately + whitelisted_property_value: list[str] = nodes_properties.get(whitelist_key) + # ToDo: implement check for empty properties / strings? + # OR: trust that the edu-sharing API response is always valid? + if whitelisted_property_value: + _whitelisted_properties.update({f"{whitelist_key}": whitelisted_property_value}) + else: + log.error( + f"Received unexpected value type of metadata property '{_oeh_cdi}': " + f"{type(whitelist_keys)} . (Expected type: 'list[str]')" + ) + else: + log.error( + f"Could not find '{_oeh_cdi}' in edu-sharing API response. " + f"Source template retrieval FAILED!" 
+ ) + log.debug(response.text) + except KeyError as ke: + raise ke + + # the result dict with all whitelisted metadata properties might look like this example: + # _whitelisted_properties = { + # "ccm:oeh_quality_login": ["1"], + # "ccm:oeh_quality_protection_of_minors": ["0"], + # "ccm:taxonid": [ + # "http://w3id.org/openeduhub/vocabs/discipline/720", + # "http://w3id.org/openeduhub/vocabs/discipline/120", + # ], + # "cclom:general_keyword": ["Kinderlexikon", "Lexikon"], + # } + + return _whitelisted_properties + else: + log.error( + f"edu-sharing API returned an unexpected 'nodes'-object:" + f"Expected list[dict] of length 1, received length: {len(nodes_list)} .\n" + f"Please make sure that a 'source template' ('Quellendatensatz'-template) for crawler " + f"{self._crawler_name} exists within the edu-sharing repository {self._edu_sharing_base_url} !" + ) + return None + else: + # sad-case: we catch unexpected HTTP responses here + log.error( + f"Received unexpected HTTP response (status code: {status_code} ) from the edu-sharing " + f"repository while trying to retrieve whitelisted 'source template'-metadata-properties." + ) + if status_code == 401: + # ToDo: specify exact edu-sharing version that provides the necessary API endpoint + log.error( + f"edu-sharing API returned HTTP Status Code {status_code}. " + f"(This might happen when the necessary API endpoint might not be available (yet) in the " + f"edu-sharing repository (edu-sharing v8.1+ required).)" + ) + if status_code == 500: + # code 500 might be accompanied by 'java.lang.NullPointerException' -> print whole response + # happens when the payload of our submitted request was empty + log.error(f"edu-sharing API returned HTTP status code {status_code}:\n" f"{response.text}") + response.raise_for_status() + # ToDo: extend Error-Handling for additional edge-cases (as / if they occur) + return None + + def get_whitelisted_metadata_properties(self) -> dict | None: + """ + Retrieve whitelisted metadata properties from a specified edu-sharing repository by using a 'source template' + (= "Quellen-Datensatz"-Template) which is expected to contain a "ccm:oeh_crawler_data_inherit"-property. + + :return: a 'dict' containing whitelisted metadata property key-value pairs or None + """ + # check user-defined .env Setting first if 'crawler source dataset' should be ignored: + est_enabled: bool = env.get_bool(key="EDU_SHARING_SOURCE_TEMPLATE_ENABLED", allow_null=True, default=None) + if est_enabled: + log.info( + f".env setting 'EDU_SHARING_SOURCE_TEMPLATE_ENABLED' is ACTIVE. Trying to retrieve whitelisted " + f"properties..." + ) + self._payload = self._build_payload() + if self._payload: + whitelisted_properties: dict = self._retrieve_whitelisted_metadata_properties() + if whitelisted_properties: + return whitelisted_properties + else: + # intentionally raising a ValueError to stop a crawl process when the 'source template'-setting + # is active. (If the .env variable is explicitly set, we expect whitelisted properties to be + # available and DO NOT want to crawl without them.) + raise ValueError( + "Failed to retrieve whitelisted metadata properties from edu-sharing " + "'source template' (= 'Quellendatensatz-Template')! " + "Aborting crawl process..." + ) + else: + log.error( + f"Could not build payload object to retrieve 'source template'-properties from " + f"edu-sharing repository. 
" + f"\nJSON Payload for crawler_name '{self._crawler_name}' was:\n" + f"{self._payload}" + f"\n(payload REQUIRES a valid 'crawler_name'!)" + ) + log.info( + "Aborting crawl... (If you didn't mean to retrieve an edu-sharing 'source template', please " + "set the .env variable 'EDU_SHARING_SOURCE_TEMPLATE_ENABLED' to False!)" + ) + return None + else: + # if the setting is explicitly disabled, do nothing -> continue with normal crawler behaviour + log.info( + f"Recognized '.env'-Setting EDU_SHARING_SOURCE_TEMPLATE_ENABLED: '{est_enabled}'.\n" + f"Crawler source dataset will be IGNORED. Continuing with default crawler behaviour..." + ) + return None + + +if __name__ == "__main__": + log.setLevel("DEBUG") + crawler_name_for_testing: str = "zum_deutschlernen_spider" + # crawler_name_for_testing: str = "does_not_exist_spider" + est_helper: EduSharingSourceTemplateHelper = EduSharingSourceTemplateHelper(crawler_name=crawler_name_for_testing) + whitelisted_props: dict | None = est_helper.get_whitelisted_metadata_properties() + print("Whitelisted properties: ") + pp(whitelisted_props, indent=4) From 792c868e4aa0c1c146a687c74af0aba504b7aa12 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 17 Nov 2023 19:51:42 +0100 Subject: [PATCH 430/590] feat: attach whitelisted metadata properties from an "edu-sharing source template" to crawled items es_connector.py - feat: check if a crawler 'source template' (= "Quellen-Datensatz"-Template for a specific crawler) was successfully retrieved from the edu-sharing repository -- if metadata properties were marked as whitelisted, save them to the 'BaseItem.custom'-field - change: move "BaseItem.custom"-field processing to earlier part of the program flow - hint / implementation detail: Edu-Sharing 'source template' is checked in a backwards-compatible manner -- to avoid breaking crawlers that do not have a "Quellendatensatz"-Template, check the "EDU_SHARING_SOURCE_TEMPLATE_ENABLED"-.env variable and default to False if the variable does not exist pipelines.py - feat: try to retrieve 'crawler source template' from new edu-sharing API endpoint and attach its result to spider process Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 31 ++++++++++++++++++++++++++----- converter/pipelines.py | 26 ++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 5 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 8875277b..0eec2573 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -113,6 +113,8 @@ class CreateGroupType(Enum): enabled: bool _client_async = httpx.AsyncClient() _sem: Semaphore = asyncio.Semaphore(25) + source_template_properties: dict # whitelisted crawler source metadata properties + # (from a "Quellen-Datensatz" / crawler source template) which should be attached to each processed item def __init__(self): cookie_threshold = env.get("EDU_SHARING_COOKIE_REBUILD_THRESHOLD", True) @@ -364,6 +366,30 @@ def transform_item(self, uuid, spider, item): if "origin" in item: spaces["ccm:replicationsourceorigin"] = item["origin"] # TODO currently not mapped in edu-sharing + if hasattr(spider, "EST_WHITELIST"): + # check if there were whitelisted metadata properties in the edu-sharing source template + # (= "Quellen-Datensatz"-Template) that need to be attached to all items + whitelisted_properties: dict = getattr(spider, "EST_WHITELIST") + if whitelisted_properties: + # if whitelisted properties exist, we re-use the 'custom' 
field in our data model (if possible). + # by inserting the whitelisted metadata properties early in the program flow, they should automatically + # be overwritten by the "real" metadata fields (if metadata was scraped for the specific field by the + # crawler) + if hasattr(item, "custom"): + custom: dict = item["custom"] + if custom: + # if 'BaseItem.custom' already exists -> update the dict + custom.update(whitelisted_properties) + item["custom"] = custom + else: + # otherwise create the 'BaseItem.custom'-field + item["custom"] = whitelisted_properties + + # map custom fields directly into the edu-sharing properties: + if "custom" in item: + for key in item["custom"]: + spaces[key] = item["custom"][key] + self.map_license(spaces, item["license"]) if "description" in item["lom"]["general"]: spaces["cclom:general_description"] = item["lom"]["general"]["description"] @@ -486,11 +512,6 @@ def transform_item(self, uuid, spider, item): if "toRange" in tar: spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] - # map custom fields directly into the edu-sharing properties - if "custom" in item: - for key in item["custom"]: - spaces[key] = item["custom"][key] - # intendedEndUserRole = Field(output_processor=JoinMultivalues()) # discipline = Field(output_processor=JoinMultivalues()) # educationalContext = Field(output_processor=JoinMultivalues()) diff --git a/converter/pipelines.py b/converter/pipelines.py index 43d15684..745a43c4 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -39,6 +39,7 @@ from converter.constants import * from converter.es_connector import EduSharing from converter.items import BaseItem +from converter.util.edu_sharing_source_template_helper import EduSharingSourceTemplateHelper from converter.util.language_mapper import LanguageMapper from converter.web_tools import WebTools, WebEngine from valuespace_converter.app.valuespaces import Valuespaces @@ -756,6 +757,31 @@ def __init__(self): super().__init__() self.counter = 0 + def open_spider(self, spider): + logging.debug("Entering EduSharingStorePipeline...\n" + "Checking if 'crawler source template' ('Quellendatensatz-Template') should be used " + "(see: 'EDU_SHARING_SOURCE_TEMPLATE_ENABLED' .env setting)...") + est_enabled: bool = env.get_bool("EDU_SHARING_SOURCE_TEMPLATE_ENABLED", allow_null=True, default=False) + # defaults to False for backwards-compatibility. + # (The EduSharingSourceTemplateHelper class is explicitly set to throw errors and abort a crawl if this setting + # is enabled! Activate this setting on a per-crawler basis!) + if est_enabled: + # "Quellendatensatz-Templates" might not be available on every edu-sharing instance. This feature is only + # active if explicitly set via the .env file. (This choice was made to avoid errors with + # old or unsupported crawlers.) + est_helper: EduSharingSourceTemplateHelper = EduSharingSourceTemplateHelper(crawler_name=spider.name) + whitelisted_properties: dict | None = est_helper.get_whitelisted_metadata_properties() + if whitelisted_properties: + setattr(spider, "EST_WHITELIST", whitelisted_properties) + logging.debug(f"Edu-sharing source template retrieval was successful. " + f"The following metadata properties will be whitelisted for all items:\n" + f"{whitelisted_properties}") + else: + logging.error(f"Edu-Sharing Source Template retrieval failed. " + f"(Does a 'Quellendatensatz' exist in the edu-sharing repository for this spider?)") + else: + log.debug(f"Edu-Sharing Source Template feature is NOT ENABLED. 
Continuing EduSharingStorePipeline...") + async def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) title = "" From 1f1e1b104a0206c22f4338f4363ea71301920700 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 24 Jan 2024 14:48:57 +0100 Subject: [PATCH 431/590] change: wait for 'load'-event before fetching HTML / screenshots with Playwright - waiting for 'load' to be fired fixes incomplete website screenshots with zum_klexikon_spider Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/web_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/web_tools.py b/converter/web_tools.py index e2c1da29..b23cf714 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -206,7 +206,7 @@ async def fetchDataPlaywright(url: str): async with async_playwright() as p: browser = await p.chromium.connect_over_cdp(endpoint_url=env.get("PLAYWRIGHT_WS_ENDPOINT")) page = await browser.new_page() - await page.goto(url, wait_until="domcontentloaded", timeout=90000) + await page.goto(url, wait_until="load", timeout=90000) # waits for a website to fire the DOMContentLoaded event or for a timeout of 90s # since waiting for 'networkidle' seems to cause timeouts content = await page.content() From 3da1131583c2d9c96d20916a38555071a6fc5f27 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 24 Jan 2024 15:12:03 +0100 Subject: [PATCH 432/590] docs: add edu-sharing "source template"-related documentation - fix whitespace warnings Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/.env.example | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/converter/.env.example b/converter/.env.example index b1889b99..6b6393bb 100644 --- a/converter/.env.example +++ b/converter/.env.example @@ -2,19 +2,19 @@ # Add a url for your log file. If not set, stdoutput will be used #LOG_FILE = "/var/log/scrapy.log" # Set the level for logs here. Supported values: "DEBUG", "INFO", "WARNING", "ERROR" -LOG_LEVEL = "WARNING" +LOG_LEVEL="WARNING" # --- Crawling-modes: control where crawled items should be stored/exported. # Available modes: 'edu-sharing', 'csv', 'json' or 'None' -MODE = "csv" +MODE="csv" # ------ CSV Export settings (Only used if MODE == "csv"!): # csv rows to export from dataset (comma seperated! field-names according to items.py!) 
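# (Rough illustration: each entry is a dot-separated path into the nested item structure defined in items.py,
#  e.g. "lom.general.title" points at the "title" field of the "general" object inside "lom", and
#  "valuespaces.discipline" at the "discipline" field of "valuespaces".)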
-CSV_ROWS = "lom.general.title,lom.general.description,lom.general.keyword,lom.technical.location,valuespaces.discipline,valuespaces.learningResourceType" +CSV_ROWS="lom.general.title,lom.general.description,lom.general.keyword,lom.technical.location,valuespaces.discipline,valuespaces.learningResourceType" # --- 'Splash'-Integration settings for the local container, # for more information, see https://splash.readthedocs.io/en/stable/ -DISABLE_SPLASH = False -SPLASH_URL = "http://localhost:8050" +DISABLE_SPLASH=False +SPLASH_URL="http://localhost:8050" # --- headless-browser settings for the local container: # PYPPETEER Integration settings, as needed for the local container (as used in kmap_spider.py) @@ -24,9 +24,9 @@ PYPPETEER_WS_ENDPOINT="ws://localhost:3000" PLAYWRIGHT_WS_ENDPOINT="ws://localhost:3000" # --- Edu-Sharing instance that the crawlers should upload to -EDU_SHARING_BASE_URL = "http://localhost:8080/edu-sharing/" -EDU_SHARING_USERNAME = "admin" -EDU_SHARING_PASSWORD = "admin" +EDU_SHARING_BASE_URL="http://localhost:8080/edu-sharing/" +EDU_SHARING_USERNAME="admin" +EDU_SHARING_PASSWORD="admin" # Configure if permissions of edu-sharing nodes are handled by the crawler (default true) # You may want to set this to false if you don't want to apply permissions from crawlers or have a custom implementation in the repository @@ -35,7 +35,20 @@ EDU_SHARING_PASSWORD = "admin" # EDU_SHARING_METADATASET=mds_oeh # If set to true, don't upload to (above mentioned) Edu-Sharing instance -DRY_RUN = True +DRY_RUN=True + +# --- edu-sharing Source Template Settings +# Retrieve (whitelisted) metadata properties from an edu-sharing "source template" ("Quellen-Datensatz"-Template) and +# attach those metadata properties to each item. (This feature is DISABLED by default. Possible values: True / False) +# Before enabling this feature, make sure that: +# - a "Quellen-Datensatz" exists for the within the specified edu-sharing repository +# (see: EDU_SHARING_BASE_URL setting above!) +# - AND contains the "ccm:oeh_crawler_data_inherit" property ! +EDU_SHARING_SOURCE_TEMPLATE_ENABLED=False +# Define a separate "source template"-repository for testing/debugging. 
This setting is useful if +# the "Quellen-Datensatz" does not exist on the same edu-sharing repository as defined in EDU_SHARING_BASE_URL +# (e.g., when the "Quellen-Datensatz"-Template is only available on Staging, but you want save the items on pre-Staging) +#EDU_SHARING_SOURCE_TEMPLATE_BASE_URL="http://localhost:8080/edu-sharing/" # --- OERSI-specific settings (oersi_spider): # Only crawl a specific metadata provider from OERSI (separate multiple providers by semicolon!): From 9a86ef4e9eead8bd6423f91f2fcadc86ed705ad1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 25 Jan 2024 14:49:13 +0100 Subject: [PATCH 433/590] improve readability of source template variable name - change: whitelisted metadata properties from an edu-sharing "source template" are attached to the spider process as "edu_sharing_source_template_whitelist" - code cleanup: removed unused debugging variable ("source_template_properties") Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/es_connector.py | 6 ++---- converter/pipelines.py | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 0eec2573..4371695b 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -113,8 +113,6 @@ class CreateGroupType(Enum): enabled: bool _client_async = httpx.AsyncClient() _sem: Semaphore = asyncio.Semaphore(25) - source_template_properties: dict # whitelisted crawler source metadata properties - # (from a "Quellen-Datensatz" / crawler source template) which should be attached to each processed item def __init__(self): cookie_threshold = env.get("EDU_SHARING_COOKIE_REBUILD_THRESHOLD", True) @@ -366,10 +364,10 @@ def transform_item(self, uuid, spider, item): if "origin" in item: spaces["ccm:replicationsourceorigin"] = item["origin"] # TODO currently not mapped in edu-sharing - if hasattr(spider, "EST_WHITELIST"): + if hasattr(spider, "edu_sharing_source_template_whitelist"): # check if there were whitelisted metadata properties in the edu-sharing source template # (= "Quellen-Datensatz"-Template) that need to be attached to all items - whitelisted_properties: dict = getattr(spider, "EST_WHITELIST") + whitelisted_properties: dict = getattr(spider, "edu_sharing_source_template_whitelist") if whitelisted_properties: # if whitelisted properties exist, we re-use the 'custom' field in our data model (if possible). # by inserting the whitelisted metadata properties early in the program flow, they should automatically diff --git a/converter/pipelines.py b/converter/pipelines.py index 745a43c4..4f78f151 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -772,7 +772,7 @@ def open_spider(self, spider): est_helper: EduSharingSourceTemplateHelper = EduSharingSourceTemplateHelper(crawler_name=spider.name) whitelisted_properties: dict | None = est_helper.get_whitelisted_metadata_properties() if whitelisted_properties: - setattr(spider, "EST_WHITELIST", whitelisted_properties) + setattr(spider, "edu_sharing_source_template_whitelist", whitelisted_properties) logging.debug(f"Edu-sharing source template retrieval was successful. 
" f"The following metadata properties will be whitelisted for all items:\n" f"{whitelisted_properties}") From 96f09857846a62eefc4ea5237b6f8a67959c5c67 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 25 Jan 2024 22:46:36 +0100 Subject: [PATCH 434/590] kmap_spider v0.0.7 (rework with additional metadata) Bugfixes and Improvements: - fix: getHash() prefers "dateModified" from JSON_LD and falls back to "datePublished" if modified date was not available - fix: init and hasChanged() - feat: if resolved URL differs from "mainEntity.mainEntityOfPage"-URL (from JSON_LD), save both to "technical.location" - feat: skip item if 'mainEntity' metadata was not available within JSON_LD (-> empty knowledge map that should not be parsed) - fix: don't accidentally request a URL twice with "WebTools" while filling up the ResponseItemLoader - style: code formatting via black New or improved metadata properties: - feat: retrieve separate "lifecycle.author" / "lifecycle.publisher" metadata - feat: retrieve "educational.typicalAgeRange" metadata from JSON_LD - feat: retrieve "educationalContext" from JSON_LD (-> ValuespaceItem.educationalContext) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/kmap_spider.py | 324 ++++++++++++++++++++++++------- 1 file changed, 250 insertions(+), 74 deletions(-) diff --git a/converter/spiders/kmap_spider.py b/converter/spiders/kmap_spider.py index d6cbc78c..5d274b76 100644 --- a/converter/spiders/kmap_spider.py +++ b/converter/spiders/kmap_spider.py @@ -1,35 +1,45 @@ import json import logging +import re import scrapy from scrapy import Selector from scrapy.spiders import CrawlSpider from converter.constants import Constants -from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader +from converter.es_connector import EduSharing +from converter.items import ( + BaseItemLoader, + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, + ResponseItemLoader, + LomAgeRangeItemLoader, +) from converter.spiders.base_classes import LomBase from converter.util.sitemap import from_xml_response from converter.web_tools import WebEngine, WebTools +logger = logging.getLogger(__name__) + class KMapSpider(CrawlSpider, LomBase): name = "kmap_spider" friendlyName = "KMap.eu" - version = "0.0.6" # last update: 2022-05-20 - sitemap_urls = [ - "https://kmap.eu/server/sitemap/Mathematik", - "https://kmap.eu/server/sitemap/Physik" - ] - custom_settings = { - "ROBOTSTXT_OBEY": False, - "AUTOTHROTTLE_ENABLED": True, - # "AUTOTHROTTLE_DEBUG": True - } - allowed_domains = ['kmap.eu'] + version = "0.0.7" # last update: 2024-01-25 + sitemap_urls = ["https://kmap.eu/server/sitemap/Mathematik", "https://kmap.eu/server/sitemap/Physik"] + custom_settings = {"ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True} + allowed_domains = ["kmap.eu"] # keep the console clean from spammy DEBUG-level logging messages, adjust as needed: - logging.getLogger('websockets.server').setLevel(logging.ERROR) - logging.getLogger('websockets.protocol').setLevel(logging.ERROR) + logging.getLogger("websockets.server").setLevel(logging.ERROR) + logging.getLogger("websockets.protocol").setLevel(logging.ERROR) + + def __init__(self, **kwargs): + 
LomBase.__init__(self, **kwargs) def start_requests(self) -> scrapy.Request: for sitemap_url in self.sitemap_urls: @@ -44,18 +54,52 @@ def parse_sitemap(self, response) -> scrapy.Request: """ sitemap_items = from_xml_response(response) for sitemap_item in sitemap_items: - temp_dict = { - 'lastModified': sitemap_item.lastmod - } + temp_dict = {"lastModified": sitemap_item.lastmod} yield scrapy.Request(url=sitemap_item.loc, callback=self.parse, cb_kwargs=temp_dict) def getId(self, response=None) -> str: return response.url - def getHash(self, response=None) -> str: - pass + def getHash(self, response: scrapy.http.Response = None, json_ld: dict = None) -> str | None: + try: + main_entity: dict = json_ld["mainEntity"] + if "dateModified" in main_entity: + date_modified: str = main_entity["dateModified"] + if date_modified: + item_hash: str = f"{date_modified}v{self.version}" + return item_hash + elif "datePublished" in main_entity: + date_published: str = main_entity["datePublished"] + if date_published: + item_hash: str = f"{date_published}v{self.version}" + return item_hash + except KeyError: + logger.warning( + f"KMap item {response.url} did not provide the necessary timestamps for building a hash. " + f"Dropping item..." + ) + return None - async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + def hasChanged(self, response=None, json_ld: dict = None) -> bool: + if self.forceUpdate: + return True + if self.uuid: + if self.getUUID(response) == self.uuid: + logging.info(f"Matching requested id: {self.uuid} // item URL: {response.url}") + return True + return False + if self.remoteId: + if str(self.getId(response)) == self.remoteId: + logging.info(f"Matching requested id: {self.remoteId} // item URL: {response.url}") + return True + return False + db = EduSharing().find_item(self.getId(response), self) + changed = db is None or db[1] != self.getHash(response=response, json_ld=json_ld) + if not changed: + logging.info(f"Item {self.getId(response)} (uuid: {db[0]}) has not changed") + return changed + + async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader | None: """ Scrapy Contracts: @@ -63,77 +107,209 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade @returns item 1 """ last_modified = kwargs.get("lastModified") - url_data_web_tools_dict = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright) - splash_html_string = url_data_web_tools_dict.get('html') - json_ld_string: str = Selector(text=splash_html_string).xpath('//*[@id="ld"]/text()').get() - json_ld: dict = json.loads(json_ld_string) - # TODO: skip item method - (skips item if it's an empty knowledge map) + url_data_playwright = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright) + html_string: str = url_data_playwright.get("html") + + try: + json_ld_string: str = Selector(text=html_string).xpath('//*[@id="ld"]/text()').get() + json_ld: dict = json.loads(json_ld_string) + except TypeError: + logger.warning(f"Item {response.url} did not contain a JSON_LD. 
Dropping item...") + return + + main_entity: dict | None = None + try: + if "mainEntity" in json_ld: + main_entity: dict = json_ld["mainEntity"] + except KeyError: + # while 'mainEntity' should be available within every (complete) knowledge card, placeholder cards which + # are still being worked on might not have this metadata + logger.warning( + f"Item {response.url} did not contain a complete JSON_LD: the REQUIRED 'mainEntity' dict " + f"was not available for parsing. (If you see this warning more often than a few times, " + f"a crawler update might be necessary!) Dropping item..." + ) + return base = BaseItemLoader() - base.add_value('sourceId', response.url) - hash_temp = json_ld.get("mainEntity").get("datePublished") - hash_temp += self.version - base.add_value('hash', hash_temp) - base.add_value('lastModified', last_modified) + base.add_value("sourceId", self.getId(response=response)) + hash_value: str = self.getHash(response=response, json_ld=json_ld) + if hash_value: + base.add_value("hash", hash_value) + else: + # drop items that cannot be hashed + return + + if self.shouldImport(response) is False: + logger.debug(f"Skipping entry {response.url} because shouldImport() returned False.") + return None + if self.getId(response) is not None and self.getHash(response=response, json_ld=json_ld) is not None: + if not self.hasChanged(response=response, json_ld=json_ld): + return None + + base.add_value("lastModified", last_modified) # Thumbnails have their own url path, which can be found in the json+ld: # "thumbnailUrl": "/snappy/Physik/Grundlagen/Potenzschreibweise" - # e.g. for the item https://kmap.eu/app/browser/Physik/Grundlagen/Potenzschreibweise + # e.g., for the item https://kmap.eu/app/browser/Physik/Grundlagen/Potenzschreibweise # the thumbnail can be found at https://kmap.eu/snappy/Physik/Grundlagen/Potenzschreibweise - thumbnail_path = json_ld.get("mainEntity").get("thumbnailUrl") - if thumbnail_path is not None: - base.add_value('thumbnail', 'https://kmap.eu' + thumbnail_path) + thumbnail_path = main_entity.get("thumbnailUrl") + # ToDo: KMap also serves "og:image", which seems to provide slightly different URL paths, + # but apparently the same image files + if thumbnail_path: + base.add_value("thumbnail", f"https://kmap.eu{thumbnail_path}") lom = LomBaseItemloader() general = LomGeneralItemloader() - general.add_value('identifier', json_ld.get("mainEntity").get("mainEntityOfPage")) - keywords_string: str = json_ld.get("mainEntity").get("keywords") - keyword_list = keywords_string.rsplit(", ") - general.add_value('keyword', keyword_list) - general.add_value('title', json_ld.get("mainEntity").get("name")) - general.add_value('description', json_ld.get("mainEntity").get("description")) - general.add_value('language', json_ld.get("mainEntity").get("inLanguage")) - lom.add_value('general', general.load_item()) + general.add_value("identifier", main_entity.get("mainEntityOfPage")) + if main_entity: + if "keywords" in main_entity: + keywords_string: str = main_entity.get("keywords") + if keywords_string: + keyword_list: list[str] = keywords_string.rsplit(", ") + if keyword_list: + general.add_value("keyword", keyword_list) + if "name" in main_entity: + general.add_value("title", main_entity.get("name")) + if "description" in main_entity: + general.add_value("description", main_entity.get("description")) + if "inLanguage" in main_entity: + general.add_value("language", main_entity.get("inLanguage")) + lom.add_value("general", general.load_item()) technical = 
LomTechnicalItemLoader() - technical.add_value('format', 'text/html') - technical.add_value('location', response.url) - lom.add_value('technical', technical.load_item()) - - lifecycle = LomLifecycleItemloader() - lifecycle.add_value('role', 'publisher') - lifecycle.add_value('organization', json_ld.get("mainEntity").get("publisher").get("name")) - author_email = json_ld.get("mainEntity").get("publisher").get("email") - if author_email is not None: - lifecycle.add_value('email', author_email) - lifecycle.add_value('url', 'https://kmap.eu/') - lifecycle.add_value('date', json_ld.get("mainEntity").get("datePublished")) - lom.add_value('lifecycle', lifecycle.load_item()) + technical.add_value("format", "text/html") + technical.add_value("location", response.url) + if main_entity: + # if resolved url is different from JSON_LD 'mainEntity.mainEntityOfPage' URL, save both URLs + if "mainEntityOfPage" in main_entity: + maeop_url: str = main_entity["mainEntityOfPage"] + if maeop_url and maeop_url != response.url: + technical.add_value("location", maeop_url) + lom.add_value("technical", technical.load_item()) + + if main_entity: + if "publisher" in main_entity: + publisher_object: dict = main_entity["publisher"] + if publisher_object and isinstance(publisher_object, dict): + lifecycle_publisher: LomLifecycleItemloader = LomLifecycleItemloader() + lifecycle_publisher.add_value("role", "publisher") + if "name" in publisher_object: + publisher_name: str = publisher_object["name"] + if publisher_name: + lifecycle_publisher.add_value("organization", publisher_name) + if "email" in publisher_object: + publisher_email: str = publisher_object["email"] + if publisher_email: + lifecycle_publisher.add_value("email", publisher_email) + if "url" in publisher_object: + publisher_url: str = publisher_object["url"] + if publisher_url: + lifecycle_publisher.add_value("url", publisher_url) + if "datePublished" in main_entity: + date_published: str = main_entity["datePublished"] + if date_published: + lifecycle_publisher.add_value("date", date_published) + # ToDo: add publisher logo handling as soon as our item model supports it + # mainEntity.publisher.logo: logo.@type / logo.url + lom.add_value("lifecycle", lifecycle_publisher.load_item()) + if "author" in main_entity: + author_object: dict = main_entity["author"] + if author_object and isinstance(author_object, dict): + lifecycle_author: LomLifecycleItemloader = LomLifecycleItemloader() + lifecycle_author.add_value("role", "author") + if "name" in author_object: + author_name: str = author_object["name"] + if author_name and " " in author_name: + author_split: list[str] = author_name.split(sep=" ", maxsplit=1) + if author_split and len(author_split) == 2: + lifecycle_author.add_value("firstName", author_split[0]) + lifecycle_author.add_value("lastName", author_split[1]) + else: + lifecycle_author.add_value("firstName", author_name) + if "url" in author_object: + author_url: str = author_object["url"] + if author_url: + lifecycle_author.add_value("url", author_url) + if "datePublished" in main_entity: + date_published: str = main_entity["datePublished"] + if date_published: + lifecycle_author.add_value("date", date_published) + lom.add_value("lifecycle", lifecycle_author.load_item()) educational = LomEducationalItemLoader() - lom.add_value('educational', educational.load_item()) - base.add_value('lom', lom.load_item()) + if main_entity: + if "typicalAgeRange" in main_entity: + typical_age_range_raw: str = main_entity["typicalAgeRange"] + if typical_age_range_raw and 
isinstance(typical_age_range_raw, str): + if "-" in typical_age_range_raw: + # "typicalAgeRange": "15-18" + age_regex = re.compile(r"(\d{1,2})-(\d{1,2})") + match_age_range: re.Match[str] | None = age_regex.search(typical_age_range_raw) + if match_age_range: + if len(match_age_range.groups()) == 2: + age_range_from: str = match_age_range.group(1) + age_range_to: str = match_age_range.group(2) + if age_range_from and age_range_to: + age_range_loader: LomAgeRangeItemLoader = LomAgeRangeItemLoader() + age_range_loader.add_value("fromRange", age_range_from) + age_range_loader.add_value("toRange", age_range_to) + educational.add_value("typicalAgeRange", age_range_loader.load_item()) + lom.add_value("educational", educational.load_item()) + base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() - vs.add_value('new_lrt', Constants.NEW_LRT_MATERIAL) - vs.add_value('discipline', json_ld.get("mainEntity").get("about")) - vs.add_value('intendedEndUserRole', json_ld.get("mainEntity").get("audience")) - vs.add_value('new_lrt', json_ld.get("mainEntity").get("learningResourceType")) - vs.add_value('price', 'no') - vs.add_value('conditionsOfAccess', 'login required for additional features') - base.add_value('valuespaces', vs.load_item()) + # the JSON-LD provides new metadata (spotted on 2024-01-25): + # - mainEntity.educationalLevel list[str] + vs.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) + if main_entity: + if "about" in main_entity: + about: list[str] = main_entity.get("about") + if about: + vs.add_value("discipline", about) + if "audience" in main_entity: + audience: list[str] = main_entity.get("audience") + if audience: + vs.add_value("intendedEndUserRole", audience) + if "learningResourceType" in main_entity: + lrt: list[str] = main_entity.get("learningResourceType") + if lrt: + vs.add_value("learningResourceType", lrt) + if "oeh:educationalContext" in main_entity: + educational_context: list[str] = main_entity["oeh:educationalContext"] + if educational_context: + # "oeh:educationalContext": ["Sekundarstufe I", "Sekundarstufe II"] + vs.add_value("educationalContext", educational_context) + vs.add_value("price", "no") + vs.add_value("conditionsOfAccess", "login_for_additional_features") + base.add_value("valuespaces", vs.load_item()) lic = LicenseItemLoader() - lic.add_value('author', json_ld.get("mainEntity").get("author").get("name")) - lic.add_value('url', json_ld.get("mainEntity").get("license")) - base.add_value('license', lic.load_item()) + if "author" in main_entity: + if "name" in main_entity["author"]: + author_name: str = main_entity["author"] + if author_name and isinstance(author_name, str): + lic.add_value("author", author_name) + if "license" in main_entity: + license_url: str = main_entity.get("license") + if license_url: + lic.add_value("url", license_url) + base.add_value("license", lic.load_item()) permissions = super().getPermissions(response) base.add_value("permissions", permissions.load_item()) - response_itemloader: ResponseItemLoader = await super().mapResponse(response) - base.add_value('response', response_itemloader.load_item()) - # KMap doesn't deliver fulltext to neither splash nor playwright, the fulltext object will be showing up as - # 'text': 'JavaScript wird benötigt!\n\n', - # in the final "scrapy.Item". As long as KMap doesn't change the way it's delivering its JavaScript content, - # our crawler won't be able to work around this limitation. 
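+        # For orientation, a rough sketch of the JSON-LD "mainEntity" object that parse() reads above
+        # (only the key names are taken from the handling above; every value below is an illustrative
+        # placeholder, not copied from a real knowledge card):
+        #
+        # "mainEntity": {
+        #     "name": "Potenzschreibweise",
+        #     "description": "...",
+        #     "mainEntityOfPage": "https://kmap.eu/app/browser/Physik/Grundlagen/Potenzschreibweise",
+        #     "thumbnailUrl": "/snappy/Physik/Grundlagen/Potenzschreibweise",
+        #     "inLanguage": "de",
+        #     "keywords": "Physik, Grundlagen",
+        #     "datePublished": "...",
+        #     "dateModified": "...",
+        #     "publisher": {"name": "...", "email": "...", "url": "https://kmap.eu/"},
+        #     "author": {"name": "...", "url": "..."},
+        #     "typicalAgeRange": "15-18",
+        #     "about": ["..."],
+        #     "audience": ["..."],
+        #     "learningResourceType": ["..."],
+        #     "oeh:educationalContext": ["Sekundarstufe I", "Sekundarstufe II"],
+        #     "license": "..."
+        # }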
+ response_itemloader: ResponseItemLoader = ResponseItemLoader() + if url_data_playwright: + if html_string: + response_itemloader.add_value("html", html_string) + if "screenshot_bytes" in url_data_playwright: + sbytes: bytes = url_data_playwright["screenshot_bytes"] + if sbytes: + base.add_value("screenshot_bytes", sbytes) + # KMap doesn't deliver fulltext to neither splash nor playwright. + # The fulltext object will be showing up as + # 'text': 'JavaScript wird benötigt!\n\n', + # in the final "scrapy.Item". As long as KMap doesn't change the way it's delivering its JavaScript content, + # our crawler won't be able to work around this limitation. + base.add_value("response", response_itemloader.load_item()) return base.load_item() From 7912b55fd91c83cce87daf1b29185d60cb94246d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 25 Jan 2024 22:51:34 +0100 Subject: [PATCH 435/590] change: improve readability of logging messages during hash check - while checking the edu-sharing hash of an item, the 'sourceId'-value is logged to give you a rough idea which item is being handled by the pipeline background: - the previous logging messages were not nearly helpful enough as they didn't specify which item is being processed -- (the concurrent nature of Scrapy's item processing (and the recent 'async'-related changes) decreased the usefulness of the old logging messages even further and basically made them useless) Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/pipelines.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 4f78f151..30f0b98c 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -649,7 +649,10 @@ def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) if "hash" not in item: log.error( - "The spider did not provide a hash on the base object. The hash is required to detect changes on an element. May use the last modified date or something similar" + "The spider did not provide a hash on the base object. " + "The hash is required to detect changes on an element. " + "(You should use the last modified date or something similar, " + "e.g. 
'v')" ) item["hash"] = time.time() @@ -661,9 +664,9 @@ def process_item(self, raw_item, spider): db_item = self.find_item(item["sourceId"], spider) if db_item: if item["hash"] != db_item[1]: - log.debug("hash has changed, continuing pipelines") + log.debug(f"hash has changed, continuing pipelines for item {item['sourceId']}") else: - log.debug("hash unchanged, skip item") + log.debug(f"hash unchanged, skipping item {item['sourceId']}") # self.update(item['sourceId'], spider) # for tests, we update everything for now # activate this later From d60a6483e03fead55117bf36e32b2f20b345a537 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 26 Jan 2024 09:27:50 +0100 Subject: [PATCH 436/590] chore: update dependencies Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- poetry.lock | 823 ++++++++++++++++++++++------------------------- pyproject.toml | 15 +- requirements.txt | 56 ++-- 3 files changed, 423 insertions(+), 471 deletions(-) diff --git a/poetry.lock b/poetry.lock index e8e2dd26..a2e70943 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,19 +2,20 @@ [[package]] name = "anyio" -version = "4.1.0" +version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, - {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] @@ -54,21 +55,22 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] 
name = "automat" @@ -90,64 +92,68 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] [[package]] name = "babel" -version = "2.13.1" +version = "2.14.0" description = "Internationalization utilities" optional = false python-versions = ">=3.7" files = [ - {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, - {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] -[package.dependencies] -setuptools = {version = "*", markers = "python_version >= \"3.12\""} - [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "black" -version = "23.11.0" +version = "24.1.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, + {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, + {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, + {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, + {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, + {file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, + {file = 
"black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, + {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, + {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, + {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, + {file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, + {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, + {file = "black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, + {file = "black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, + {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, + {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, + {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, + {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, + {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, + {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, + {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, + {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, ] [package.dependencies] @@ -161,7 +167,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -377,13 +383,13 @@ files = [ [[package]] name = "courlan" -version = "0.9.4" +version = "0.9.5" description = "Clean, filter and sample URLs to optimize data collection – includes spam, content type and language filters." 
optional = false python-versions = ">=3.6" files = [ - {file = "courlan-0.9.4-py3-none-any.whl", hash = "sha256:72f03e307f3dfe91e0c790ab0766bab2aa319fdb09597daec920e4a394cfc14e"}, - {file = "courlan-0.9.4.tar.gz", hash = "sha256:6906aa9a15ae9d442821e06ae153c60f385cff41a8d44b9597c00b349f7043c5"}, + {file = "courlan-0.9.5-py3-none-any.whl", hash = "sha256:3c10fb06a26422b5c5e6f5f6d2c16e5d4308026f9dcea783ca6a88dae5922ee5"}, + {file = "courlan-0.9.5.tar.gz", hash = "sha256:38dc35b2e3bf1f5d516d00d51ac12ebde543e3417c6be6f6a2273c0fc5b5b353"}, ] [package.dependencies] @@ -393,34 +399,34 @@ urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} [[package]] name = "cryptography" -version = "41.0.6" +version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"}, - {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"}, - {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"}, - {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"}, - {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"}, - {file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"}, - {file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"}, - {file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"}, - {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"}, - {file 
= "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"}, - {file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"}, - {file = "cryptography-41.0.6.tar.gz", hash = "sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = 
"cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, ] [package.dependencies] @@ -471,17 +477,17 @@ langdetect = ["langdetect"] [[package]] name = "django" -version = "4.2.7" +version = "5.0.1" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" files = [ - {file = "Django-4.2.7-py3-none-any.whl", hash = "sha256:e1d37c51ad26186de355cbcec16613ebdabfa9689bbade9c538835205a8abbe9"}, - {file = "Django-4.2.7.tar.gz", hash = "sha256:8e0f1c2c2786b5c0e39fe1afce24c926040fad47c8ea8ad30aaf1188df29fc41"}, + {file = "Django-5.0.1-py3-none-any.whl", hash = "sha256:f47a37a90b9bbe2c8ec360235192c7fddfdc832206fcf618bb849b39256affc1"}, + {file = "Django-5.0.1.tar.gz", hash = "sha256:8c8659665bc6e3a44fefe1ab0a291e5a3fb3979f9a8230be29de975e57e8f854"}, ] [package.dependencies] -asgiref = ">=3.6.0,<4" +asgiref = ">=3.7.0,<4" sqlparse = ">=0.3.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -545,88 +551,89 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "6.1.0" +version = "7.0.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" +pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "greenlet" -version = "3.0.1" +version = "3.0.3" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, - {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, - {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, - {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, - {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, - {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, - {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, - {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, - {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, - {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, - {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, - {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, - {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, - {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, - {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, - {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, - {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, - {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, - {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, - {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, - {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, - {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, - {file = 
"greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, - {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, - {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, - {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, - {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, - {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = 
"greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, ] [package.extras] -docs = ["Sphinx"] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] @@ -688,41 +695,19 @@ lxml = ["lxml"] [[package]] name = "htmldate" -version = "1.4.3" -description = 
"Fast and robust extraction of original and updated publication dates from URLs and web pages." -optional = false -python-versions = ">=3.6" -files = [ - {file = "htmldate-1.4.3-py3-none-any.whl", hash = "sha256:d529a319a2fae8329c2beaa54c45af9295d0eca425dfba33b81e4665e8e8a78e"}, - {file = "htmldate-1.4.3.tar.gz", hash = "sha256:ec50f084b997fdf6b26f8c31447e5789f4deb71fe69342cda1d7af0c9f91e01b"}, -] - -[package.dependencies] -charset-normalizer = {version = ">=3.1.0", markers = "python_version >= \"3.7\""} -dateparser = ">=1.1.2" -lxml = ">=4.9.2" -python-dateutil = ">=2.8.2" -urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} - -[package.extras] -all = ["cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "urllib3[brotli]"] -speed = ["cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "urllib3[brotli]"] - -[[package]] -name = "htmldate" -version = "1.6.0" +version = "1.7.0" description = "Fast and robust extraction of original and updated publication dates from URLs and web pages." optional = false python-versions = ">=3.6" files = [ - {file = "htmldate-1.6.0-py3-none-any.whl", hash = "sha256:6ee374849fe7491b3e6c0b26066e8f6940367b0215e7c4fec88774af065a4dbc"}, - {file = "htmldate-1.6.0.tar.gz", hash = "sha256:5827c8f626a16800a29e57e8188a3d32d0b08ca4c7bd662537b73bbbf22c45a6"}, + {file = "htmldate-1.7.0-py3-none-any.whl", hash = "sha256:d82265ac19571b78985d53585b63917d2d2f2c6b96fc9b5cd1928f2777636832"}, + {file = "htmldate-1.7.0.tar.gz", hash = "sha256:02a800dd224cbf74bf483b042f64e14f57ba0e40c6b4404b284e98bc6c30b68d"}, ] [package.dependencies] charset-normalizer = {version = ">=3.3.2", markers = "python_version >= \"3.7\""} dateparser = ">=1.1.2" -lxml = {version = ">=4.9.3", markers = "platform_system != \"Darwin\""} +lxml = {version = ">=4.9.4,<6", markers = "platform_system != \"Darwin\" or python_version > \"3.8\""} python-dateutil = ">=2.8.2" urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} @@ -753,13 +738,13 @@ trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" -version = "0.25.2" +version = "0.26.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, - {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, ] [package.dependencies] @@ -951,110 +936,96 @@ marisa-trie = ">=0.7.7,<0.8.0" [[package]] name = "lxml" -version = "4.9.3" +version = "5.1.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" -files = [ - {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, - {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, - {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, - {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, - {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, - {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, - {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, - {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, - {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, - {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, - {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, - {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, - {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, - {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, - {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, - {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, - {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, - {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, - {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, - {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, - {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, - {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, - {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, - {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, - {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, - {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, - {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, - {file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, - {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, - {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, - {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, - {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, - {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, - {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, - {file = 
"lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +python-versions = ">=3.6" +files = [ + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, + {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, + {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, + {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, + {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, + {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, + {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, + {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, + {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, + {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, + {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, + {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, + {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, + {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, + {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, + {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, + {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, + {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, + {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, + {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, + {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, + {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, + {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, + {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, + {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, + {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, + {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, + {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, + {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, + {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, + {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, + {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, + {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, + {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, + {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, + {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, + {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, + {file = 
"lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, + {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, + {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, + {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, + {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.35)"] +source = ["Cython (>=3.0.7)"] [[package]] name = "marisa-trie" @@ -1147,19 +1118,19 @@ files = [ [[package]] name = "mf2py" -version = "1.1.3" -description = "Python Microformats2 parser" +version = "2.0.1" +description = "Microformats parser" optional = false -python-versions = ">=2.7" +python-versions = ">=3.8" files = [ - {file = "mf2py-1.1.3-py3-none-any.whl", hash = "sha256:8f9e2c147beadd56f8839644124c7d141d96e879319b9f50d02826c88766bf4d"}, - {file = "mf2py-1.1.3.tar.gz", hash = "sha256:4241e91ed4b644dd666d9fbd2557ed86e5bb7399c196026f7b0a1f413b33f59f"}, + {file = "mf2py-2.0.1-py3-none-any.whl", hash = "sha256:092806e17f1a93db4aafa5e8d3c4124b5e42cd89027e2db48a5248ef4eabde03"}, + {file = "mf2py-2.0.1.tar.gz", hash = "sha256:1380924633413b8d72e704b5c86b4382c4b1371699edecc907b01cd21138d7cd"}, ] [package.dependencies] -BeautifulSoup4 = ">=4.6.0" -html5lib = ">=1.0.1" -requests = ">=2.18.4" +beautifulsoup4 = ">=4.11.1,<5.0.0" +html5lib = ">=1.1,<2.0" +requests = ">=2.28.2,<3.0.0" [[package]] name = "mypy-extensions" @@ -1213,13 +1184,13 @@ w3lib = ">=1.19.0" [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -1291,13 +1262,13 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. 
a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -1306,33 +1277,33 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "playwright" -version = "1.40.0" +version = "1.41.1" description = "A high-level API to automate web browsers" optional = false python-versions = ">=3.8" files = [ - {file = "playwright-1.40.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:35b7e0b389df2aa632f3614d35be7bace35f6f634d880db44b035c83e4481312"}, - {file = "playwright-1.40.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:382a7465cc0ea3bf7fa66716bd37fd53f66af4bcc5c72283a8eff3f6e87758a8"}, - {file = "playwright-1.40.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:f11e1ec32f3b3dbd7f24d1481c313cb527001955004ee88a73f9b4a610d0db28"}, - {file = "playwright-1.40.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:6a842dca4dd53feda1d7bd0e14aa65140e4e816452ebddd307e90cad184d92bd"}, - {file = "playwright-1.40.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ec3746de69e7ba912b70e0fe3a3c6b8af97f21ece793c5db27c251da4d2f3e6"}, - {file = "playwright-1.40.0-py3-none-win32.whl", hash = "sha256:3ae90ea5ad776fe5e1300a9c730244c8e57a183c6eb261044418710d51ae03c0"}, - {file = "playwright-1.40.0-py3-none-win_amd64.whl", hash = "sha256:ba5a89953aedb158025e4581eafb6fdeebb3d58acd9ce24b59f691b1e2a861bc"}, + {file = "playwright-1.41.1-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b456f25db38e4d93afc3c671e1093f3995afb374f14cee284152a30f84cfff02"}, + {file = "playwright-1.41.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53ff152506dbd8527aa815e92757be72f5df60810e8000e9419d29fd4445f53c"}, + {file = "playwright-1.41.1-py3-none-macosx_11_0_universal2.whl", hash = "sha256:70c432887b8b5e896fa804fb90ca2c8baf05b13a3590fb8bce8b3c3efba2842d"}, + {file = "playwright-1.41.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:f227a8d616fd3a02d45d68546ee69947dce4a058df134a9e7dc6167c543de3cd"}, + {file = "playwright-1.41.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:475130f879b4ba38b9db7232a043dd5bc3a8bd1a84567fbea7e21a02ee2fcb13"}, + {file = "playwright-1.41.1-py3-none-win32.whl", hash = "sha256:ef769414ea0ceb76085c67812ab6bc0cc6fac0adfc45aaa09d54ee161d7f637b"}, + {file = "playwright-1.41.1-py3-none-win_amd64.whl", hash = "sha256:316e1ba0854a712e9288b3fe49509438e648d43bade77bf724899de8c24848de"}, ] [package.dependencies] -greenlet = "3.0.1" +greenlet = "3.0.3" pyee = "11.0.1" [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = 
"sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1430,13 +1401,13 @@ dev = ["black", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", [[package]] name = "pyflakes" -version = "3.1.0" +version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] @@ -1483,28 +1454,29 @@ files = [ [[package]] name = "pyrdfa3" -version = "3.5.3" -description = "pyRdfa Libray" +version = "3.6.2" +description = "pyRdfa distiller/parser library" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "pyRdfa3-3.5.3-py3-none-any.whl", hash = "sha256:4da7ed49e8f524b573ed67e4f7bc7f403bff3be00546d7438fe263c924a91ccf"}, - {file = "pyRdfa3-3.5.3.tar.gz", hash = "sha256:157663a92b87df345b6f69bde235dff5f797891608e12fe1e4fa8dad687131ae"}, + {file = "pyRdfa3-3.6.2-py3-none-any.whl", hash = "sha256:290c2fa966ddd1b45ac94a727da144f5a233ed58c63c370e3d68e6d00b0dee5d"}, + {file = "pyRdfa3-3.6.2.tar.gz", hash = "sha256:73681dab957f60901696767388b956a5769c730bc451da6ffb2f0e36f18314c2"}, ] [package.dependencies] -html5lib = "*" -rdflib = "*" +html5lib = ">=1.1" +rdflib = ">=6.1.1" +requests = ">=2.25.1" [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1534,13 +1506,13 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "1.0.0" +version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, ] [package.extras] @@ -1591,99 +1563,104 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "regex" -version = "2023.10.3" +version = "2023.12.25" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.7" files = [ - {file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"}, - {file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"}, - {file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"}, - {file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"}, - {file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"}, - {file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"}, - {file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"}, - {file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"}, - {file = 
"regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"}, - {file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"}, - {file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"}, - {file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"}, - {file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"}, - {file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"}, - {file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"}, - {file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"}, - {file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"}, - {file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"}, - 
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"}, - {file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"}, - {file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"}, - {file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"}, - {file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"}, - {file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"}, - {file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"}, - {file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"}, - {file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"}, - {file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"}, - {file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"}, - {file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"}, - {file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"}, - {file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"}, - {file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"}, - {file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = 
"regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = 
"regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, ] [[package]] @@ -1766,13 +1743,13 @@ files = [ [[package]] name = "service-identity" -version = "23.1.0" +version = "24.1.0" description = "Service identity verification for pyOpenSSL & cryptography." optional = false python-versions = ">=3.8" files = [ - {file = "service_identity-23.1.0-py3-none-any.whl", hash = "sha256:87415a691d52fcad954a500cb81f424d0273f8e7e3ee7d766128f4575080f383"}, - {file = "service_identity-23.1.0.tar.gz", hash = "sha256:ecb33cd96307755041e978ab14f8b14e13b40f1fbd525a4dc78f46d2b986431d"}, + {file = "service_identity-24.1.0-py3-none-any.whl", hash = "sha256:a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a"}, + {file = "service_identity-24.1.0.tar.gz", hash = "sha256:6829c9d62fb832c2e1c435629b0a8c476e1929881f28bee4d20bc24161009221"}, ] [package.dependencies] @@ -1782,7 +1759,7 @@ pyasn1 = "*" pyasn1-modules = "*" [package.extras] -dev = ["pyopenssl", "service-identity[docs,idna,mypy,tests]"] +dev = ["pyopenssl", "service-identity[idna,mypy,tests]"] docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] idna = ["idna"] mypy = ["idna", "mypy", "types-pyopenssl"] @@ -1790,13 +1767,13 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "69.0.2" +version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, - {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] [package.extras] @@ -1897,50 +1874,26 @@ files = [ [[package]] name = "trafilatura" -version = "1.6.1" -description = "Python package and command-line tool designed to gather text on the Web. It includes discovery, extraction and text processing components. Its main applications are web crawling, downloads, scraping, and extraction of main texts, metadata and comments." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "trafilatura-1.6.1-py3-none-any.whl", hash = "sha256:fe94ed68fb50ec80ae698095010e1a0a7827bee0542ecd33c2cb55ca3e985aa7"}, - {file = "trafilatura-1.6.1.tar.gz", hash = "sha256:a7792b037d624d04ab05fcce556cfe08b771dfc8c1db1494c750879530c9a30c"}, -] - -[package.dependencies] -certifi = "*" -charset-normalizer = {version = ">=3.1.0", markers = "python_version >= \"3.7\""} -courlan = ">=0.9.3" -htmldate = ">=1.4.3" -justext = ">=3.0.0" -lxml = ">=4.9.2" -urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} - -[package.extras] -all = ["brotli", "cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "htmldate[speed] (>=1.4.3)", "py3langid (>=0.2.2)", "pycurl (>=7.45.2)"] -gui = ["Gooey (>=1.0.1)"] - -[[package]] -name = "trafilatura" -version = "1.6.2" +version = "1.7.0" description = "Python package and command-line tool designed to gather text on the Web. It includes discovery, extraction and text processing components. Its main applications are web crawling, downloads, scraping, and extraction of main texts, metadata and comments." optional = false python-versions = ">=3.6" files = [ - {file = "trafilatura-1.6.2-py3-none-any.whl", hash = "sha256:5bf97ed0d09eda4393770360b6e2b8851781d35fdff2d0276ec705a4f7791047"}, - {file = "trafilatura-1.6.2.tar.gz", hash = "sha256:a984630ad9c54d9fe803555d00f5a028ca65c766ce89bfd87d976f561c55b503"}, + {file = "trafilatura-1.7.0-py3-none-any.whl", hash = "sha256:dd272c51f55c99b44e7c5d76a67dcb17d1cbcadd3f53768f6f7d7bc5ff3280a7"}, + {file = "trafilatura-1.7.0.tar.gz", hash = "sha256:a166e67f005a6a12ef194f48c7c9fa4e1b0e36756fdd2b64e02473c356962f04"}, ] [package.dependencies] certifi = "*" charset-normalizer = {version = ">=3.2.0", markers = "python_version >= \"3.7\""} -courlan = ">=0.9.4" -htmldate = ">=1.5.1" +courlan = ">=0.9.5" +htmldate = ">=1.7.0" justext = ">=3.0.0" -lxml = {version = ">=4.9.3", markers = "platform_system != \"Darwin\""} +lxml = {version = ">=4.9.4,<6", markers = "platform_system != \"Darwin\" or python_version > \"3.8\""} urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} [package.extras] -all = ["brotli", "cchardet (>=2.1.7)", "faust-cchardet (>=2.1.18)", "htmldate[speed] (>=1.5.1)", "py3langid (>=0.2.2)", "pycurl (>=7.45.2)"] +all = ["brotli", "cchardet (>=2.1.7)", "faust-cchardet (>=2.1.19)", "htmldate[speed] (>=1.7.0)", "py3langid (>=0.2.2)", "pycurl (>=7.45.2)"] gui = ["Gooey (>=1.0.1)"] [[package]] @@ -2011,24 +1964,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = 
"sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] @@ -2180,4 +2133,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "706d900dfbb99c202dd614add1dc3a5898d3da3af3d5e602660f2dd25e3a041e" +content-hash = "bfb781f2a0a3538378685365efa2a1fb0c23a758a13dc5c4f303da60ecd172c5" diff --git a/pyproject.toml b/pyproject.toml index 2f3e5c8a..07322e13 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,25 +60,24 @@ packages = [{include = "converter"}] [tool.poetry.dependencies] python = "^3.10" wheel = "^0.42.0" -black = "^23.7.0" +black = "24.1.0" certifi="^2023.11.17" dateparser="1.2" extruct="0.16.0" -flake8 = "^6.1.0" +flake8 = "7.0.0" html2text="2020.1.16" jmespath="1.0.1" image = "1.5.33" itemadapter="0.8.0" itemloaders="1.1.0" isodate="0.6.1" -lxml="4.9.3" overrides="3.1.0" Pillow="10.1.0" -playwright="1.40" +playwright="1.41.1" pyOpenSSL="23.3.0" pytest="^7.4.3" python-dateutil="2.8.2" -python-dotenv="1.0.0" +python-dotenv="1.0.1" requests="2.31.0" six="1.16.0" Scrapy="2.11" @@ -87,10 +86,10 @@ urllib3="2.1.0" vobject="0.9.6.1" w3lib="2.1.2" xmltodict="0.13.0" -trafilatura = "^1.6.1" -babel = "2.13.1" +trafilatura = "1.7" +babel = "2.14.0" langcodes = {extras = ["data"], version = "^3.3.0"} -httpx = "^0.25.2" +httpx = "0.26" async-lru = "2.0.4" [tool.poetry.group.dev.dependencies] diff --git a/requirements.txt b/requirements.txt index d121d20e..90cf180d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,34 +1,34 @@ -anyio==4.1.0 ; python_version >= "3.10" and python_version < "4.0" +anyio==4.2.0 ; python_version >= "3.10" and python_version < "4.0" asgiref==3.7.2 ; python_version >= "3.10" and python_version < "4.0" async-lru==2.0.4 ; python_version >= "3.10" and python_version < "4.0" -attrs==23.1.0 ; python_version >= "3.10" and python_version < "4.0" +attrs==23.2.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" -babel==2.13.1 ; python_version >= "3.10" and python_version < "4.0" -beautifulsoup4==4.12.2 ; python_version >= "3.10" and python_version < "4.0" -black==23.11.0 ; python_version >= "3.10" and python_version < "4.0" +babel==2.14.0 ; python_version >= "3.10" and python_version < "4.0" +beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" +black==24.1.0 ; python_version >= "3.10" and python_version < "4.0" certifi==2023.11.17 ; python_version >= "3.10" and python_version < "4.0" cffi==1.16.0 ; python_version >= "3.10" and python_version < "4.0" charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") constantly==23.10.4 ; python_version >= "3.10" and python_version < "4.0" -courlan==0.9.4 ; python_version >= "3.10" and python_version < "4.0" -cryptography==41.0.6 ; python_version >= "3.10" and python_version < "4.0" +courlan==0.9.5 ; python_version >= "3.10" and 
python_version < "4.0" +cryptography==41.0.7 ; python_version >= "3.10" and python_version < "4.0" cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" dateparser==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -django==4.2.7 ; python_version >= "3.10" and python_version < "4.0" +django==5.0.1 ; python_version >= "3.10" and python_version < "4.0" exceptiongroup==1.2.0 ; python_version >= "3.10" and python_version < "3.11" extruct==0.16.0 ; python_version >= "3.10" and python_version < "4.0" filelock==3.13.1 ; python_version >= "3.10" and python_version < "4.0" -flake8==6.1.0 ; python_version >= "3.10" and python_version < "4.0" -greenlet==3.0.1 ; python_version >= "3.10" and python_version < "4.0" +flake8==7.0.0 ; python_version >= "3.10" and python_version < "4.0" +greenlet==3.0.3 ; python_version >= "3.10" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" html-text==0.5.2 ; python_version >= "3.10" and python_version < "4.0" html2text==2020.1.16 ; python_version >= "3.10" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" -htmldate==1.6.0 ; python_version >= "3.10" and python_version < "4.0" +htmldate==1.7.0 ; python_version >= "3.10" and python_version < "4.0" httpcore==1.0.2 ; python_version >= "3.10" and python_version < "4.0" -httpx==0.25.2 ; python_version >= "3.10" and python_version < "4.0" +httpx==0.26.0 ; python_version >= "3.10" and python_version < "4.0" hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" idna==3.6 ; python_version >= "3.10" and python_version < "4.0" image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" @@ -43,19 +43,19 @@ justext==3.0.0 ; python_version >= "3.10" and python_version < "4.0" langcodes==3.3.0 ; python_version >= "3.10" and python_version < "4.0" langcodes[data]==3.3.0 ; python_version >= "3.10" and python_version < "4.0" language-data==1.1 ; python_version >= "3.10" and python_version < "4.0" -lxml==4.9.3 ; python_version >= "3.10" and python_version < "4.0" +lxml==5.1.0 ; python_version >= "3.10" and python_version < "4.0" marisa-trie==0.7.8 ; python_version >= "3.10" and python_version < "4.0" mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" -mf2py==1.1.3 ; python_version >= "3.10" and python_version < "4.0" +mf2py==2.0.1 ; python_version >= "3.10" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" packaging==23.2 ; python_version >= "3.10" and python_version < "4.0" parsel==1.8.1 ; python_version >= "3.10" and python_version < "4.0" -pathspec==0.11.2 ; python_version >= "3.10" and python_version < "4.0" +pathspec==0.12.1 ; python_version >= "3.10" and python_version < "4.0" pillow==10.1.0 ; python_version >= "3.10" and python_version < "4.0" -platformdirs==4.0.0 ; python_version >= "3.10" and python_version < "4.0" -playwright==1.40.0 ; python_version >= "3.10" and python_version < "4.0" -pluggy==1.3.0 ; python_version >= "3.10" and python_version < "4.0" +platformdirs==4.1.0 ; python_version >= "3.10" and python_version < "4.0" +playwright==1.41.1 ; python_version >= "3.10" and python_version < "4.0" +pluggy==1.4.0 ; python_version >= "3.10" and python_version < "4.0" protego==0.3.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1-modules==0.3.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1==0.5.1 ; python_version >= "3.10" 
and python_version < "4.0" @@ -63,24 +63,24 @@ pycodestyle==2.11.1 ; python_version >= "3.10" and python_version < "4.0" pycparser==2.21 ; python_version >= "3.10" and python_version < "4.0" pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" pyee==11.0.1 ; python_version >= "3.10" and python_version < "4.0" -pyflakes==3.1.0 ; python_version >= "3.10" and python_version < "4.0" +pyflakes==3.2.0 ; python_version >= "3.10" and python_version < "4.0" pyopenssl==23.3.0 ; python_version >= "3.10" and python_version < "4.0" pyparsing==3.1.1 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" -pyrdfa3==3.5.3 ; python_version >= "3.10" and python_version < "4.0" -pytest==7.4.3 ; python_version >= "3.10" and python_version < "4.0" +pyrdfa3==3.6.2 ; python_version >= "3.10" and python_version < "4.0" +pytest==7.4.4 ; python_version >= "3.10" and python_version < "4.0" python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" -python-dotenv==1.0.0 ; python_version >= "3.10" and python_version < "4.0" +python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" pytz==2023.3.post1 ; python_version >= "3.10" and python_version < "4.0" queuelib==1.6.2 ; python_version >= "3.10" and python_version < "4.0" rdflib==7.0.0 ; python_version >= "3.10" and python_version < "4.0" -regex==2023.10.3 ; python_version >= "3.10" and python_version < "4.0" +regex==2023.12.25 ; python_version >= "3.10" and python_version < "4.0" requests-file==1.5.1 ; python_version >= "3.10" and python_version < "4.0" requests==2.31.0 ; python_version >= "3.10" and python_version < "4.0" scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" scrapy==2.11.0 ; python_version >= "3.10" and python_version < "4.0" -service-identity==23.1.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==69.0.2 ; python_version >= "3.10" and python_version < "4.0" +service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" +setuptools==69.0.3 ; python_version >= "3.10" and python_version < "4.0" six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" sniffio==1.3.0 ; python_version >= "3.10" and python_version < "4.0" soupsieve==2.5 ; python_version >= "3.10" and python_version < "4.0" @@ -88,11 +88,11 @@ sqlparse==0.4.4 ; python_version >= "3.10" and python_version < "4.0" tld==0.13 ; python_version >= "3.10" and python_version < "4" tldextract==5.1.1 ; python_version >= "3.10" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" -trafilatura==1.6.2 ; python_version >= "3.10" and python_version < "4.0" +trafilatura==1.7.0 ; python_version >= "3.10" and python_version < "4.0" twisted-iocpsupport==1.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" twisted==22.10.0 ; python_version >= "3.10" and python_version < "4.0" -typing-extensions==4.8.0 ; python_version >= "3.10" and python_version < "4.0" -tzdata==2023.3 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") +typing-extensions==4.9.0 ; python_version >= "3.10" and python_version < "4.0" +tzdata==2023.4 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") tzlocal==5.2 ; python_version >= "3.10" and 
python_version < "4.0"
urllib3==2.1.0 ; python_version >= "3.10" and python_version < "4.0"
vobject==0.9.6.1 ; python_version >= "3.10" and python_version < "4.0"

From 87ef589cf6d8ae189d07e217a8dcda943b1b1a54 Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Fri, 26 Jan 2024 17:34:21 +0100
Subject: [PATCH 437/590] tutory_spider v0.2.0

Changelog:
- fix: fixed Warnings and added missing TypeHints
- change: increased priority of initial API requests to increase spider performance
- change: moved Playwright request higher up in the program flow since its metadata
- change: use playwright metadata in ResponseItemLoader when possible
- change: enable Scrapy Autothrottle with target concurrency of 3 (default: 1)
- style: code formatting via black

New metadata:
- feat: combined lifecycle "author" information from API and DOM into LifecycleItem

Signed-off-by: criamos <981166+Criamos@users.noreply.github.com>
---
 converter/spiders/tutory_spider.py | 125 ++++++++++++++++++++++-------
 1 file changed, 94 insertions(+), 31 deletions(-)

diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py
index 7480906d..316a9616 100644
--- a/converter/spiders/tutory_spider.py
+++ b/converter/spiders/tutory_spider.py
@@ -8,9 +8,21 @@
 from scrapy.spiders import CrawlSpider
 
 from .base_classes import LomBase, JSONBase
-from ..items import LomBaseItemloader, BaseItemLoader, ResponseItemLoader
+from ..items import (
+    LomBaseItemloader,
+    BaseItemLoader,
+    ResponseItemLoader,
+    LomGeneralItemloader,
+    LomTechnicalItemLoader,
+    LomLifecycleItemloader,
+    LomEducationalItemLoader,
+    LicenseItemLoader,
+    ValuespaceItemLoader,
+)
 from ..web_tools import WebEngine, WebTools
 
+logger = logging.getLogger(__name__)
+
 
 class TutorySpider(CrawlSpider, LomBase, JSONBase):
     name = "tutory_spider"
@@ -18,10 +30,11 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase):
     url = "https://www.tutory.de/"
     objectUrl = "https://www.tutory.de/bereitstellung/dokument/"
     baseUrl = "https://www.tutory.de/api/v1/share/"
-    version = "0.1.9"  # last update: 2023-08-18
+    version = "0.2.0"  # last update: 2024-01-26
     custom_settings = {
-        # "AUTOTHROTTLE_ENABLED": True,
+        "AUTOTHROTTLE_ENABLED": True,
         "AUTOTHROTTLE_DEBUG": True,
+        "AUTOTHROTTLE_TARGET_CONCURRENCY": 3,
         "WEB_TOOLS": WebEngine.Playwright,
     }
 
@@ -38,9 +51,11 @@ def __init__(self, **kwargs):
 
     def start_requests(self):
         first_url: str = self.assemble_tutory_api_url(api_page=0)
-        yield scrapy.Request(url=first_url, callback=self.parse_api_page)
+        # by increasing the priority of these requests, we're making sure to parse all API pages first and only crawl
+        # individual items once we've got the complete list of URLs
+        yield scrapy.Request(url=first_url, callback=self.parse_api_page, priority=5)
 
-    def parse_api_page(self, response: scrapy.http.TextResponse):
+    def parse_api_page(self, response: scrapy.http.TextResponse) -> scrapy.Request:
         """
         This method tries to parse the current pagination parameter from response.url and yields two types of
         scrapy.Requests:
@@ -55,7 +70,7 @@ def parse_api_page(self, response: scrapy.http.TextResponse):
         pagination_current_page: int = pagination_parameter.groupdict().get("page")
         if "total" in json_data:
             total_items = json_data.get("total")
-            logging.info(
+            logger.info(
                 f"Currently crawling Tutory API page {pagination_current_page} -> {response.url} // "
                 f"Expected items (in total): {total_items}"
             )
@@ -66,7 +81,7 @@ def parse_api_page(self, response: scrapy.http.TextResponse):
         if worksheets_data:
             # only crawl the next page if the "worksheets"-dict isn't empty
             yield scrapy.Request(url=url_next_page, callback=self.parse_api_page)
-            logging.info(
+            logger.info(
                 f"Tutory API page {pagination_current_page} is expected to yield " f"{len(worksheets_data)} items."
             )
             for j in worksheets_data:
@@ -76,14 +91,14 @@ def parse_api_page(self, response: scrapy.http.TextResponse):
                 if self.hasChanged(response_copy):
                     yield scrapy.Request(url=item_url, callback=self.parse, cb_kwargs={"item_dict": j})
 
-    def assemble_tutory_api_url(self, api_page: int):
+    def assemble_tutory_api_url(self, api_page: int) -> str:
         url_current_page = (
             f"{self.baseUrl}worksheet?groupSlug=entdecken&pageSize={str(self.API_PAGESIZE_LIMIT)}"
             f"&page={str(api_page)}"
         )
         return url_current_page
 
-    def getId(self, response=None, **kwargs):
+    def getId(self, response=None, **kwargs) -> str:
         if "item" in response.meta:
             item_id: str = response.meta["item"]["id"]
             return item_id
@@ -93,10 +108,10 @@ def getId(self, response=None, **kwargs):
                 item_id: str = api_item["id"]
                 return item_id
         except KeyError as ke:
-            logging.error(f"'getId'-method failed to retrieve item_id for '{response.url}'.")
+            logger.error(f"'getId'-method failed to retrieve item_id for '{response.url}'.")
            raise ke
 
-    def getHash(self, response=None):
+    def getHash(self, response=None) -> str:
        return response.meta["item"]["updatedAt"] + self.version
 
    # ToDo (performance): reduce the amount of scrapy Requests by executing hasChanged() earlier:
@@ -108,7 +123,7 @@ def check_if_item_should_be_dropped(self, response) -> bool:
         identifier: str = self.getId(response)
         hash_str: str = self.getHash(response)
         if self.shouldImport(response) is False:
-            logging.debug(f"Skipping entry {identifier} because shouldImport() returned false")
+            logger.debug(f"Skipping entry {identifier} because shouldImport() returned false")
             drop_item_flag = True
             return drop_item_flag
         if identifier is not None and hash_str is not None:
@@ -126,21 +141,41 @@ async def parse(self, response, **kwargs):
         drop_item_flag: bool = self.check_if_item_should_be_dropped(response)
         if drop_item_flag is True:
             return
 
-        # if we need more metadata from the DOM, this could be a suitable place to move up the call to Playwright
+
+        playwright_dict: dict = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright)
+        playwright_html: str = playwright_dict["html"]
+
         base_loader: BaseItemLoader = self.getBase(response)
-        lom_loader: LomBaseItemloader = self.getLOM(response)
-        lom_loader.add_value("general", self.getLOMGeneral(response))
-        lom_loader.add_value("technical", self.getLOMTechnical(response))
+        lom_loader: LomBaseItemloader = LomBaseItemloader()
+        general_loader: LomGeneralItemloader = await self.getLOMGeneral(
+            response=response, playwright_dict=playwright_dict
+        )
+        lom_loader.add_value("general", general_loader.load_item())
+        educational_loader: LomEducationalItemLoader = LomEducationalItemLoader()
+        lom_loader.add_value("educational", educational_loader.load_item())
+        lifecycle_loader = await self.getLOMLifecycle(response)
+        lom_loader.add_value("lifecycle", lifecycle_loader.load_item())
+        technical_loader: LomTechnicalItemLoader = self.getLOMTechnical(response)
+        lom_loader.add_value("technical", technical_loader.load_item())
         base_loader.add_value("lom", lom_loader.load_item())
         base_loader.add_value("valuespaces", self.getValuespaces(response).load_item())
         base_loader.add_value("license", self.getLicense(response).load_item())
         base_loader.add_value("permissions", self.getPermissions(response).load_item())
-        response_itemloader: ResponseItemLoader = await self.mapResponse(response, fetchData=False)
-        base_loader.add_value("response", response_itemloader.load_item())
+        response_loader: ResponseItemLoader = await self.mapResponse(response, fetchData=False)
+        if playwright_html and isinstance(playwright_html, str):
+            response_loader.replace_value("html", playwright_html)
+        if "screenshot_bytes" in playwright_dict:
+            sbytes: bytes = playwright_dict["screenshot_bytes"]
+            base_loader.add_value("screenshot_bytes", sbytes)
+        if "text" in playwright_dict:
+            playwright_fulltext: str = playwright_dict["text"]
+            if playwright_fulltext and isinstance(playwright_fulltext, str):
+                response_loader.replace_value("text", playwright_fulltext)
+        base_loader.add_value("response", response_loader.load_item())
         yield base_loader.load_item()
 
-    def getBase(self, response=None):
+    def getBase(self, response=None) -> BaseItemLoader:
         base = LomBase.getBase(self, response)
         base.add_value("lastModified", response.meta["item"]["updatedAt"])
         base.add_value(
@@ -149,8 +184,8 @@ def getBase(self, response=None):
         )
         return base
 
-    def getValuespaces(self, response):
-        valuespaces = LomBase.getValuespaces(self, response)
+    def getValuespaces(self, response) -> ValuespaceItemLoader:
+        valuespaces: ValuespaceItemLoader = LomBase.getValuespaces(self, response)
         disciplines = set()
         subject_codes: list[str] = list(
             map(
@@ -244,8 +279,8 @@ def getValuespaces(self, response):
         valuespaces.add_value("new_lrt", "36e68792-6159-481d-a97b-2c00901f4f78")  # Arbeitsblatt
         return valuespaces
 
-    def getLicense(self, response=None):
-        license_loader = LomBase.getLicense(self, response)
+    def getLicense(self, response=None) -> LicenseItemLoader:
+        license_loader: LicenseItemLoader = LomBase.getLicense(self, response)
         if "user" in response.meta["item"]:
             user_dict: dict = response.meta["item"]["user"]
             if "publishName" in user_dict:
@@ -258,7 +293,6 @@ def getLicense(self, response=None):
                 if username:
                     license_loader.add_value("author", username)
             elif publish_decision == "name":
-                # ToDo: this information could also be used for lifecycle role 'author' in a future crawler update
                 firstname = None
                 lastname = None
                 if "firstname" in user_dict:
@@ -270,7 +304,39 @@ def getLicense(self, response=None):
                     license_loader.add_value("author", full_name)
         return license_loader
 
+    async def getLOMLifecycle(self, response: scrapy.http.Response = None) -> LomLifecycleItemloader:
+        lifecycle_loader: LomLifecycleItemloader = LomLifecycleItemloader()
+        if "user" in response.meta["item"]:
+            user_dict: dict = response.meta["item"]["user"]
+            lifecycle_loader.add_value("role", "author")
+            if "publishName" in user_dict:
+                # the 'publishName'-field seems to indicate whether the username or the full name appears on top of a
+                # worksheet as author metadata.
+ publish_decision: str = user_dict["publishName"] + if publish_decision == "username": + if "username" in user_dict: + username: str = user_dict["username"] + if username: + lifecycle_loader.add_value("firstName", username) + elif publish_decision == "name": + if "firstname" in user_dict: + firstname: str = user_dict.get("firstname") + if firstname: + lifecycle_loader.add_value("firstName", firstname) + if "lastname" in user_dict: + lastname: str = user_dict.get("lastname") + if lastname: + lifecycle_loader.add_value("lastName", lastname) + user_profile_path: str = response.xpath( + "//a[@class='value']/@href|label[contains(text(), 'Autor')]" + ).get() + if user_profile_path and isinstance(user_profile_path, str): + user_profile_url: str = urllib.parse.urljoin(self.url, user_profile_path) + if user_profile_url: + lifecycle_loader.add_value("url", user_profile_url) + return lifecycle_loader + + async def getLOMGeneral(self, response=None, playwright_dict: dict = None) -> LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"]["name"]) item_description = None @@ -286,18 +352,15 @@ async def getLOMGeneral(self, response=None): elif meta_og_description: # 2nd fallback: general.add_value("description", meta_og_description) - else: + elif "html" in playwright_dict: # this is where the (expensive) calls to our headless browser start - playwright_dict = await WebTools.getUrlData(response.url, engine=WebEngine.Playwright) playwright_html = playwright_dict["html"] - # ToDo: if we need DOM data from Playwright in another method, move the call to Playwright into parse() - # and parametrize the result if playwright_html: # 3rd fallback: trying to extract the fulltext with trafilatura playwright_bytes: bytes = playwright_html.encode() trafilatura_text = trafilatura.extract(playwright_bytes) if trafilatura_text: - logging.debug( + logger.debug( f"Item {response.url} did not provide any valid 'description' in its DOM header metadata. " f"Fallback to trafilatura fulltext..." 
) @@ -319,7 +382,7 @@ async def getLOMGeneral(self, response=None): general.add_value("description", text_combined) return general - def getLOMTechnical(self, response=None): + def getLOMTechnical(self, response=None) -> LomTechnicalItemLoader: technical = LomBase.getLOMTechnical(self, response) technical.add_value("location", response.url) technical.add_value("format", "text/html") From c6c620c58b6b840e5d054ca24067384eb98620ed Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 6 Feb 2024 20:04:07 +0100 Subject: [PATCH 438/590] dilertube_spider v0.0.2 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Metadata changelog: - fix/change: fix 'license.url'-related warnings by using LicenseMapper instead of (potentially outdated) crawler-specific license-mapping table - change: activate better thumbnails by parsing meta "og:image" tag from HTML header (DiLerTube no longer throws HTTP Error Code 423 when trying to download these images directly) - feat: save "Quelle" metadata to LomLifecycleItem (publisher) - feat: improve lifecycle author metadata - fix: do not create empty LifecycleItems if the only metadata available is a date - change: improve discipline mapping and remove unnecessary mapping entries - fix: don't try to set 'base.lastModified' with published_date Changelog: - change: activate Scrapy Autothrottle - change: use Playwright - change: increase priority of Sitemap requests -- intention behind this change: make sure to crawl each video overview first, then crawl the individual videos afterward - change: disable fetchData in super().mapResponse() call - fix: getID(), getHash(), init() - fix: hasChanged()-check if item needs update or not - refactor: retrieving the "veröffentlicht am"-date from the DOM is done in get_published_date_from_dom() method from now on - code cleanup: remove unnecessary mappings, old comments and (commented out) debug commands - code formatting via black - style: add missing TypeHints Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/spiders/dilertube_spider.py | 425 ++++++++++++++------------ 1 file changed, 230 insertions(+), 195 deletions(-) diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py index 43b023a0..5eff3964 100644 --- a/converter/spiders/dilertube_spider.py +++ b/converter/spiders/dilertube_spider.py @@ -1,3 +1,4 @@ +import logging import re from datetime import datetime @@ -5,67 +6,68 @@ import w3lib.html from scrapy.spiders import CrawlSpider -from converter.items import BaseItemLoader, LomBaseItemloader, LomGeneralItemloader, LomTechnicalItemLoader, \ - LomLifecycleItemloader, LomEducationalItemLoader, LomClassificationItemLoader, ValuespaceItemLoader, \ - LicenseItemLoader, ResponseItemLoader +from converter.items import ( + BaseItemLoader, + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + LomClassificationItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, + ResponseItemLoader, + PermissionItemLoader, +) from converter.spiders.base_classes import LomBase +from converter.util.license_mapper import LicenseMapper from converter.util.sitemap import from_xml_response +from converter.web_tools import WebEngine + +logger = logging.getLogger(__name__) class DiLerTubeSpider(CrawlSpider, LomBase): name = "dilertube_spider" friendlyName = "DiLerTube" start_urls = ["https://www.dilertube.de/sitemap.xml"] - version = "0.0.1" # 
last update: 2022-05-16 + version = "0.0.2" # last update: 2024-02-08 custom_settings = { - "ROBOTSTXT_OBEY": False + "ROBOTSTXT_OBEY": False, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 3, + "WEB_TOOLS": WebEngine.Playwright, } - # debug_video_url_set = set() - - LICENSE_MAPPING = { - "CC BY 4.0": "https://creativecommons.org/licenses/by/4.0", - "CC BY-SA 4.0": "https://creativecommons.org/licenses/by-sa/4.0", - "CC BY-ND 4.0": "https://creativecommons.org/licenses/by-nd/4.0", - "CC BY-NC 4.0": "https://creativecommons.org/licenses/by-nc/4.0", - "CC BY-NC-SA 4.0": "https://creativecommons.org/licenses/by-nc-sa/4.0", - "CC BY-NC-ND 4.0": "https://creativecommons.org/licenses/by-nc-nd/4.0" - } - # ToDo: we're missing several licenses in converter.Constants (either keep using this mapping or update Constants) DISCIPLINE_MAPPING = { - "Alltagskultur, Ernährung, Soziales (AES)": "Ernährung und Hauswirtschaft", + "Alltagskultur, Ernährung, Soziales (AES)": "04006", # "Ernährung und Hauswirtschaft" # ToDo: AES discipline exists since 2016 in BaWü, probably needs its own entry in the "disciplines.ttl"-Vocab - "Berufsorientierung": "Berufliche Bildung", - "Bildende Kunst": "Kunst", - # "Biologie": "Biologie", - # "Chemie": "Chemie", - # "Deutsch": "Deutsch", - # "Englisch": "Englisch", - "Ethik": "Ethik", - # "Französisch": "Französisch", - "Gemeinschaftskunde": "", - "Geographie": "Geografie", - # "Geschichte": "Geschichte", - "Gesundheit und Soziales": "", - "Informatik und Medienbildung": "", - "Lateinisch": "Latein", - "Materie Natur Technik (MNT)": "", - # "Mathematik": "Mathematik", - # "Musik": "Musik", - # "Pädagogik": "Pädagogik", - # "Philosophie": "Philosophie", - # "Religion": "Religion", - # "Sachunterricht": "Sachunterricht", - # "Spanisch": "Spanisch", - # "Sport": "Sport", - "Technik": "Arbeitslehre", - # "Wirtschaftskunde": "Wirtschaftskunde", + "Berufsorientierung": "040", # "Berufliche Bildung" + "Bildende Kunst": "060", # Kunst + "Gemeinschaftskunde": "48005", # Gesellschaftskunde / Sozialkunde + "Geographie": "220", # Geografie + "Gesundheit und Soziales": "", # ToDO: cannot be mapped + "Informatik und Medienbildung": "900", # Medienbildung + "Lateinisch": "20005", # Latein + "Materie Natur Technik (MNT)": "", # ToDo: cannot be mapped + "Technik": "020", # Arbeitslehrer } CATEGORY_IS_ACTUALLY_A_KEYWORD = [ - "DiLer Tutorials", "Führerscheine", "Imagefilme von Schulen", "Kanäle", "Methoden", "Naturphänomene", - "Sonstige", "Schülerprojekte", "Technik" + "DiLer Tutorials", + "Führerscheine", + "Imagefilme von Schulen", + "Kanäle", + "Methoden", + "Naturphänomene", + "Sonstige", + "Schülerprojekte", + "Technik", ] + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) + def start_requests(self) -> scrapy.Request: """ @@ -96,7 +98,7 @@ def parse_sitemap(self, response) -> scrapy.Request: # 0.5 # for sitemap_item in sitemap_items: - yield scrapy.Request(url=sitemap_item.loc, callback=self.parse_video_overview) + yield scrapy.Request(url=sitemap_item.loc, callback=self.parse_video_overview, priority=1) def parse_video_overview(self, response) -> scrapy.Request: """ @@ -113,52 +115,67 @@ def parse_video_overview(self, response) -> scrapy.Request: # the individual links from the video-overview look like this: # '/bildende-kunst/oer-video/kudivi-geschichte-der-kunstpaedagogik-03-bauhaus.html' - # logging.debug(f"Video-links from {response.url}: \n {url_list}") for url in url_list: video_url: str = 
str("https://www.dilertube.de" + url) - # self.debug_video_url_set.add(video_url) yield scrapy.Request(url=video_url, callback=self.parse) def getId(self, response=None) -> str: - pass + return response.url def getHash(self, response=None) -> str: - pass + date_str: str = self.get_published_date_from_dom(response=response) + hash_str: str = f"{date_str}v{self.version}" + return hash_str - async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + @staticmethod + def get_published_date_from_dom(response: scrapy.http.Response) -> str: """ - Gathers metadata from a video-url, nests the metadata within a BaseItemLoader and yields a complete BaseItem by - calling the .load_item()-method. - :param response: scrapy.http.Response - :param kwargs: - :return: yields a converter.items.BaseItem by calling the ".load_item()"-method on its scrapy.ItemLoader + Try to parse the published date directly from the DOM and return it as a string. + If no "veröffentlicht am"-date is found in the DOM, build a date string from datetime now as a fallback. - Scrapy Contracts: - @url https://www.dilertube.de/ethik/oer-video/solidaritaet.html - @returns item 1 + :param response: scrapy.http.Response + :return: a string containing the published date or the current time """ date_string_raw, date_string = str(), str() - date_regex = re.compile(r'((?P\d{2})\.)?' - r'((?<=\.)(?P\d{2})\.)?' - r'(?P\d{4})') - channel_info_box: list = response.xpath('//div[@class="jv-channel"]/small/text()').getall() + date_regex: re.Pattern = re.compile(r"((?P\d{2})\.)?" r"((?<=\.)(?P\d{2})\.)?" r"(?P\d{4})") + channel_info_box: list[str] = response.xpath('//div[@class="jv-channel"]/small/text()').getall() for channel_info_item in channel_info_box: if "Veröffentlicht am" in channel_info_item: date_string_raw: str = channel_info_item - if date_string_raw is not None: + if date_string_raw and isinstance(date_string_raw, str): date_string_raw = w3lib.html.strip_html5_whitespace(date_string_raw) if date_regex.search(date_string_raw): date_string = date_regex.search(date_string_raw).group() - if date_string is not None: - # ToDo RegEx discerning between Year-only and proper dates + if date_string and isinstance(date_string, str): + # ToDo RegEx distinction between Year-only and proper dates date_parsed: datetime = datetime.strptime(date_string, "%d.%m.%Y") - if date_parsed is not None: + if date_parsed: date_string = date_parsed.isoformat() else: # fallback value: current time (in case we can't gather the published_date ("Veröffentlicht am: ...") # from the DOM) date_string = datetime.now().isoformat() published_date = date_string + return published_date + + async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoader: + """ + Gathers metadata from a video-url, nests the metadata within a BaseItemLoader and yields a complete BaseItem by + calling the .load_item()-method. 
+ :param response: scrapy.http.Response + :param kwargs: + :return: yields a converter.items.BaseItem by calling the ".load_item()"-method on its scrapy.ItemLoader + + Scrapy Contracts: + @url https://www.dilertube.de/ethik/oer-video/solidaritaet.html + @returns item 1 + """ + if self.shouldImport(response) is False: + logger.info(f"Skipping item {self.getId(response)} because shouldImport() returned False.") + return + if self.getId(response) is not None and self.getHash(response) is not None: + if not self.hasChanged(response): + return # Below a video, these possible metadata fields might be available in the video-information-box: # "Lizenz" always? (freeform text, set by the video-uploader) @@ -166,170 +183,188 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade # "Quelle" optional (= the original source of the video, freeform text) # "Produktionsjahr des Videos (ca.)" optional (year, e.g. "2020") # "Produktionsdatum" optional (date, e.g. "09.03.2021") - license_description_raw = response.xpath('//div[@class="customFieldValue license"]/text()').get() + license_description_raw: str = response.xpath('//div[@class="customFieldValue license"]/text()').get() video_info_dict = dict() - if license_description_raw is not None: - license_description = w3lib.html.strip_html5_whitespace(license_description_raw) - video_info_dict.update({'license_description': license_description}) - if license_description is not None: - cc_pattern = re.compile(r'\((?PC{2})\)\s' - r'(?P\D{2}(-\D{2})*)' - r'.*' - r'(?<=\s)(?P\d\.\d)?(?=\s)' - ) - if cc_pattern.search(license_description) is not None: + if license_description_raw and isinstance(license_description_raw, str): + license_description: str = w3lib.html.strip_html5_whitespace(license_description_raw) + video_info_dict.update({"license_description": license_description}) + if license_description and isinstance(license_description, str): + # a typical license description string might look like this: + # 'Creative Commons (CC) BY-NC-ND Namensnennung-Nicht kommerziell-Keine Bearbeitungen 4.0 International' + cc_pattern: re.Pattern = re.compile( + r"\((?PC{2})\)\s" r"(?P\D{2}(-\D{2})*)" r".*" r"(?<=\s)(?P\d\.\d)?(?=\s)" + ) + if cc_pattern.search(license_description): + # the LicenseMapper does not recognize this string yet, which is why we need to trim it down in the + # crawler first and then let the LicenseMapper do the rest cc_pattern_result_dict = cc_pattern.search(license_description).groupdict() - # cc_string_ready_for_mapping = str(cc_pattern_result_dict.get("CC") + "_" - # + cc_pattern_result_dict.get("CC_TYPE").replace("-", "_") + "_" - # + cc_pattern_result_dict.get("CC_VERSION").replace(".", "")) - # ToDo map license url with converter.Constants instead? 
(some licenses are missing there) - cc_string = str(cc_pattern_result_dict.get("CC") + " " + cc_pattern_result_dict.get("CC_TYPE") - + " " + cc_pattern_result_dict.get("CC_VERSION")) - if cc_string in self.LICENSE_MAPPING.keys(): - cc_url = self.LICENSE_MAPPING.get(cc_string) - video_info_dict.update({'cc_url': cc_url}) - - video_info_box = \ - response.xpath('//ul[@class="list-group mx-0 my-0"]//div[@class="card-body"]/div[@class="mb-2"]').getall() + cc_string_prepared_for_mapping: str = ( + f"{cc_pattern_result_dict.get('CC')} " + f"{cc_pattern_result_dict.get('CC_TYPE')} " + f"{cc_pattern_result_dict.get('CC_VERSION')}" + ) + license_mapper = LicenseMapper() + mapped_license_url: str | None = license_mapper.get_license_url(cc_string_prepared_for_mapping) + if mapped_license_url: + video_info_dict.update({"cc_url": mapped_license_url}) + + video_info_box: list[str] = response.xpath( + '//ul[@class="list-group mx-0 my-0"]//div[@class="card-body"]/div[@class="mb-2"]' + ).getall() for video_info_field in video_info_box: selector_item = scrapy.Selector(text=video_info_field) video_info_field_description = selector_item.xpath('//h4[@class="customFieldLabel "]/text()').get() # the class-name "customFieldLabel " needs to come with that trailing whitespace! this is NOT A TYPO! - if video_info_field_description is not None: + if video_info_field_description: if "Autor" in video_info_field_description: - author_string = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() - if author_string is not None: + author_string: str = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() + if author_string: author_string = w3lib.html.strip_html5_whitespace(author_string) - video_info_dict.update({'author': author_string}) + video_info_dict.update({"author": author_string}) if "Quelle" in video_info_field_description: - source_string = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() - if source_string is not None: + source_string: str = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() + if source_string: source_string = w3lib.html.strip_html5_whitespace(source_string) - video_info_dict.update({'source': source_string}) + video_info_dict.update({"source": source_string}) if "Produktionsjahr" in video_info_field_description: production_year: str = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() - if production_year is not None: + if production_year: production_year = w3lib.html.strip_html5_whitespace(production_year) - video_info_dict.update({'production_year': production_year}) + video_info_dict.update({"production_year": production_year}) if "Produktionsdatum" in video_info_field_description: production_date: str = selector_item.xpath('//div[@class="customFieldValue "]/text()').get() - if production_date is not None: + if production_date: production_date = w3lib.html.strip_html5_whitespace(production_date) - video_info_dict.update({'production_date': production_date}) - - base = BaseItemLoader() + video_info_dict.update({"production_date": production_date}) - base.add_value('sourceId', response.url) - hash_temp: str = published_date + self.version - base.add_value('hash', hash_temp) - last_modified = published_date - # while this is not strictly the last_modified date, it is the only date we can gather from the OOM - base.add_value('lastModified', last_modified) - # thumbnail_url: str = response.xpath('//meta[@property="og:image"]/@content').get() - # ToDo: DiLerTube provides thumbnails, but they are locked 
behind an error 423 when directly accessing the link - # if thumbnail_url is not None: - # base.add_value('thumbnail', thumbnail_url) - - if "source" in video_info_dict.keys(): - base.add_value('publisher', video_info_dict.get("source")) + base: BaseItemLoader = BaseItemLoader() + base.add_value("sourceId", self.getId(response=response)) + base.add_value("hash", self.getHash(response=response)) + thumbnail_url: str = response.xpath('//meta[@property="og:image"]/@content').get() + if thumbnail_url and isinstance(thumbnail_url, str): + base.add_value("thumbnail", thumbnail_url) categories = list() keywords = list() - categories_and_keywords_list: list = response.xpath('//ul[@class="list-group mx-0 my-0"]/li[' - '@class="list-group-item"]').getall() + categories_and_keywords_list: list[str] = response.xpath( + '//ul[@class="list-group mx-0 my-0"]/li[' '@class="list-group-item"]' + ).getall() # categories and keywords both use the same generic class names for its elements, therefore we try to identify # the description-text and use its
-siblings to extract the text-values: - for category_or_keyword_item in categories_and_keywords_list: - selector_item = scrapy.Selector(text=category_or_keyword_item) - category_or_keyword_description = selector_item.xpath('//span[@class="title"]/text()').get() - if "Kategorie" in category_or_keyword_description: - categories_temp = selector_item.xpath('//a[@class="badge-primary badge-pill"]/text()').getall() - if len(categories_temp) >= 1: - for category_potential_candidate in categories_temp: - if category_potential_candidate.startswith("||| "): - # there are some categories which are not school-disciplines but rather keywords - # e.g. "||| Methoden": https://www.dilertube.de/sonstige/oer-videos/methoden.html - category_potential_candidate: str = category_potential_candidate.replace("||| ", "") - if category_potential_candidate in self.CATEGORY_IS_ACTUALLY_A_KEYWORD: - keywords.append(category_potential_candidate) - else: - categories.append(category_potential_candidate) - if "Schlagwörter" in category_or_keyword_description: - keywords_temp = selector_item.xpath('//a[@class="badge-primary badge-pill"]/text()').getall() - if len(keywords_temp) >= 1: - keywords.extend(keywords_temp) - - lom = LomBaseItemloader() - - general = LomGeneralItemloader() - general.add_value('identifier', response.url) - general.add_value('title', response.xpath('//meta[@property="og:title"]/@content').get()) - general.add_value('description', response.xpath('//meta[@property="og:description"]/@content').get()) - general.add_value('language', response.xpath('/html/@lang').get()) + if categories_and_keywords_list and isinstance(categories_and_keywords_list, list): + for category_or_keyword_item in categories_and_keywords_list: + selector_item = scrapy.Selector(text=category_or_keyword_item) + category_or_keyword_description = selector_item.xpath('//span[@class="title"]/text()').get() + if "Kategorie" in category_or_keyword_description: + categories_raw = selector_item.xpath('//a[@class="badge-primary badge-pill"]/text()').getall() + if categories_raw and isinstance(categories_raw, list) and len(categories_raw) >= 1: + for category_potential_candidate in categories_raw: + if category_potential_candidate.startswith("||| "): + # there are some categories which are not school-disciplines but rather keywords + # e.g. 
"||| Methoden": https://www.dilertube.de/sonstige/oer-videos/methoden.html + category_potential_candidate: str = category_potential_candidate.replace("||| ", "") + if category_potential_candidate in self.CATEGORY_IS_ACTUALLY_A_KEYWORD: + keywords.append(category_potential_candidate) + else: + categories.append(category_potential_candidate) + if "Schlagwörter" in category_or_keyword_description: + keywords_raw: list[str] = selector_item.xpath( + '//a[@class="badge-primary badge-pill"]/text()' + ).getall() + if keywords_raw and isinstance(keywords_raw, list) and len(keywords_raw) >= 1: + keywords.extend(keywords_raw) + + lom: LomBaseItemloader = LomBaseItemloader() + + general: LomGeneralItemloader = LomGeneralItemloader() + general.add_value("identifier", response.url) + general.add_value("title", response.xpath('//meta[@property="og:title"]/@content').get()) + general.add_value("description", response.xpath('//meta[@property="og:description"]/@content').get()) + general.add_value("language", response.xpath("/html/@lang").get()) # grabs the language from the html language; there seem to be additional translations of DiLerTube in the works: # the german URLs use 'de-DE' by default, # while the english translations use 'en-GB', so this looks like a suitable indicator - general.add_value('keyword', keywords) - lom.add_value('general', general.load_item()) + general.add_value("keyword", keywords) + lom.add_value("general", general.load_item()) - technical = LomTechnicalItemLoader() - technical.add_value('format', 'text/html') - technical.add_value('location', response.url) - lom.add_value('technical', technical.load_item()) + technical: LomTechnicalItemLoader = LomTechnicalItemLoader() + technical.add_value("format", "text/html") + technical.add_value("location", response.url) + lom.add_value("technical", technical.load_item()) - lifecycle = LomLifecycleItemloader() - if "production_year" in video_info_dict.keys(): + date_production_year: str | None = None + date_of_production: str | None = None + if "production_year" in video_info_dict: # this is a necessary workaround because dateparser.parse() would mis-calculate year-only representations of # the date - datetime_production_year: datetime = datetime.strptime(video_info_dict.get("production_year"), "%Y") - datetime_production_year: str = datetime_production_year.isoformat() - lifecycle.add_value('date', datetime_production_year) - if "production_date" in video_info_dict.keys(): + dt_production_year: datetime = datetime.strptime(video_info_dict.get("production_year"), "%Y") + date_production_year: str = dt_production_year.isoformat() + if "production_date" in video_info_dict: # this is a necessary workaround because dateparser.parse() would confuse de-DE time-formats as en-US - datetime_production_date: datetime = datetime.strptime(video_info_dict.get("production_date"), "%d.%m.%Y") - datetime_production_date: str = datetime_production_date.isoformat() - lifecycle.add_value('date', datetime_production_date) - lom.add_value('lifecycle', lifecycle.load_item()) - - educational = LomEducationalItemLoader() - lom.add_value('educational', educational.load_item()) - - classification = LomClassificationItemLoader() - lom.add_value('classification', classification.load_item()) - - # once you've filled "general", "technical", "lifecycle" and "educational" with values, - # the LomBaseItem is loaded into the "base"-BaseItemLoader - base.add_value('lom', lom.load_item()) - - vs = ValuespaceItemLoader() + dt_production_date: datetime = 
datetime.strptime(video_info_dict.get("production_date"), "%d.%m.%Y") + date_of_production: str = dt_production_date.isoformat() + + if "source" in video_info_dict: + lifecycle_publisher: LomLifecycleItemloader = LomLifecycleItemloader() + lifecycle_publisher.add_value("role", "publisher") + lifecycle_publisher.add_value("organization", video_info_dict.get("source")) + if date_of_production: + lifecycle_publisher.add_value("date", date_of_production) + elif date_production_year: + lifecycle_publisher.add_value("date", date_production_year) + lom.add_value("lifecycle", lifecycle_publisher.load_item()) + + if "author" in video_info_dict: + lifecycle_author: LomLifecycleItemloader = LomLifecycleItemloader() + lifecycle_author.add_value("role", "author") + lifecycle_author.add_value("firstName", video_info_dict.get("author")) + # dumping the whole author string into "firstName" is a temporary solution so we don't lose author metadata + # ToDo (optional): + # - refine author information by splitting author names into firstName and lastName? + if date_of_production: + lifecycle_author.add_value("date", date_of_production) + elif date_production_year: + lifecycle_author.add_value("date", date_production_year) + lom.add_value("lifecycle", lifecycle_author.load_item()) + + educational: LomEducationalItemLoader = LomEducationalItemLoader() + lom.add_value("educational", educational.load_item()) + + classification: LomClassificationItemLoader = LomClassificationItemLoader() + lom.add_value("classification", classification.load_item()) + + base.add_value("lom", lom.load_item()) + + vs: ValuespaceItemLoader = ValuespaceItemLoader() for category_item in categories: if category_item in self.DISCIPLINE_MAPPING.keys(): discipline = self.DISCIPLINE_MAPPING.get(category_item) - vs.add_value('discipline', discipline) + vs.add_value("discipline", discipline) else: - vs.add_value('discipline', category_item) - vs.add_value('new_lrt', "7a6e9608-2554-4981-95dc-47ab9ba924de") # Video (Material) - vs.add_value('intendedEndUserRole', ["learner", "teacher"]) - vs.add_value('conditionsOfAccess', "no login") - vs.add_value('containsAdvertisement', "no") - vs.add_value('dataProtectionConformity', "Datensparsam") + vs.add_value("discipline", category_item) + vs.add_value("new_lrt", "7a6e9608-2554-4981-95dc-47ab9ba924de") # Video (Material) + vs.add_value("intendedEndUserRole", ["learner", "teacher"]) + vs.add_value("conditionsOfAccess", "no_login") + vs.add_value("containsAdvertisement", "no") + vs.add_value("dataProtectionConformity", "generalDataProtectionRegulation") # Datensparsam # see https://www.dilertube.de/datenschutz.html - vs.add_value('price', "no") - base.add_value('valuespaces', vs.load_item()) - lic = LicenseItemLoader() - if "license_description" in video_info_dict.keys(): + vs.add_value("price", "no") + base.add_value("valuespaces", vs.load_item()) + + lic: LicenseItemLoader = LicenseItemLoader() + if "license_description" in video_info_dict: # DiLerTube allows the uploaders to enter freeform text into the license field - lic.add_value('description', video_info_dict.get("license_description")) - if "cc_url" in video_info_dict.keys(): - lic.add_value('url', video_info_dict.get("cc_url")) - if "author" in video_info_dict.keys(): - lic.add_value('author', video_info_dict.get("author")) - base.add_value('license', lic.load_item()) - - permissions = super().getPermissions(response) - base.add_value('permissions', permissions.load_item()) - - response_loader: ResponseItemLoader = await 
super().mapResponse(response) - base.add_value('response', response_loader.load_item()) + lic.add_value("description", video_info_dict.get("license_description")) + if "cc_url" in video_info_dict: + lic.add_value("url", video_info_dict.get("cc_url")) + if "author" in video_info_dict: + lic.add_value("author", video_info_dict.get("author")) + base.add_value("license", lic.load_item()) + + permissions: PermissionItemLoader = super().getPermissions(response) + base.add_value("permissions", permissions.load_item()) + + response_loader: ResponseItemLoader = await super().mapResponse(response, fetchData=False) + base.add_value("response", response_loader.load_item()) yield base.load_item() From ac487e6d2af8dd8d0612fccb20ba12b006191175 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 7 Feb 2024 18:17:27 +0100 Subject: [PATCH 439/590] docs: add LicenseMapper ToDos - added edge-cases from DiLerTube for future maintenance - style: improve readability of debug message Signed-off-by: criamos <981166+Criamos@users.noreply.github.com> --- converter/util/license_mapper.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py index 354e7a00..52988026 100644 --- a/converter/util/license_mapper.py +++ b/converter/util/license_mapper.py @@ -26,7 +26,14 @@ class LicenseMapper: ) # ToDo: - # - gather more license string edge-cases from debug crawlers for test cases + # - gather more license string edge-cases from debug crawlers for test cases: + # - DiLerTube edge-cases that cannot be handled by the above RegEx yet: + # - "Creative Commons (CC) BY-NC-ND Namensnennung-Nicht kommerziell-Keine Bearbeitungen 4.0 International" + # - "Creative Commons (CC) BY-SA Namensnennung-Weitergabe unter gleichen Bedingungen 4.0 International" + # - "Creative Commons (CC) BY Namensnennung 4.0 International" + # - add these edge-cases to the test-suite before trying to improve the RegEx! + + # ToDo: # - feature-idea: fill up provided 'LicenseItemLoader' automatically? 
# flow: try 'url' # -> fallback: try 'internal' @@ -173,7 +180,7 @@ def identify_cc_license(self, license_string: str) -> str | None: return valid_license_url elif license_string: license_string = license_string.lower() - logging.debug(f"LicenseMapper: Recognized license string '{license_string}'") + logging.debug(f"LicenseMapper: Received license string '{license_string}'") if self.cc_pattern.search(license_string): result_dict: dict = self.cc_pattern.search(license_string).groupdict() cc_type = result_dict.get("CC_TYPE") From 1819a1c40144cc8615383f4d9648138545125679 Mon Sep 17 00:00:00 2001 From: criamos <981166+criamos@users.noreply.github.com> Date: Thu, 8 Feb 2024 13:55:36 +0100 Subject: [PATCH 440/590] tutory_spider v0.2.1 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Metadata: - feat: handle "custom:" author information and save to 'license.author' / 'lifecycle.author' Performance: - feat: respect HTML robot meta tags -- items that were crawled in the past might have moved or gotten deactivated by the authors, which is indicated in the DOM header -- by checking the robot meta tags early in the program flow, we can skip items that have been deleted or are work-in-progress and not ready to be indexed yet - change: increase Autothrottle concurrency, but delay the individual API overview page requests to increase crawler performance -- each API overview request takes ~21s to load, which caused cascading side-effects with Scrapy's Autothrottle, further delaying subsequent requests Signed-off-by: Andreas Schnäpp <981166+Criamos@users.noreply.github.com> --- converter/spiders/tutory_spider.py | 38 ++++++++++++++++++++++++------ 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 316a9616..549ea973 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -30,15 +30,16 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase): url = "https://www.tutory.de/" objectUrl = "https://www.tutory.de/bereitstellung/dokument/" baseUrl = "https://www.tutory.de/api/v1/share/" - version = "0.2.0" # last update: 2024-01-26 + version = "0.2.1" # last update: 2024-02-08 custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 3, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 6, "WEB_TOOLS": WebEngine.Playwright, } API_PAGESIZE_LIMIT = 250 + # the old API pageSize of 999999 (which was used in 2021) doesn't work anymore and throws a 502 Error (Bad Gateway). # 2023-03: setting pageSize to 5000 appeared to be a reasonable value with an API response time of 12-15s # 2023-08-15: every setting above 500 appears to always return a '502'-Error now. 
Current response times during api @@ -51,9 +52,9 @@ def __init__(self, **kwargs): def start_requests(self): first_url: str = self.assemble_tutory_api_url(api_page=0) - # by increasing the priority of these requests, we're making sure to parse all API pages first and only crawl - # individual items once we've got the complete list of URLs - yield scrapy.Request(url=first_url, callback=self.parse_api_page, priority=5) + # we need to lower the priority of subsequent API page requests because each response takes about 21s while + # individual documents load within <300 ms + yield scrapy.Request(url=first_url, callback=self.parse_api_page, priority=-1) def parse_api_page(self, response: scrapy.http.TextResponse) -> scrapy.Request: """ @@ -122,6 +123,13 @@ def check_if_item_should_be_dropped(self, response) -> bool: drop_item_flag: bool = False identifier: str = self.getId(response) hash_str: str = self.getHash(response) + robot_meta_tags: str = response.xpath("//meta[@name='robots']/@content").get() + if robot_meta_tags: + if "noindex" in robot_meta_tags or "none" in robot_meta_tags: + drop_item_flag = True + logger.info(f"Robot Meta Tag {robot_meta_tags} identified: Tags 'noindex' or 'none' indicate that this " + f"item should not be indexed by the crawler. Dropping item...") + return drop_item_flag if self.shouldImport(response) is False: logger.debug(f"Skipping entry {identifier} because shouldImport() returned false") drop_item_flag = True @@ -287,7 +295,15 @@ def getLicense(self, response=None) -> LicenseItemLoader: # the 'publishName'-field seems to indicate whether the username or the full name appears on top of a # worksheet as author metadata. publish_decision: str = user_dict["publishName"] - if publish_decision == "username": + if publish_decision and publish_decision.startswith("custom:"): + # there are edge-cases where "publishName" starts with "custom:<...name of person>", which means + # that a custom string shall be used. (e.g., document id "1cdf1514-af66-475e-956c-b8487588e095" + # -> https://www.tutory.de/entdecken/dokument/class-test-checkliste-englisch ) + custom_name: str = publish_decision + custom_name = custom_name.replace("custom:", "") + if custom_name: + license_loader.add_value("author", custom_name) + elif publish_decision == "username": if "username" in user_dict: username: str = user_dict["username"] if username: @@ -313,7 +329,15 @@ async def getLOMLifecycle(self, response: scrapy.http.Response = None) -> LomLif # the 'publishName'-field seems to indicate whether the username or the full name appears on top of a # worksheet as author metadata. publish_decision: str = user_dict["publishName"] - if publish_decision == "username": + if publish_decision and publish_decision.startswith("custom:"): + # there are edge-cases where "publishName" starts with "custom:<...name of person>", which means + # that a custom string shall be used. 
(e.g., document id "1cdf1514-af66-475e-956c-b8487588e095" + # -> https://www.tutory.de/entdecken/dokument/class-test-checkliste-englisch ) + custom_name: str = publish_decision + custom_name = custom_name.replace("custom:", "") + if custom_name: + lifecycle_loader.add_value("firstName", custom_name) + elif publish_decision == "username": if "username" in user_dict: username: str = user_dict["username"] if username: From 820825205edb9241da897cd8821001c003923bc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Thu, 8 Feb 2024 14:19:45 +0100 Subject: [PATCH 441/590] chore: update dependencies MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - updated dependencies (black, certifi, playwright, pytest, urllib3) to their latest version - pyproject.toml now correctly points to Python 3.11, which is also used as the code formatting target by "black" Signed-off-by: Andreas Schnäpp <981166+Criamos@users.noreply.github.com> --- poetry.lock | 89 ++++++++++++++++++++++++------------------------ pyproject.toml | 16 ++++----- requirements.txt | 10 +++--- 3 files changed, 58 insertions(+), 57 deletions(-) diff --git a/poetry.lock b/poetry.lock index a2e70943..0996328b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "anyio" @@ -127,33 +127,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.1.0" +version = "24.1.1" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94d5280d020dadfafc75d7cae899609ed38653d3f5e82e7ce58f75e76387ed3d"}, - {file = "black-24.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aaf9aa85aaaa466bf969e7dd259547f4481b712fe7ee14befeecc152c403ee05"}, - {file = "black-24.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec489cae76eac3f7573629955573c3a0e913641cafb9e3bfc87d8ce155ebdb29"}, - {file = "black-24.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5a0100b4bdb3744dd68412c3789f472d822dc058bb3857743342f8d7f93a5a7"}, - {file = "black-24.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6cc5a6ba3e671cfea95a40030b16a98ee7dc2e22b6427a6f3389567ecf1b5262"}, - {file = "black-24.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0e367759062dcabcd9a426d12450c6d61faf1704a352a49055a04c9f9ce8f5a"}, - {file = "black-24.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be305563ff4a2dea813f699daaffac60b977935f3264f66922b1936a5e492ee4"}, - {file = "black-24.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a8977774929b5db90442729f131221e58cc5d8208023c6af9110f26f75b6b20"}, - {file = "black-24.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d74d4d0da276fbe3b95aa1f404182562c28a04402e4ece60cf373d0b902f33a0"}, - {file = "black-24.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39addf23f7070dbc0b5518cdb2018468ac249d7412a669b50ccca18427dba1f3"}, - {file = "black-24.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:827a7c0da520dd2f8e6d7d3595f4591aa62ccccce95b16c0e94bb4066374c4c2"}, - {file = "black-24.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd59d01bf3306ff7e3076dd7f4435fcd2fafe5506a6111cae1138fc7de52382"}, - {file = 
"black-24.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf8dd261ee82df1abfb591f97e174345ab7375a55019cc93ad38993b9ff5c6ad"}, - {file = "black-24.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:82d9452aeabd51d1c8f0d52d4d18e82b9f010ecb30fd55867b5ff95904f427ff"}, - {file = "black-24.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aede09f72b2a466e673ee9fca96e4bccc36f463cac28a35ce741f0fd13aea8b"}, - {file = "black-24.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:780f13d03066a7daf1707ec723fdb36bd698ffa29d95a2e7ef33a8dd8fe43b5c"}, - {file = "black-24.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a15670c650668399c4b5eae32e222728185961d6ef6b568f62c1681d57b381ba"}, - {file = "black-24.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e0fa70b8464055069864a4733901b31cbdbe1273f63a24d2fa9d726723d45ac"}, - {file = "black-24.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fa8d9aaa22d846f8c0f7f07391148e5e346562e9b215794f9101a8339d8b6d8"}, - {file = "black-24.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:f0dfbfbacfbf9cd1fac7a5ddd3e72510ffa93e841a69fcf4a6358feab1685382"}, - {file = "black-24.1.0-py3-none-any.whl", hash = "sha256:5134a6f6b683aa0a5592e3fd61dd3519d8acd953d93e2b8b76f9981245b65594"}, - {file = "black-24.1.0.tar.gz", hash = "sha256:30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc"}, + {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, + {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, + {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, + {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, + {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, + {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, + {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, + {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, + {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, + {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, + {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, + {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, + {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, + {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, + {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, + {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, + {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, + {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, + {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, + {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, + {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, + {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, ] [package.dependencies] @@ -173,13 +173,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -1277,18 +1277,18 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "playwright" -version = "1.41.1" +version = "1.41.2" description = "A high-level API to automate web browsers" optional = false python-versions = ">=3.8" files = [ - {file = "playwright-1.41.1-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b456f25db38e4d93afc3c671e1093f3995afb374f14cee284152a30f84cfff02"}, - {file = "playwright-1.41.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53ff152506dbd8527aa815e92757be72f5df60810e8000e9419d29fd4445f53c"}, - {file = "playwright-1.41.1-py3-none-macosx_11_0_universal2.whl", hash = "sha256:70c432887b8b5e896fa804fb90ca2c8baf05b13a3590fb8bce8b3c3efba2842d"}, - {file = "playwright-1.41.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:f227a8d616fd3a02d45d68546ee69947dce4a058df134a9e7dc6167c543de3cd"}, - {file = "playwright-1.41.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:475130f879b4ba38b9db7232a043dd5bc3a8bd1a84567fbea7e21a02ee2fcb13"}, - {file = "playwright-1.41.1-py3-none-win32.whl", hash = "sha256:ef769414ea0ceb76085c67812ab6bc0cc6fac0adfc45aaa09d54ee161d7f637b"}, - {file = "playwright-1.41.1-py3-none-win_amd64.whl", hash = "sha256:316e1ba0854a712e9288b3fe49509438e648d43bade77bf724899de8c24848de"}, + {file = "playwright-1.41.2-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:cf68335a5dfa4038fa797a4ba0105faee0094ebbb372547d7a27feec5b23c672"}, + {file = "playwright-1.41.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:431e3a05f8c99147995e2b3e8475d07818745294fd99f1510b61756e73bdcf68"}, + {file = "playwright-1.41.2-py3-none-macosx_11_0_universal2.whl", hash = 
"sha256:0608717cbf291a625ba6f751061af0fc0cc9bdace217e69d87b1eb1383b03406"}, + {file = "playwright-1.41.2-py3-none-manylinux1_x86_64.whl", hash = "sha256:4bf214d812092cf5b9b9648ba84611aa35e28685519911342a7da3a3031f9ed6"}, + {file = "playwright-1.41.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa17ab44622c447de26ed8f7d99912719568d8dbc3a9db0e07f0ae1487709d9"}, + {file = "playwright-1.41.2-py3-none-win32.whl", hash = "sha256:edb210a015e70bb0d328bf1c9b65fa3a08361f33e4d7c4ddd1ad2adb6d9b4479"}, + {file = "playwright-1.41.2-py3-none-win_amd64.whl", hash = "sha256:71ead0f33e00f5a8533c037c647938b99f219436a1b27d4ba4de4e6bf0567278"}, ] [package.dependencies] @@ -1470,13 +1470,13 @@ requests = ">=2.25.1" [[package]] name = "pytest" -version = "7.4.4" +version = "8.0.0" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, ] [package.dependencies] @@ -1484,7 +1484,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" +pluggy = ">=1.3.0,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] @@ -2003,17 +2003,18 @@ devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3) [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2133,4 +2134,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "bfb781f2a0a3538378685365efa2a1fb0c23a758a13dc5c4f303da60ecd172c5" +content-hash = "a00940952ef5f1880983cf58aa8529703cf887145d9f76d53123a9ddca5ac77f" diff --git a/pyproject.toml b/pyproject.toml index 07322e13..5a4be901 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ maintainers = [ "Andreas Schnäpp <981166+Criamos@users.noreply.github.com>" ] readme = "README.md" -python = "^3.10" +python = "^3.11" homepage = "https://github.com/openeduhub/oeh-search-etl" repository = "https://github.com/openeduhub/oeh-search-etl" documentation = "https://github.com/openeduhub/oeh-search-etl" @@ -17,14 +17,14 @@ keywords = ["metadata", "oer", "crawl", " wirlernenonline"] classifiers = [ "Framework :: Scrapy", "Development Status :: 4 - Beta", - "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Topic :: Education :: Testing", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", ] [tool.black] line-length = 120 -target-version = ['py310'] +target-version = ['py311'] include = '\.pyi?$' exclude = ''' @@ -60,8 +60,8 @@ packages = [{include = "converter"}] [tool.poetry.dependencies] python = "^3.10" wheel = "^0.42.0" -black = "24.1.0" -certifi="^2023.11.17" +black = "24.1.1" +certifi="2024.2.2" dateparser="1.2" extruct="0.16.0" flake8 = "7.0.0" @@ -73,16 +73,16 @@ itemloaders="1.1.0" isodate="0.6.1" overrides="3.1.0" Pillow="10.1.0" -playwright="1.41.1" +playwright="1.41.2" pyOpenSSL="23.3.0" -pytest="^7.4.3" +pytest="8.0.0" python-dateutil="2.8.2" python-dotenv="1.0.1" requests="2.31.0" six="1.16.0" Scrapy="2.11" scrapy-splash="0.9.0" -urllib3="2.1.0" +urllib3="2.2.0" vobject="0.9.6.1" w3lib="2.1.2" xmltodict="0.13.0" diff --git a/requirements.txt b/requirements.txt index 90cf180d..0dd1e44f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,8 +5,8 @@ attrs==23.2.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" babel==2.14.0 ; python_version >= "3.10" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" -black==24.1.0 ; python_version >= "3.10" and python_version < "4.0" -certifi==2023.11.17 ; python_version >= "3.10" and python_version < "4.0" +black==24.1.1 ; python_version >= "3.10" and python_version < "4.0" +certifi==2024.2.2 ; python_version >= "3.10" and python_version < "4.0" cffi==1.16.0 ; python_version >= "3.10" and python_version < "4.0" charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" @@ -54,7 +54,7 @@ parsel==1.8.1 ; python_version >= "3.10" and python_version 
< "4.0" pathspec==0.12.1 ; python_version >= "3.10" and python_version < "4.0" pillow==10.1.0 ; python_version >= "3.10" and python_version < "4.0" platformdirs==4.1.0 ; python_version >= "3.10" and python_version < "4.0" -playwright==1.41.1 ; python_version >= "3.10" and python_version < "4.0" +playwright==1.41.2 ; python_version >= "3.10" and python_version < "4.0" pluggy==1.4.0 ; python_version >= "3.10" and python_version < "4.0" protego==0.3.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1-modules==0.3.0 ; python_version >= "3.10" and python_version < "4.0" @@ -68,7 +68,7 @@ pyopenssl==23.3.0 ; python_version >= "3.10" and python_version < "4.0" pyparsing==3.1.1 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" pyrdfa3==3.6.2 ; python_version >= "3.10" and python_version < "4.0" -pytest==7.4.4 ; python_version >= "3.10" and python_version < "4.0" +pytest==8.0.0 ; python_version >= "3.10" and python_version < "4.0" python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" pytz==2023.3.post1 ; python_version >= "3.10" and python_version < "4.0" @@ -94,7 +94,7 @@ twisted==22.10.0 ; python_version >= "3.10" and python_version < "4.0" typing-extensions==4.9.0 ; python_version >= "3.10" and python_version < "4.0" tzdata==2023.4 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") tzlocal==5.2 ; python_version >= "3.10" and python_version < "4.0" -urllib3==2.1.0 ; python_version >= "3.10" and python_version < "4.0" +urllib3==2.2.0 ; python_version >= "3.10" and python_version < "4.0" vobject==0.9.6.1 ; python_version >= "3.10" and python_version < "4.0" w3lib==2.1.2 ; python_version >= "3.10" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" From 7e33bf09d568f867878376a8bb4707b5178bbd25 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Fri, 9 Feb 2024 15:55:58 +0100 Subject: [PATCH 442/590] Fix warnings in 'getLRMI()'-method MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - fix: fixed hidden ValueError when trying to use 'LrmiBase.getLRMI()' on Response objects of type 'json' -- Scrapy's older version used the 'parsel'-package <1.8 which (somehow) was less strict when erroneously trying to navigate a 'json'-object with XPath-selectors -- as of Scrapy v2.9+ trying to use 'response.xpath()' on a response object other than of type "html" will throw an Error which needs to be handled -- a bare except previously hid this problem from us, causing digitallearninglab_spider.py to throw warnings which obfuscated the real problem -- see: https://github.com/scrapy/scrapy/issues/5923 - fix: fixed weak warnings (ambiguous variable names) - fix: fixed weak warning regarding comparison with None (PEP8:E711) - optimized imports Signed-off-by: Andreas Schnäpp <981166+Criamos@users.noreply.github.com> --- converter/spiders/base_classes/lrmi_base.py | 78 ++++++++++++--------- 1 file changed, 45 insertions(+), 33 deletions(-) diff --git a/converter/spiders/base_classes/lrmi_base.py b/converter/spiders/base_classes/lrmi_base.py index c281900c..5986699b 100644 --- a/converter/spiders/base_classes/lrmi_base.py +++ b/converter/spiders/base_classes/lrmi_base.py @@ -1,15 
+1,13 @@ -import re - -from .lom_base import LomBase -from .json_base import JSONBase -import json -import time import html +import json import logging +import time + +from .json_base import JSONBase +from .lom_base import LomBase # base spider mapping data via LRMI inside the html pages # Please override the lrmi_path if necessary and add your sitemap_urls -from ...constants import Constants from ...items import LicenseItemLoader @@ -21,22 +19,36 @@ def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) def getLRMI(self, *params, response): - try: - lrmi = list( - map( - lambda x: json.loads(x.replace("\r", "").replace("\n", " ")), - response.xpath(self.lrmi_path).getall(), + lrmi: list[dict] = list() + if response: + if response.selector.type == "html": + # this check is necessary because querying a selector of type 'json' would result in an ValueError and + # throw warnings + lrmi_raw: list[str] = response.xpath(self.lrmi_path).getall() + for lrmi_item in lrmi_raw: + lrmi_item = lrmi_item.replace("\r", "") + lrmi_item = lrmi_item.replace("\n", " ") + lrmi_item = lrmi_item.replace("\t", " ") + # after these steps there might still be multiple whitespaces within a json-ld object + lrmi_item = " ".join(lrmi_item.split()) + if lrmi_item: + lrmi_object: dict = json.loads(lrmi_item) + lrmi.append(lrmi_object) + else: + logging.warning( + f"Failed parsing LRMI at {response.url} : After trying to sanitize the JSON string object, " + f"the final string was invalid." + ) + else: + logging.warning( + f"Failed parsing lrmi at {response.url} , please check source (if there was an JSON-LD available)" ) - ) - except: - logging.warning( - "failed parsing lrmi at " + response.url + ", please check source" - ) - return None - for l in lrmi: - value = JSONBase.get(self, *params, json=l) - if value != None: - return html.unescape(value) + return None + if lrmi and isinstance(lrmi, list): + for lrmi_dict in lrmi: + value = JSONBase.get(self, *params, json=lrmi_dict) + if value is not None: + return html.unescape(value) return None async def parse(self, response): @@ -46,7 +58,7 @@ def getId(self, response): return self.getLRMI("identifier", "url", "name", response=response) def getHash(self, response): - if self.get("version") != None: + if self.get("version") is not None: return self.getLRMI("version", response=response) return time.time() @@ -65,16 +77,12 @@ def getLOMGeneral(self, response): general.add_value("title", self.getLRMI("name", "headline", response=response)) general.add_value("keyword", self.getLRMI("keywords", response=response)) general.add_value("language", self.getLRMI("inLanguage", response=response)) - general.add_value( - "description", self.getLRMI("description", "about", response=response) - ) + general.add_value("description", self.getLRMI("description", "about", response=response)) return general def getLOMEducational(self, response): educational = LomBase.getLOMEducational(self, response) - educational.add_value( - "typicalLearningTime", self.getLRMI("timeRequired", response=response) - ) + educational.add_value("typicalLearningTime", self.getLRMI("timeRequired", response=response)) return educational def getValuespaces(self, response): @@ -97,11 +105,15 @@ def getLicense(self, response): # the "license" field holds a valid URL -> use it directly as is license_loader.add_value("url", license_raw) else: - logging.warning(f"Could not map the received 'license'-value {license_raw} within LrmiBase. 
" - f"Please check Constants.py and LrmiBase for missing mappings/values.") + logging.warning( + f"Could not map the received 'license'-value {license_raw} within LrmiBase. " + f"Please check Constants.py and LrmiBase for missing mappings/values." + ) else: - logging.warning("LrmiBase: The 'license'-field returned within the JSON_LD doesn't seem to be a URL.\n" - "Please check if additional license-mapping is necessary within the spider itself.") + logging.warning( + "LrmiBase: The 'license'-field returned within the JSON_LD doesn't seem to be a URL.\n" + "Please check if additional license-mapping is necessary within the spider itself." + ) return license_loader def getLOMTechnical(self, response): From 9da72dae1447454b5ed2e36557b40a213e07e9dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20Schn=C3=A4pp?= <981166+Criamos@users.noreply.github.com> Date: Fri, 9 Feb 2024 19:55:59 +0100 Subject: [PATCH 443/590] Fix broken API navigation of digitallearninglab_spider MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - while debugging warnings from LrmiBase, it became apparent that digitallearninglab_spider used only one API starting point (either "tools" or "unterrichtsbausteine", not both) which caused the crawler to prematurely abort the crawling process - fix: hasChanged() was called twice -- one of the calls was too early in the program flow, which caused warnings/errors when trying to access JSON-LD data which was not available in the API response --- the JSON-LD containers are only available in the DOM, which means that 'hasChanged()' cannot be called earlier than in the 'parse()'-method! - refactor: added missing TypeHints and improved readability of some variables and their expected types - code formatting via black Signed-off-by: Andreas Schnäpp <981166+Criamos@users.noreply.github.com> --- .../spiders/digitallearninglab_spider.py | 158 ++++++++++-------- 1 file changed, 85 insertions(+), 73 deletions(-) diff --git a/converter/spiders/digitallearninglab_spider.py b/converter/spiders/digitallearninglab_spider.py index cde295ce..ada07e77 100644 --- a/converter/spiders/digitallearninglab_spider.py +++ b/converter/spiders/digitallearninglab_spider.py @@ -8,23 +8,34 @@ from converter.constants import Constants from converter.valuespace_helper import ValuespaceHelper from .base_classes import LrmiBase, LomBase -from ..items import LicenseItemLoader, LomLifecycleItemloader, ResponseItemLoader +from ..items import ( + LicenseItemLoader, + LomLifecycleItemloader, + ResponseItemLoader, + BaseItemLoader, + LomGeneralItemloader, + LomTechnicalItemLoader, + ValuespaceItemLoader, + LomBaseItemloader, +) from ..util.license_mapper import LicenseMapper +from ..web_tools import WebEngine class DigitallearninglabSpider(CrawlSpider, LrmiBase): name = "digitallearninglab_spider" friendlyName = "digital.learning.lab" url = "https://digitallearninglab.de" - version = "0.1.4" # last update: 2023-03-08 + version = "0.1.4" # last update: 2024-02-09 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, # Digital Learning Lab recognizes and blocks crawlers that are too fast: # without the Autothrottle we'll be seeing HTTP Errors 503 (and therefore missing out on lots of items) # "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 1, - "AUTOTHROTTLE_START_DELAY": 0.25 + "AUTOTHROTTLE_TARGET_CONCURRENCY": 2, + "AUTOTHROTTLE_START_DELAY": 0.25, + "WEB_TOOLS": WebEngine.Playwright, } apiUrl = 
"https://digitallearninglab.de/api/%type?q=&sorting=latest&page=%page" # API Counts (as of 2023-03-08) @@ -45,11 +56,11 @@ def getId(self, response): def getHash(self, response): modified = self.getLRMI("dateModified", response=response) if modified: - return modified + self.version + return f"{modified}v{self.version}" # fallback if LRMI was not parsable return time.time() - def start_request(self, type, page): + def start_request(self, type: str, page: int) -> scrapy.Request: return scrapy.Request( url=self.apiUrl.replace("%page", str(page)).replace("%type", type), callback=self.parse_request, @@ -57,29 +68,31 @@ def start_request(self, type, page): meta={"page": page, "type": type}, ) + def build_initial_api_requests(self, response: scrapy.http.Response): + # the previous approach of simply having two yield requests after each other caused Scrapy to stop + # after the first request, basically skipping half the items. + # see: https://docs.scrapy.org/en/latest/topics/spiders.html#scrapy.Spider.start_requests + api_material_type_starting_points: list[str] = ["unterrichtsbausteine", "tools"] + for material_type in api_material_type_starting_points: + yield self.start_request(type=material_type, page=1) + def start_requests(self): - yield self.start_request("unterrichtsbausteine", 1) - yield self.start_request("tools", 1) + # Dummy request: check if Digital Learning Lab is online/available, then start building the actual API requests + yield scrapy.Request(url=self.url, callback=self.build_initial_api_requests) def parse_request(self, response: scrapy.http.TextResponse): - data = response.json() + data: dict = response.json() results = data.get("results") if results: for item in results: copy_response = response.replace(url=self.url + item.get("url")) copy_response.meta["item"] = item - if self.hasChanged(copy_response): - yield scrapy.Request( - url=copy_response.url, - callback=self.handle_entry, - meta={"item": item, "type": response.meta["type"]}, - ) - yield self.start_request( - response.meta["type"], response.meta["page"] + 1 - ) - - def handle_entry(self, response): - return self.parse(response) + yield scrapy.Request( + url=copy_response.url, + callback=self.parse, + meta={"item": item, "type": response.meta["type"]}, + ) + yield self.start_request(type=response.meta["type"], page=response.meta["page"] + 1) @staticmethod def get_new_lrt(response): @@ -90,7 +103,7 @@ def get_new_lrt(response): # thumbnail is always the same, do not use the one from rss def getBase(self, response): - base = LrmiBase.getBase(self, response) + base: BaseItemLoader = LrmiBase.getBase(self, response) # base.replace_value('thumbnail', self.url + '/media/' + response.meta['item'].get('image')) base.replace_value( "thumbnail", @@ -99,29 +112,25 @@ def getBase(self, response): return base def getLOMGeneral(self, response): - general = LrmiBase.getLOMGeneral(self, response) - general.replace_value( - "title", html.unescape(response.meta["item"].get("name").strip()) - ) + general: LomGeneralItemloader = LrmiBase.getLOMGeneral(self, response) + general.replace_value("title", html.unescape(response.meta["item"].get("name").strip())) json_ld_description = self.getLRMI("description", response=response) if json_ld_description: - general.add_value('description', json_ld_description) + general.add_value("description", json_ld_description) else: # fallback via DLL API: shorter "teaser"-description - general.add_value( - "description", html.unescape(response.meta["item"].get("teaser")) - ) + 
general.add_value("description", html.unescape(response.meta["item"].get("teaser"))) # general.add_value('keyword', list(filter(lambda x: x,map(lambda x: x.strip(), response.xpath('//*[@id="ContentModuleApp"]//*[@class="topic-name"]//text()').getall())))) return general def getLOMTechnical(self, response): - technical = LrmiBase.getLOMTechnical(self, response) + technical: LomTechnicalItemLoader = LrmiBase.getLOMTechnical(self, response) technical.replace_value("format", "text/html") technical.replace_value("location", response.url) return technical def get_lifecycle_author(self, response): - lifecycle_loader = LomLifecycleItemloader() + lifecycle_loader: LomLifecycleItemloader = LomLifecycleItemloader() json_ld_authors: list[dict] = self.getLRMI("author", response=response) if json_ld_authors: for author_item in json_ld_authors: @@ -129,36 +138,36 @@ def get_lifecycle_author(self, response): author_type = author_item["@type"] if author_type == "Person": if "name" in author_item: - lifecycle_loader.add_value('role', 'author') - lifecycle_loader.add_value('firstName', author_item["name"]) + lifecycle_loader.add_value("role", "author") + lifecycle_loader.add_value("firstName", author_item["name"]) if "sameAs" in author_item: - lifecycle_loader.add_value('url', author_item["sameAs"]) + lifecycle_loader.add_value("url", author_item["sameAs"]) elif author_type == "Organization": if "name" in author_item: - lifecycle_loader.add_value('role', 'publisher') - lifecycle_loader.add_value('organization', author_item["name"]) + lifecycle_loader.add_value("role", "publisher") + lifecycle_loader.add_value("organization", author_item["name"]) if "sameAs" in author_item: - lifecycle_loader.add_value('url', author_item["sameAs"]) + lifecycle_loader.add_value("url", author_item["sameAs"]) return lifecycle_loader def get_lifecycle_metadata_provider(self, response, provider_item: dict = None): if provider_item: - lifecycle_loader = LomLifecycleItemloader() - provider_name = provider_item.get("name") - provider_url = provider_item.get("sameAs") - date_published = self.getLRMI("datePublished", response=response) + lifecycle_loader: LomLifecycleItemloader = LomLifecycleItemloader() + provider_name: str = provider_item.get("name") + provider_url: str = provider_item.get("sameAs") + date_published: str = self.getLRMI("datePublished", response=response) if provider_name: - lifecycle_loader.add_value('role', 'metadata_provider') - lifecycle_loader.add_value('organization', provider_name) + lifecycle_loader.add_value("role", "metadata_provider") + lifecycle_loader.add_value("organization", provider_name) if provider_url: - lifecycle_loader.add_value('url', provider_url) + lifecycle_loader.add_value("url", provider_url) if date_published: - lifecycle_loader.add_value('date', date_published) + lifecycle_loader.add_value("date", date_published) return lifecycle_loader def getLicense(self, response): license_loader: LicenseItemLoader = LomBase.getLicense(self, response) - license_raw = self.getLRMI("license", response=response) + license_raw: str = self.getLRMI("license", response=response) json_ld_authors: list[dict] = self.getLRMI("author", response=response) authors = set() # by adding all authors to a set, we're making sure to only save unique author names if json_ld_authors: @@ -166,10 +175,10 @@ def getLicense(self, response): # itself provides a "co_author"-field (which will be used later on in lifecycle 'role' -> 'unknown') for author_item in json_ld_authors: if "name" in author_item: - author_name = 
author_item["name"] + author_name: str = author_item["name"] authors.add(author_name) if authors: - license_loader.add_value('author', authors) + license_loader.add_value("author", authors) if license_raw: license_mapper = LicenseMapper() license_url = license_mapper.get_license_url(license_string=license_raw) @@ -177,17 +186,19 @@ def getLicense(self, response): if license_url: license_loader.replace_value("url", license_url) elif license_internal: - license_loader.add_value('internal', license_internal) + license_loader.add_value("internal", license_internal) else: # Footer: "Inhalte der Seite stehen unter CC BY-SA 4.0 Lizenz, wenn nicht anders angegeben." - logging.debug(f"DigitalLearningLabs did not provide a valid license for {response.url} . Setting fallback " - f"value CC-BY-SA 4.0.") - license_loader.add_value('url', Constants.LICENSE_CC_BY_SA_40) # default for every item + logging.debug( + f"DigitalLearningLabs did not provide a valid license for {response.url} . Setting fallback " + f"value CC-BY-SA 4.0." + ) + license_loader.add_value("url", Constants.LICENSE_CC_BY_SA_40) # default for every item return license_loader def getValuespaces(self, response): - valuespaces = LrmiBase.getValuespaces(self, response) - valuespaces.replace_value('new_lrt', self.get_new_lrt(response)) + vs_loader: ValuespaceItemLoader = LrmiBase.getValuespaces(self, response) + vs_loader.replace_value("new_lrt", self.get_new_lrt(response)) # ToDo: scrape DOM (left bar) for additional metadata: # - 'conditionsOfAccess' # - dataProtectionConformity? @@ -202,7 +213,7 @@ def getValuespaces(self, response): .split(" - ") ) if len(range): - valuespaces.add_value( + vs_loader.add_value( "educationalContext", ValuespaceHelper.educationalContextByGrade(range), ) @@ -212,15 +223,15 @@ def getValuespaces(self, response): discipline = response.xpath( '//ul[@class="sidebar__information"]/li[@class="sidebar__information-item"]/*[contains(@class,"icon-subject")]/parent::*//text()' ).getall() - valuespaces.add_value("discipline", discipline) + vs_loader.add_value("discipline", discipline) # ToDo: implement a proper 'discipline'-mapping with the 'digitalCompetencies'-update of the crawler except: pass item_type = response.meta["item"].get("type") # the DLL API currently provides only 3 values for "type": 'teaching-module', 'tool', 'trend' - valuespaces.add_value("new_lrt", item_type) + vs_loader.add_value("new_lrt", item_type) if item_type == "teaching-module": - valuespaces.replace_value("new_lrt", "5098cf0b-1c12-4a1b-a6d3-b3f29621e11d") # Unterrichtsbaustein + vs_loader.replace_value("new_lrt", "5098cf0b-1c12-4a1b-a6d3-b3f29621e11d") # Unterrichtsbaustein try: tool_type = list( map( @@ -231,40 +242,41 @@ def getValuespaces(self, response): ) ) # @TODO: proper mapping, maybe specialised tool field? 
- valuespaces.add_value("new_lrt", tool_type) + vs_loader.add_value("new_lrt", tool_type) except: pass - return valuespaces + # ToDo: fix above PEP8:E722 warnings (too broad 'except' clauses) asap + return vs_loader - async def parse(self, response, **kwargs): + async def parse(self, response: scrapy.http.HtmlResponse, **kwargs): if self.shouldImport(response) is False: - logging.debug( - "Skipping entry {} because shouldImport() returned false".format(str(self.getId(response))) - ) + logging.debug("Skipping entry {} because shouldImport() returned false".format(str(self.getId(response)))) return None if self.getId(response) is not None and self.getHash(response) is not None: if not self.hasChanged(response): return None - base = self.getBase(response) + base: BaseItemLoader = self.getBase(response) # ToDo: educational -> competencies ("ccm:competencies")? - lom = self.getLOM(response) + lom: LomBaseItemloader = self.getLOM(response) if self.getLRMI("author", response=response): - lom.add_value('lifecycle', self.get_lifecycle_author(response).load_item()) + lom.add_value("lifecycle", self.get_lifecycle_author(response).load_item()) provider_list: list[dict] = self.getLRMI("provider", response=response) # there might be multiple providers within the "provider"-field of the json_ld if provider_list: for provider_item in provider_list: - lom.add_value("lifecycle", self.get_lifecycle_metadata_provider(response, provider_item=provider_item).load_item()) + lom.add_value( + "lifecycle", self.get_lifecycle_metadata_provider(response, provider_item=provider_item).load_item() + ) if "co_authors" in response.meta["item"]: - co_authors: list = response.meta["item"]["co_authors"] + co_authors: list[str] = response.meta["item"]["co_authors"] if co_authors: for co_author in co_authors: lifecycle_unknown_item_loader = LomLifecycleItemloader() if co_author: - lifecycle_unknown_item_loader.add_value('role', 'unknown') - lifecycle_unknown_item_loader.add_value('firstName', co_author) - lom.add_value('lifecycle', lifecycle_unknown_item_loader.load_item()) + lifecycle_unknown_item_loader.add_value("role", "unknown") + lifecycle_unknown_item_loader.add_value("firstName", co_author) + lom.add_value("lifecycle", lifecycle_unknown_item_loader.load_item()) base.add_value("lom", lom.load_item()) base.add_value("license", self.getLicense(response).load_item()) base.add_value("permissions", self.getPermissions(response).load_item()) From 211a7d3d24309baa95014d50601c3e3c907cf5dc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 13 Feb 2024 12:37:39 +0100 Subject: [PATCH 444/590] change: lower Autothrottle target concurrency --- converter/spiders/tutory_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/tutory_spider.py b/converter/spiders/tutory_spider.py index 549ea973..9599c56a 100644 --- a/converter/spiders/tutory_spider.py +++ b/converter/spiders/tutory_spider.py @@ -34,7 +34,7 @@ class TutorySpider(CrawlSpider, LomBase, JSONBase): custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 6, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 2, "WEB_TOOLS": WebEngine.Playwright, } From 5bad991cc3c3958c1c586964a073346cd51b4c52 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 13 Feb 2024 17:18:10 +0100 Subject: [PATCH 445/590] add: bpb_spider pyCharm runConfiguration --- .run/bpb_spider.run.xml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 
insertions(+) create mode 100644 .run/bpb_spider.run.xml diff --git a/.run/bpb_spider.run.xml b/.run/bpb_spider.run.xml new file mode 100644 index 00000000..eac6ac6e --- /dev/null +++ b/.run/bpb_spider.run.xml @@ -0,0 +1,26 @@ + + + + + + \ No newline at end of file From e469be526bb40c5479c3a7dbdf1c001d6343989d Mon Sep 17 00:00:00 2001 From: criamos <981166+criamos@users.noreply.github.com> Date: Tue, 13 Feb 2024 17:18:50 +0100 Subject: [PATCH 446/590] bpb_spider v0.2.1 (complete rework after bpb website relaunch) - completely rewrote the bpb_spider from scratch due to bpb's new website structure -- version-bumped the crawler from v0.1.x to 0.2.x to emphasize this big change Background: - the old bpb.de crawler was not able to run anymore since bpb.de completely relaunched their website in the beginning of 2022 and offered no sitemap anymore, which our crawler depended on - recently bpb.de started providing a sitemap and (several) RSS feeds, which can be found here: -- there's an HTML sitemap available at https://www.bpb.de/die-bpb/ueber-uns/service/sitemap/ -- but they also serve a Sitemap-Index in the last line of their robots.txt (see: https://www.bpb.de/robots.txt) --- the Sitemap-Index currently provides 2 XML files which need to be filtered for sitemap-paths which we want to crawl and paths which should be ignored -- the RSS feeds (see: https://www.bpb.de/die-bpb/ueber-uns/service/rss/ ) only list the most recent <30 entries, which sadly is not useful enough for the webcrawler, but might come handy in the future --- converter/spiders/bpb_spider.py | 601 +++++++++++++++++++++++--------- 1 file changed, 433 insertions(+), 168 deletions(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index 770825c2..15ea1724 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -1,181 +1,446 @@ -from scrapy import Request +import html +import logging +from typing import Any, Union -from scrapy.spiders import CrawlSpider, Rule -from scrapy.linkextractors import LinkExtractor -from .base_classes import LrmiBase -from typing import List +import extruct +from scrapy.http import Response +from scrapy.spiders import SitemapSpider +from twisted.internet.defer import Deferred +from converter.spiders.base_classes import LomBase +from ..constants import Constants +from ..es_connector import EduSharing +from ..items import ( + LomBaseItemloader, + BaseItemLoader, + ResponseItemLoader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + LicenseItemLoader, + ValuespaceItemLoader, + LomClassificationItemLoader, + PermissionItemLoader, +) +from ..web_tools import WebEngine -class BpbSpider(LrmiBase, CrawlSpider): - name = "bpb_spider" +logger = logging.getLogger(__name__) + + +class BpbSpider(SitemapSpider, LomBase): + name = "bpb_spider" url = "https://www.bpb.de" friendlyName = "Bundeszentrale für politische Bildung" - start_urls = ["https://www.bpb.de/sitemap/"] + sitemap_urls = [ + "https://www.bpb.de/robots.txt", + # "https://www.bpb.de/sites/default/files/xmlsitemap/oWx2Pl033k1XFmYJFOs7sO0G3JasH0cjDbduvDwKuwo/index.xml" + ] + # the most-current sitemap can be found at the bottom of the robots.txt file and contains a sitemap-index. 
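Editorial note on the mechanism relied on here: Scrapy's SitemapSpider follows a robots.txt URL listed in `sitemap_urls` to the sitemap index it advertises, then matches every discovered URL against the `(pattern, callback-name)` pairs in `sitemap_rules`, using the first pattern that matches; URLs matching no rule are never requested. A minimal sketch of that mechanism (example domain, not bpb.de):

from scrapy.spiders import SitemapSpider


class SitemapRulesSketch(SitemapSpider):
    name = "sitemap_rules_sketch"
    # pointing at robots.txt lets Scrapy discover the advertised sitemap index on its own
    sitemap_urls = ["https://example.org/robots.txt"]
    sitemap_rules = [
        ("/themen/", "parse"),    # first matching pattern wins
        ("/shop/", "drop_item"),  # explicit rule so the drop can still be logged/counted
    ]

    def parse(self, response):
        yield {"url": response.url}

    def drop_item(self, response):
        return None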
+ # the human-readable sitemap can be found at: https://www.bpb.de/sitemap/ allowed_domains = ["bpb.de"] - allow_list = [ - "politik", - "internationales", - "geschichte", - "gesellschaft", - # "nachschlagen", # refers to some kind of glossar, which we might not need - "lernen", - "mediathek" + sitemap_rules = [ + ("/themen/", "parse"), + ("/mediathek/", "parse"), + ("/lernen/", "parse"), + ("/kurz-knapp/", "parse"), + ("/shop/", "drop_item"), + ("/veranstaltungen/", "drop_item"), # ToDo: implement custom handling for events in a future version + ("/die-bpb/", "drop_item"), ] + version = "0.2.1" # last update: 2024-02-14 + # (first version of the crawler after bpb.de completely relaunched their website in 2022-02) + custom_settings = { + "WEB_TOOLS": WebEngine.Playwright, + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True, + } + DEBUG_DROPPED_ITEMS: list[str] = list() + + def __init__(self, **kwargs): + SitemapSpider.__init__(self, **kwargs) + LomBase.__init__(self, **kwargs) - deny_list: tuple = ( - "/suche", - "/glossar" - ) + def close(self, reason: str) -> Union[Deferred, None]: + # ToDo (optional): extend functionality by counting filtered duplicates as well + # (-> extend Scrapy Dupefilter logging) + if self.DEBUG_DROPPED_ITEMS: + logger.info( + f"Summary: Items dropped in total (due to sitemap rules / robot meta tags etc.): " + f"{len(self.DEBUG_DROPPED_ITEMS)}" + ) + return - version = "0.1.2" # the version of your crawler, used to identify if a reimport is necessary + def drop_item(self, response: Response) -> None: + """ + URLs which should not be crawled are dropped and logged. At the end of the crawl process, a counter will + display the amount of dropped items for debugging purposes. + """ + logger.debug(f"Dropping item {response.url} due to specified sitemap rules.") + self.DEBUG_DROPPED_ITEMS.append(response.url) + return - rules = ( - Rule(LinkExtractor(allow=()), process_links="process_links", - callback="parse_links", follow=True), - ) + @staticmethod + def get_json_ld_property(json_lds: list[dict], property_name: str) -> Any | None: + # JSON-LD embeds on bpb.de typically look like this (best-case scenario): + # { + # "@context": "http:\/\/schema.org", + # "@type": "Article", + # "author": "Bundeszentrale f\u00fcr politische Bildung", + # "headline": "Die USA zwischen Internationalismus und Isolationismus", + # "datePublished": "2023-02-02", + # "dateCreated": "2023-01-25", + # "dateModified": "2023-02-02", + # "mainEntityOfPage": "https:\/\/www.bpb.de\/themen\/nordamerika\/usa\/517667\/die-usa-zwischen-internationalismus-und-isolationismus\/", + # "description": "Die USA sind die bedeutendste Weltordnungsmacht. Doch immer wieder scheint ihre Au\u00dfen- und Sicherheitspolitik zwischen den beiden Extremen Internationalismus und Isolationismus hin und her zu pendeln. 
Wie kommt es dazu?", + # "keywords": "USA,Au\u00dfenpolitik der USA,Weltordnung,Internationalismus,Isolationismus", + # "publisher": { + # "@type": "Organization", + # "name": "Bundeszentrale f\u00fcr politische Bildung", + # "logo": { + # "@type": "ImageObject", + # "url": "https:\/\/www.bpb.de\/themes\/custom\/bpbtheme\/images\/bpb_logo_ldjson.jpg", + # "width": "144", + # "height": "60" + # } + # }, + # "image": { + # "@type": "ImageObject", + # "url": "https:\/\/www.bpb.de\/cache\/images\/7\/759897_teaser_3x2_800.jpg?0326D", + # "width": 800, + # "height": 534 + # } + # } + if json_lds and isinstance(json_lds, list): + for json_ld in json_lds: + if property_name in json_ld: + property_value = json_ld.get(property_name) + if property_value and isinstance(property_value, str): + property_value = html.unescape(property_value) + return property_value + else: + return None - def start_requests(self): - yield Request(url = self.start_urls[0], callback = self.parse) + @staticmethod + def get_opengraph_property(opengraph_dict: dict, property_name: str) -> Any | None: + # after using extruct for the opengraph data, the resulting dictionary will have a 'opengraph'-key if + # extraction was successful. Within that key there will be a list[dict]. + if opengraph_dict and isinstance(opengraph_dict, dict): + if "opengraph" in opengraph_dict: + og_list: list[dict] = opengraph_dict["opengraph"] + for opengraph in og_list: + if property_name in opengraph: + return opengraph.get(property_name) + else: + return None - def parse(self, response): - return CrawlSpider.parse(self, response) + def getId(self, response: Response = None, json_lds: list[dict] = None, opengraph: dict = None) -> str: + item_url: str = str() + if json_lds: + main_entity_of_page: str = self.get_json_ld_property(json_lds, property_name="mainEntityOfPage") + if main_entity_of_page: + main_entity_of_page: str = html.unescape(main_entity_of_page) + item_url = main_entity_of_page + if opengraph: + og_url: str = self.get_opengraph_property(opengraph_dict=opengraph, property_name="og:url") + if og_url: + item_url = og_url + if item_url: + return item_url + elif response: + logger.warning(f"Item {response.url} did not provide a stable ID (url). 
Falling back to response.url ...") + return response.url - def __init__(self, **kwargs): - LrmiBase.__init__(self, **kwargs) - CrawlSpider.__init__(self, **kwargs) - - def process_links(self, links): - for link in links: - try: - if link.url.split("/")[3] not in self.allow_list: - continue - elif link.url.endswith(self.deny_list): - continue - yield link - except IndexError: - pass - - def parse_links(self, response): - return LrmiBase.parse(self, response) - - # return a (stable) id of the source - def getId(self, response): - return self.getLRMI("mainEntityOfPage", response=response) - - def getBase(self, response): - base = LrmiBase.getBase(self, response) - base.replace_value("thumbnail", None) - return base - - def getKeywords(self, response) -> List[str]: - keywords = self.getLRMI("keywords", response=response) - if keywords.strip(): - return[keyword.strip() for keyword in keywords.split(",")] - return [] - def getLOMGeneral(self, response): - general = LrmiBase.getLOMGeneral(self, response) - general.replace_value("title", self.getLRMI("name", "headline", response=response).replace(" | bpb", "")) - general.replace_value("identifier", self.getLRMI( - "mainEntityOfPage", response=response)) - - # Keywords (use try catch, some entries might not have keywords) - try: - general.replace_value("keyword", self.getKeywords(response)) - except: - pass - - # Language TODO fill in value by hand or leave empty? - general.add_value("language", self.getLRMI( - "inLanguage", response=response)) - - # Description - general.add_value( - "description", self.getLRMI( - "description", response=response) - ) - return general - - def getLOMLifecycle(self, response): - name = self.getLRMI("author", response=response) - lifecycle = LrmiBase.getLOMLifecycle(self, response) - - if name == "Bundeszentrale für politische Bildung": - lifecycle.add_value("role", "author") - # if author organization - lifecycle.add_value( - "organization", name) - - elif name == "Redaktion": - lifecycle.add_value("role", "author") - # if author organization - lifecycle.add_value( - "organization", name) - - elif "Redaktion werkstatt.bpb.de" in name: - lifecycle.add_value("role", "author") - # if author organization - lifecycle.add_value( - "organization", name) - - elif ", " not in name: - # maybe one author - lifecycle.add_value("role", "author") - author = name.split(" ") - lifecycle.add_value("firstName", " ".join(author[:-1]).strip()) - lifecycle.add_value("lastName", author[-1].strip()) - - elif ", " in name: - for author_name in name.split(","): - lifecycle.add_value("role", "author") - author = author_name.split(" ") - lifecycle.add_value("firstName", " ".join(author[:-1]).strip()) - lifecycle.add_value("lastName", author[-1].strip()) - - elif "und" in name: - for author_name in name.split("und"): - lifecycle.add_value("role", "author") - author = author_name.split(" ") - lifecycle.add_value("firstName", " ".join(author[:-1]).strip()) - lifecycle.add_value("lastName", author[-1].strip()) - - return lifecycle - - def getLicense(self, response): - license = LrmiBase.getLicense(self, response) - license_value: str = response.xpath( - "//div[@class='cc-license']/a/@href").get() - if license_value: - # remove language link from license - if license_value.endswith("deed.de"): - license_value = license_value[:-len("deed.de")] - elif license_value.endswith("de/"): - license_value = license_value[:-len("de/")] - # oeh crawling constants all use https - license_value = license_value.replace("http://", "https://") - 
license.replace_value("url", license_value) - return license - - def getLOMTechnical(self, response): - technical = LrmiBase.getLOMTechnical(self, response) - technical.replace_value("format", "text/html") - # technical.add_value("size", self.getLRMI( - # "ContentSize", response=response)) - url = self.getLRMI("mainEntityOfPage", response=response) - if not url: - url = response.url - technical.replace_value("location", url) - return technical - - def getValuespaces(self, response): - valuespaces = LrmiBase.getValuespaces(self, response) - disciplines = ["politik", "geschichte"] - for discipline in disciplines: - if "/" + discipline in response.url: - valuespaces.add_value("discipline", discipline) - - # try to map keywords to known disciplines - try: - valuespaces.add_value("discipline", self.getKeywords(response)) - except: - pass - return valuespaces \ No newline at end of file + def getHash(self, response: Response = None, json_lds: list[dict] = None) -> str: + hash_str: str | None = None + if json_lds: + json_ld_date_modified: str = self.get_json_ld_property(json_lds, property_name="dateModified") + json_ld_date_created: str = self.get_json_ld_property(json_lds, property_name="dateCreated") + json_ld_date_published: str = self.get_json_ld_property(json_lds, property_name="datePublished") + if json_ld_date_modified: + # 'dateModified' is our first priority: this will be the most precise date for hash checks + hash_str = f"{json_ld_date_modified}v{self.version}" + return hash_str + elif json_ld_date_created: + hash_str = f"{json_ld_date_created}v{self.version}" + return hash_str + elif json_ld_date_published: + hash_str = f"{json_ld_date_published}v{self.version}" + return hash_str + if hash_str is None and response: + # fallback to DOM meta property 'last-modified' if the JSON-LD didn't provide a more precise date + meta_last_modified: str = response.xpath("//meta[@name='last-modified']/@content").get() + if meta_last_modified: + hash_str = f"{meta_last_modified}v{self.version}" + return hash_str + + def get_keywords(self, response: Response = None, json_lds: list[dict] = None) -> list[str] | None: + # ToDo: if 'keywords' become available in the HTML header (in the future), we might need the Response object as + # our fallback for edge-cases where no JSON-LD was available. 
+ if json_lds: + json_ld_keywords: str = self.get_json_ld_property(json_lds, property_name="keywords") + if json_ld_keywords: + if json_ld_keywords.strip(): + if "," in json_ld_keywords: + # default case for German articles + return [keyword.strip() for keyword in json_ld_keywords.split(",")] + if ";" in json_ld_keywords: + # edge case: international articles (English / French) often have keywords split by semicolon + return [keyword.strip() for keyword in json_ld_keywords.split(";")] + else: + return None + + def has_changed(self, response: Response, identifier: str, hash_str: str) -> bool: + identifier: str = identifier + hash_str: str = hash_str + uuid_str: str = self.getUUID(response) + if self.forceUpdate: + return True + if self.uuid: + if uuid_str == self.uuid: + logger.info(f"Matched requested uuid: {self.uuid} ({identifier}).") + return True + return False + if self.remoteId: + if identifier == self.remoteId: + logger.info(f"Matched requested remoteId {self.remoteId} ({identifier}).") + return True + return False + db = EduSharing().find_item(identifier, self) + changed = db is None or db[1] != hash_str + if not changed: + logger.info(f"Item {identifier} has not changed.") + return changed + + def check_if_item_should_be_dropped(self, response, json_lds, opengraph_dict) -> bool: + drop_item_flag: bool = False + identifier: str = self.getId(response=response, json_lds=json_lds, opengraph=opengraph_dict) + hash_str: str = self.getHash(response=response, json_lds=json_lds) + if self.shouldImport(response) is False: + logger.info(f"Skipping entry {identifier} because shouldImport() returned 'False'.") + drop_item_flag = True + if identifier is not None and hash_str is not None: + if not self.has_changed(response=response, identifier=identifier, hash_str=hash_str): + drop_item_flag = True + robot_meta_tags: list[str] = response.xpath("//meta[@name='robots']/@content").getall() + if robot_meta_tags: + # see: https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag + if "noindex" in robot_meta_tags or "none" in robot_meta_tags: + logging.info( + f"Robot Meta Tag {robot_meta_tags} identified. Robot Meta Tags 'noindex' or 'none' should " + f"be skipped by the crawler. Dropping item {response.url} ." 
+ ) + drop_item_flag = True + return drop_item_flag + + async def parse(self, response: Response, **kwargs: Any) -> Any: + jslde = extruct.JsonLdExtractor() + json_lds: list[dict] = jslde.extract(response.body) + opengraph_dict: dict = extruct.extract(htmlstring_or_tree=response.body, syntaxes=["opengraph"], uniform=True) + + drop_item_flag = self.check_if_item_should_be_dropped(response, json_lds, opengraph_dict) + if drop_item_flag: + self.DEBUG_DROPPED_ITEMS.append(response.url) + return + + base_itemloader: BaseItemLoader = BaseItemLoader() + + source_id: str = self.getId(response=response, json_lds=json_lds, opengraph=opengraph_dict) + base_itemloader.add_value("sourceId", source_id) + hash_value: str = self.getHash(response=response, json_lds=json_lds) + base_itemloader.add_value("hash", hash_value) + + json_ld_date_modified: str = self.get_json_ld_property(json_lds, property_name="dateModified") + if json_ld_date_modified: + base_itemloader.add_value("lastModified", json_ld_date_modified) + + json_ld_image: dict = self.get_json_ld_property(json_lds, property_name="image") + og_image: str = self.get_opengraph_property(opengraph_dict, property_name="og:image") + og_image_url: str = self.get_opengraph_property(opengraph_dict, property_name="og:image:url") + # og_image_alt: str = self.get_opengraph_property(opengraph_dict, property_name="og:image:alt") + # ToDo: the image altLabel cannot be saved yet, there exists no edu-sharing attribute in items.py (yet) + # og_image_type: str = self.get_opengraph_property(opengraph_dict, property_name="og:image:type") + # ToDo: the image type cannot be saved yet, there exists no edu-sharing property in items.py (yet) + if json_ld_image and "url" in json_ld_image: + image_url: str = json_ld_image.get("url") + image_url = html.unescape(image_url) + if image_url and image_url != "https://www.bpb.de": + # there are hundreds of items with wrong JSON-LD metadata. 
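Editorial note: the two extruct calls at the top of `parse()` produce the structures that `get_json_ld_property()` and `get_opengraph_property()` navigate: a plain list of JSON-LD dicts, and a dict keyed by syntax name whose "opengraph" entry is a list of flattened property dicts. A small illustration with sample HTML (not a real bpb.de page; exact output may vary by extruct version):

import extruct

sample_html = b"""<html><head prefix="og: http://ogp.me/ns#">
<script type="application/ld+json">
{"@context": "http://schema.org", "@type": "Article", "headline": "Beispiel", "datePublished": "2023-02-02"}
</script>
<meta property="og:title" content="Beispiel"/>
<meta property="og:url" content="https://example.org/beispiel/"/>
</head><body></body></html>"""

# list[dict] with one entry per <script type="application/ld+json"> block
json_lds: list[dict] = extruct.JsonLdExtractor().extract(sample_html)
# with uniform=True the Open Graph data arrives as {"opengraph": [ {...}, ... ]}
opengraph: dict = extruct.extract(sample_html, syntaxes=["opengraph"], uniform=True)

print(json_lds[0].get("headline"))                # expected: "Beispiel"
print(opengraph["opengraph"][0].get("og:title"))  # expected: "Beispiel"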
+ # (e.g.: https://www.bpb.de/kurz-knapp/lexika/politiklexikon/296491/shitstorm/ ) + # The image object will typically look like this one: + # "image": { + # "@type": "ImageObject", + # "url": "https:\/\/www.bpb.de", + # "width": "", + # "height": "" + # } + # since the above item is not a valid ImageObject, we need to make sure to not save those urls + base_itemloader.add_value("thumbnail", image_url) + elif og_image: + base_itemloader.add_value("thumbnail", og_image) + elif og_image_url: + base_itemloader.add_value("thumbnail", og_image_url) + + lom_base_itemloader: LomBaseItemloader = LomBaseItemloader() + + general_itemloader: LomGeneralItemloader = LomGeneralItemloader() + json_ld_headline: str = self.get_json_ld_property(json_lds, property_name="headline") + og_title: str = self.get_opengraph_property(opengraph_dict, property_name="og:title") + if json_ld_headline: + general_itemloader.add_value("title", json_ld_headline) + elif og_title: + general_itemloader.add_value("title", og_title) + + keywords: list[str] | None = self.get_keywords(response=response, json_lds=json_lds) + if keywords: + general_itemloader.add_value("keyword", keywords) + + json_ld_description: str = self.get_json_ld_property(json_lds, property_name="description") + og_description: str = self.get_opengraph_property(opengraph_dict, property_name="og:description") + if json_ld_description: + general_itemloader.add_value("description", json_ld_description) + elif og_description: + general_itemloader.add_value("description", og_description) + identifier_url: str = self.getId(response=response, json_lds=json_lds) + if identifier_url: + general_itemloader.add_value("identifier", identifier_url) + in_language: str = self.get_json_ld_property(json_lds, property_name="inLanguage") + if in_language: + general_itemloader.add_value("language", in_language) + else: + html_language: str = response.xpath("//html/@lang").get() + if html_language: + general_itemloader.add_value("language", html_language) + + technical_itemloader: LomTechnicalItemLoader = LomTechnicalItemLoader() + technical_itemloader.add_value("format", "text/html") + # ToDo: confirm if hard-coding "text/html" is still a desired pattern for crawler items + if source_id.startswith("http"): + technical_itemloader.add_value("location", source_id) + og_url: str = self.get_opengraph_property(opengraph_dict, property_name="og:url") + if source_id != response.url or og_url and source_id != og_url: + # make sure to only save values that are different from our URL identifier + if response.url != og_url: + technical_itemloader.add_value("location", og_url) + technical_itemloader.add_value("location", response.url) + elif og_url: + technical_itemloader.add_value("location", og_url) + else: + technical_itemloader.add_value("location", response.url) + + json_ld_date_published: str = self.get_json_ld_property(json_lds, property_name="datePublished") + json_ld_date_created: str = self.get_json_ld_property(json_lds, property_name="dateCreated") + + json_ld_author: str = self.get_json_ld_property(json_lds, property_name="author") + if json_ld_author: + lifecycle_author: LomLifecycleItemloader = LomLifecycleItemloader() + lifecycle_author.add_value("role", "author") + if json_ld_author == "Bundeszentrale für politische Bildung": + lifecycle_author.add_value("organization", json_ld_author) + else: + # author names cannot be (safely) split into firstName / lastName by comma because the strings vary too + # much to make a safe assumption. 
Oftentimes commas indicate a specification, not a separate person: + # e.g. "Judyth Twigg (Virginia Commonwealth University, Richmond)" + # therefore we have no other choice than saving the complete string to firstName + lifecycle_author.add_value("firstName", json_ld_author) + if json_ld_date_published: + lifecycle_author.add_value("date", json_ld_date_published) + elif json_ld_date_created: + lifecycle_author.add_value("date", json_ld_date_created) + lom_base_itemloader.add_value("lifecycle", lifecycle_author.load_item()) + + json_ld_publisher: dict = self.get_json_ld_property(json_lds, property_name="publisher") + if json_ld_publisher: + # a typical "publisher"-dict looks like this: + # "publisher": { + # "@type": "Organization", + # "name": "Bundeszentrale f\u00fcr politische Bildung", + # "logo": { + # "@type": "ImageObject", + # "url": "https:\/\/www.bpb.de\/themes\/custom\/bpbtheme\/images\/bpb_logo_ldjson.jpg", + # "width": "144", + # "height": "60" + # } + # } + publisher_name: str = str() + publisher_type: str = str() + lifecycle_publisher: LomLifecycleItemloader = LomLifecycleItemloader() + lifecycle_publisher.add_value("role", "publisher") + if "name" in json_ld_publisher: + publisher_name: str = json_ld_publisher.get("name") + if "@type" in json_ld_publisher: + publisher_type: str = json_ld_publisher.get("@type") + if publisher_type and publisher_type == "Organization" and publisher_name: + lifecycle_publisher.add_value("organization", publisher_name) + elif publisher_type and publisher_type == "Person" and publisher_name: + lifecycle_publisher.add_value("firstName", publisher_name) + if json_ld_date_published: + lifecycle_publisher.add_value("date", json_ld_date_published) + elif json_ld_date_created: + lifecycle_publisher.add_value("date", json_ld_date_created) + lom_base_itemloader.add_value("lifecycle", lifecycle_publisher.load_item()) + + educational_itemloader: LomEducationalItemLoader = LomEducationalItemLoader() + if in_language: + educational_itemloader.add_value("language", in_language) + + classification_itemloader: LomClassificationItemLoader = LomClassificationItemLoader() + + vs_itemloader: ValuespaceItemLoader = ValuespaceItemLoader() + vs_itemloader.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) + json_ld_type: str = self.get_json_ld_property(json_lds, property_name="@type") + og_type: str = self.get_opengraph_property(opengraph_dict, property_name="og:type") + if json_ld_type: + vs_itemloader.add_value("new_lrt", json_ld_type) + elif og_type: + vs_itemloader.add_value("new_lrt", og_type) + if "/lexika/" in response.url: + vs_itemloader.add_value("new_lrt", "c022c920-c236-4234-bae1-e264a3e2bdf6") + # Nachschlagewerk und Glossareintrag + if "/taegliche-dosis-politik/" in response.url: + vs_itemloader.add_value("new_lrt", "dc5763ab-6f47-4aa3-9ff3-1303efbeef6e") + # Nachricht und Neuigkeit + if "/mediathek/" in response.url and "/podcast/" not in response.url: + vs_itemloader.add_value("new_lrt", "7a6e9608-2554-4981-95dc-47ab9ba924de") + # Video (Material) + if "/podcasts/" in response.url: + vs_itemloader.add_value("new_lrt", "6e821748-ad12-4ac1-bb14-9b54493e2c50") + # Radio, Podcastfolge und Interview + # ToDo: valuespaces vocabs + # - intendedEndUserRole? + # - educationalContext? + # - dataProtectionConformity? 
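Editorial note: the `new_lrt` assignments above all follow the same "URL path fragment maps to vocab ID" pattern. Purely as an illustration, the same mapping can be expressed as a lookup table reusing the IDs from the code above (the "/mediathek/"-without-"/podcast/" case needs its own check and is left out here); the helper below is hypothetical and not part of the spider:

NEW_LRT_BY_PATH: dict[str, str] = {
    "/lexika/": "c022c920-c236-4234-bae1-e264a3e2bdf6",  # Nachschlagewerk und Glossareintrag
    "/taegliche-dosis-politik/": "dc5763ab-6f47-4aa3-9ff3-1303efbeef6e",  # Nachricht und Neuigkeit
    "/podcasts/": "6e821748-ad12-4ac1-bb14-9b54493e2c50",  # Radio, Podcastfolge und Interview
}


def new_lrt_ids_for(url: str) -> list[str]:
    # returns every vocab ID whose path fragment occurs in the given URL
    return [vocab_id for path_fragment, vocab_id in NEW_LRT_BY_PATH.items() if path_fragment in url]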
+ vs_itemloader.add_value("discipline", ["480", "240"]) # Politik, Geschichte + if "/wirtschaft/" in response.url: + vs_itemloader.add_value("discipline", "700") # Wirtschaftskunde + if "/umwelt/" in response.url: + vs_itemloader.add_value("discipline", "640") # Umwelterziehung + if "/medienpaedagogik/" in response.url: + vs_itemloader.add_value("discipline", "900") # Medienbildung + vs_itemloader.add_value("conditionsOfAccess", "no_login") + vs_itemloader.add_value("containsAdvertisement", "no") + vs_itemloader.add_value("price", "no") + + license_itemloader: LicenseItemLoader = LicenseItemLoader() + if json_ld_author: + license_itemloader.add_value("author", json_ld_author) + license_url: str = response.xpath("//a[@rel='license']/@href").get() + if license_url: + license_itemloader.add_value("url", license_url) + + permission_itemloader: PermissionItemLoader = super().getPermissions(response) + + response_itemloader: ResponseItemLoader = await super().mapResponse(response=response) + + lom_base_itemloader.add_value("general", general_itemloader.load_item()) + lom_base_itemloader.add_value("technical", technical_itemloader.load_item()) + lom_base_itemloader.add_value("educational", educational_itemloader.load_item()) + lom_base_itemloader.add_value("classification", classification_itemloader.load_item()) + + base_itemloader.add_value("lom", lom_base_itemloader.load_item()) + base_itemloader.add_value("license", license_itemloader.load_item()) + base_itemloader.add_value("valuespaces", vs_itemloader.load_item()) + base_itemloader.add_value("permissions", permission_itemloader.load_item()) + base_itemloader.add_value("response", response_itemloader.load_item()) + + yield base_itemloader.load_item() From bc7756c8b447832973180f20b2492cf651842c67 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:15:47 +0100 Subject: [PATCH 447/590] change/perf: drop SitemapSpider usage in favor of scrapy.Spider - change: replaced SitemapSpider by scrapy.Spider to reduce crawler HTTP Requests -- while convenient, Scrapy's SitemapSpider does not allow enough control over how scrapy.Requests are made (we want to skip all HTTP requests which would get dropped later) - feat: implemented XML element counter -- the amount of XML elements is shown during initial parsing of the sitemaps -- the final count is shown during spider close - optimized imports --- converter/spiders/bpb_spider.py | 86 ++++++++++++++++++++++++--------- 1 file changed, 62 insertions(+), 24 deletions(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index 15ea1724..7a182964 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -1,10 +1,15 @@ import html import logging -from typing import Any, Union +from io import BytesIO +from typing import Any, Iterable, Union import extruct +import scrapy +from lxml import objectify +from lxml.etree import ElementTree +from lxml.objectify import ObjectifiedElement +from scrapy import Request from scrapy.http import Response -from scrapy.spiders import SitemapSpider from twisted.internet.defer import Deferred from converter.spiders.base_classes import LomBase @@ -28,25 +33,26 @@ logger = logging.getLogger(__name__) -class BpbSpider(SitemapSpider, LomBase): +class BpbSpider(scrapy.Spider, LomBase): name = "bpb_spider" url = "https://www.bpb.de" friendlyName = "Bundeszentrale für politische Bildung" - sitemap_urls = [ - "https://www.bpb.de/robots.txt", - # 
"https://www.bpb.de/sites/default/files/xmlsitemap/oWx2Pl033k1XFmYJFOs7sO0G3JasH0cjDbduvDwKuwo/index.xml" - ] - # the most-current sitemap can be found at the bottom of the robots.txt file and contains a sitemap-index. - # the human-readable sitemap can be found at: https://www.bpb.de/sitemap/ + start_urls = ["https://www.bpb.de/sitemap.xml?page=1", "https://www.bpb.de/sitemap.xml?page=2"] + # the most-current sitemap can be found at the bottom of the robots.txt file (see: https://www.bpb.de/robots.txt ) + # and contains a sitemap-index, + # e.g.: https://www.bpb.de/sites/default/files/xmlsitemap/oWx2Pl033k1XFmYJFOs7sO0G3JasH0cjDbduvDwKuwo/index.xml + # an additional, human-readable sitemap (HTML) can be found at: https://www.bpb.de/sitemap/ allowed_domains = ["bpb.de"] sitemap_rules = [ ("/themen/", "parse"), ("/mediathek/", "parse"), ("/lernen/", "parse"), ("/kurz-knapp/", "parse"), - ("/shop/", "drop_item"), - ("/veranstaltungen/", "drop_item"), # ToDo: implement custom handling for events in a future version - ("/die-bpb/", "drop_item"), + ] + deny_list: list[str] = [ + "/shop/", + "/veranstaltungen/", # ToDo: implement custom handling for events in a future version + "/die-bpb/", ] version = "0.2.1" # last update: 2024-02-14 # (first version of the crawler after bpb.de completely relaunched their website in 2022-02) @@ -56,29 +62,61 @@ class BpbSpider(SitemapSpider, LomBase): "AUTOTHROTTLE_DEBUG": True, } DEBUG_DROPPED_ITEMS: list[str] = list() + DEBUG_XML_COUNT: int = 0 def __init__(self, **kwargs): - SitemapSpider.__init__(self, **kwargs) + scrapy.Spider.__init__(self, **kwargs) LomBase.__init__(self, **kwargs) def close(self, reason: str) -> Union[Deferred, None]: # ToDo (optional): extend functionality by counting filtered duplicates as well # (-> extend Scrapy Dupefilter logging) + logger.info(f"Closing spider (reason: {reason} )...") + if self.DEBUG_XML_COUNT: + logger.info(f"Summary: The sitemap index contained {self.DEBUG_XML_COUNT} (unfiltered) items in total.") if self.DEBUG_DROPPED_ITEMS: logger.info( - f"Summary: Items dropped in total (due to sitemap rules / robot meta tags etc.): " + f"Summary: Items dropped (due to sitemap rules / robot meta tags etc.): " f"{len(self.DEBUG_DROPPED_ITEMS)}" ) - return - - def drop_item(self, response: Response) -> None: - """ - URLs which should not be crawled are dropped and logged. At the end of the crawl process, a counter will - display the amount of dropped items for debugging purposes. 
- """ - logger.debug(f"Dropping item {response.url} due to specified sitemap rules.") - self.DEBUG_DROPPED_ITEMS.append(response.url) - return + return None + + def start_requests(self) -> Iterable[Request]: + for url in self.start_urls: + yield scrapy.Request(url=url, callback=self.parse_sitemap, priority=2) + + def parse_sitemap(self, response: Response): + if response: + xml: ElementTree = objectify.parse(BytesIO(response.body)) + xml_root: ObjectifiedElement = xml.getroot() + xml_count: int = len(xml_root.getchildren()) + if xml_count: + logger.info(f"Sitemap {response.url} contained {xml_count} XML elements in total.") + self.DEBUG_XML_COUNT += xml_count + ns_map = {"ns": "http://www.sitemaps.org/schemas/sitemap/0.9"} + for xml_element in xml_root.findall("ns:url", ns_map): + # we're only interested in the of an element, e.g.: + # + # https://www.bpb.de/themen/medien-journalismus/netzdebatte/179637/big-data-ein-ungezaehmtes-tier-mit-grossem-potential-ein-interview-mit-frank-schirrmacher/ + # 2022-02-07T16:25Z + # yearly + # 0.6 + # + item_url: str = xml_element.loc.text + # xml_element.loc is a StringElement -> by calling .text on it, we get the string value + drop_item_flag: bool = False + for partial_url in self.deny_list: + # since the sitemaps are huge (56.880 urls in total), + # we try to not cause HTTP Requests for items that would be dropped anyway + if item_url and partial_url in item_url: + # URLs which should not be crawled are dropped and logged. + # At the end of the crawl process, a counter will display the amount of dropped items for + # debugging purposes. + drop_item_flag = True + # logger.debug(f"Dropping item {item_url} due to sitemap rules.") # this one is spammy! + self.DEBUG_DROPPED_ITEMS.append(item_url) + if not drop_item_flag: + yield scrapy.Request(url=item_url, callback=self.parse) @staticmethod def get_json_ld_property(json_lds: list[dict], property_name: str) -> Any | None: From df42bf5c1db4fe245f5806b44c3203d68b0b4267 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 15 Feb 2024 10:51:33 +0100 Subject: [PATCH 448/590] change: reduce concurrent requests - during debugging we observed 404 responses after the crawler temporarily reached a concurrency of 8, causing all subsequent requests to fail with a 404 response -- status code 404 responses were returned with a "/big-pipe/no-js?destination=..." 
path, which is not seen under normal crawling circumstances --- converter/spiders/bpb_spider.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index 7a182964..31ef8483 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -60,6 +60,8 @@ class BpbSpider(scrapy.Spider, LomBase): "WEB_TOOLS": WebEngine.Playwright, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, + "CONCURRENT_REQUESTS_PER_DOMAIN": 5, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5 } DEBUG_DROPPED_ITEMS: list[str] = list() DEBUG_XML_COUNT: int = 0 From fd663ca466c71ddeb8ff4faeccd2fcd81a8e1d02 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 15 Feb 2024 12:07:29 +0100 Subject: [PATCH 449/590] logging: use spider-specific logger - replaced all remaining "logging."-calls with spider-specific logger (to increase readability of spider logs) --- converter/spiders/bpb_spider.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index 31ef8483..ef00e9ef 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -1,5 +1,4 @@ import html -import logging from io import BytesIO from typing import Any, Iterable, Union @@ -30,8 +29,6 @@ ) from ..web_tools import WebEngine -logger = logging.getLogger(__name__) - class BpbSpider(scrapy.Spider, LomBase): name = "bpb_spider" @@ -61,7 +58,7 @@ class BpbSpider(scrapy.Spider, LomBase): "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, "CONCURRENT_REQUESTS_PER_DOMAIN": 5, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5 + "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5, } DEBUG_DROPPED_ITEMS: list[str] = list() DEBUG_XML_COUNT: int = 0 @@ -73,11 +70,13 @@ def __init__(self, **kwargs): def close(self, reason: str) -> Union[Deferred, None]: # ToDo (optional): extend functionality by counting filtered duplicates as well # (-> extend Scrapy Dupefilter logging) - logger.info(f"Closing spider (reason: {reason} )...") + self.logger.info(f"Closing spider (reason: {reason} )...") if self.DEBUG_XML_COUNT: - logger.info(f"Summary: The sitemap index contained {self.DEBUG_XML_COUNT} (unfiltered) items in total.") + self.logger.info( + f"Summary: The sitemap index contained {self.DEBUG_XML_COUNT} (unfiltered) items in total." + ) if self.DEBUG_DROPPED_ITEMS: - logger.info( + self.logger.info( f"Summary: Items dropped (due to sitemap rules / robot meta tags etc.): " f"{len(self.DEBUG_DROPPED_ITEMS)}" ) @@ -93,7 +92,7 @@ def parse_sitemap(self, response: Response): xml_root: ObjectifiedElement = xml.getroot() xml_count: int = len(xml_root.getchildren()) if xml_count: - logger.info(f"Sitemap {response.url} contained {xml_count} XML elements in total.") + self.logger.info(f"Sitemap {response.url} contained {xml_count} XML elements in total.") self.DEBUG_XML_COUNT += xml_count ns_map = {"ns": "http://www.sitemaps.org/schemas/sitemap/0.9"} for xml_element in xml_root.findall("ns:url", ns_map): @@ -115,7 +114,7 @@ def parse_sitemap(self, response: Response): # At the end of the crawl process, a counter will display the amount of dropped items for # debugging purposes. drop_item_flag = True - # logger.debug(f"Dropping item {item_url} due to sitemap rules.") # this one is spammy! + # self.logger.debug(f"Dropping item {item_url} due to sitemap rules.") # this one is spammy! 
self.DEBUG_DROPPED_ITEMS.append(item_url) if not drop_item_flag: yield scrapy.Request(url=item_url, callback=self.parse) @@ -188,7 +187,9 @@ def getId(self, response: Response = None, json_lds: list[dict] = None, opengrap if item_url: return item_url elif response: - logger.warning(f"Item {response.url} did not provide a stable ID (url). Falling back to response.url ...") + self.logger.warning( + f"Item {response.url} did not provide a stable ID (url). Falling back to response.url ..." + ) return response.url def getHash(self, response: Response = None, json_lds: list[dict] = None) -> str: @@ -238,18 +239,18 @@ def has_changed(self, response: Response, identifier: str, hash_str: str) -> boo return True if self.uuid: if uuid_str == self.uuid: - logger.info(f"Matched requested uuid: {self.uuid} ({identifier}).") + self.logger.info(f"Matched requested uuid: {self.uuid} ({identifier}).") return True return False if self.remoteId: if identifier == self.remoteId: - logger.info(f"Matched requested remoteId {self.remoteId} ({identifier}).") + self.logger.info(f"Matched requested remoteId {self.remoteId} ({identifier}).") return True return False db = EduSharing().find_item(identifier, self) changed = db is None or db[1] != hash_str if not changed: - logger.info(f"Item {identifier} has not changed.") + self.logger.info(f"Item {identifier} has not changed.") return changed def check_if_item_should_be_dropped(self, response, json_lds, opengraph_dict) -> bool: @@ -257,7 +258,7 @@ def check_if_item_should_be_dropped(self, response, json_lds, opengraph_dict) -> identifier: str = self.getId(response=response, json_lds=json_lds, opengraph=opengraph_dict) hash_str: str = self.getHash(response=response, json_lds=json_lds) if self.shouldImport(response) is False: - logger.info(f"Skipping entry {identifier} because shouldImport() returned 'False'.") + self.logger.info(f"Skipping entry {identifier} because shouldImport() returned 'False'.") drop_item_flag = True if identifier is not None and hash_str is not None: if not self.has_changed(response=response, identifier=identifier, hash_str=hash_str): @@ -266,7 +267,7 @@ def check_if_item_should_be_dropped(self, response, json_lds, opengraph_dict) -> if robot_meta_tags: # see: https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag if "noindex" in robot_meta_tags or "none" in robot_meta_tags: - logging.info( + self.logger.info( f"Robot Meta Tag {robot_meta_tags} identified. Robot Meta Tags 'noindex' or 'none' should " f"be skipped by the crawler. Dropping item {response.url} ." ) From 6a98177b4d3cbf34b3356a77ee602020f299a38d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 15 Feb 2024 14:02:47 +0100 Subject: [PATCH 450/590] change: try to mitigate "/big_pipe/no-js?..." 404s by ignoring cookies - after roughly 5650 HTTP requests, bpb's Drupal started throwing BigPipe-related 404s (the response contains "/big_pipe/no-js?destination=..." 
in its URL) - change: scrapy Requests to individual items don't merge the cookies from previous responses anymore -- see: https://doc.scrapy.org/en/latest/topics/request-response.html#std-reqmeta-dont_merge_cookies --- converter/spiders/bpb_spider.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index ef00e9ef..1fa7fbf6 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -117,7 +117,12 @@ def parse_sitemap(self, response: Response): # self.logger.debug(f"Dropping item {item_url} due to sitemap rules.") # this one is spammy! self.DEBUG_DROPPED_ITEMS.append(item_url) if not drop_item_flag: - yield scrapy.Request(url=item_url, callback=self.parse) + yield scrapy.Request(url=item_url, callback=self.parse, meta={ + "dont_merge_cookies": True + }) + # the flag 'dont_merge_cookies' is necessary because bpb.de apparently uses Drupal's BigPipe + # implementation, which sets a "no-js"-cookie. After receiving that cookie, all subsequent requests + # are 404s and invalid. Invalid responses contain a "/big_pipe/no-js?destination=" path in their URL @staticmethod def get_json_ld_property(json_lds: list[dict], property_name: str) -> Any | None: From d35819d19b3365e2f8cdc44f599d04b0fc2eb132 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Feb 2024 12:04:03 +0100 Subject: [PATCH 451/590] change: extend deny_list (undesired "Impressum"-like URL paths) - while checking the results on Staging, a few more URL-paths were identified that should not be crawled because they aren't learning materials: -- bpb.de URLs that end with "/kontakt/", "/impressum/" or "/redaktion/" --- e.g.: https://www.bpb.de/themen/migration-integration/kurzdossiers/172761/impressum/ is not a desired (to be crawled) item in itself, but learners who stumble upon https://www.bpb.de/themen/migration-integration/kurzdossiers/ will be able to reach that information (if they need to) by pressing the "Inhalt"-button --- converter/spiders/bpb_spider.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index 1fa7fbf6..fa3b36dd 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -47,9 +47,12 @@ class BpbSpider(scrapy.Spider, LomBase): ("/kurz-knapp/", "parse"), ] deny_list: list[str] = [ + "/die-bpb/", + "/impressum/", + "/kontakt/", + "/redaktion/", "/shop/", "/veranstaltungen/", # ToDo: implement custom handling for events in a future version - "/die-bpb/", ] version = "0.2.1" # last update: 2024-02-14 # (first version of the crawler after bpb.de completely relaunched their website in 2022-02) From 9e809c510d20818a957475288c9e218d8b52878f Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Feb 2024 12:20:47 +0100 Subject: [PATCH 452/590] change: increase autothrottle target concurrency - since the recent workaround for Drupal's BigPipe "no-JS"-cookie seems to have been successful, we can try to increase the crawling throughput again - change / code cleanup: remove "sitemap_rules"-variable (since it is only used in SitemapSpiders) --- converter/spiders/bpb_spider.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index fa3b36dd..99057e4a 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -40,12 
+40,6 @@ class BpbSpider(scrapy.Spider, LomBase): # e.g.: https://www.bpb.de/sites/default/files/xmlsitemap/oWx2Pl033k1XFmYJFOs7sO0G3JasH0cjDbduvDwKuwo/index.xml # an additional, human-readable sitemap (HTML) can be found at: https://www.bpb.de/sitemap/ allowed_domains = ["bpb.de"] - sitemap_rules = [ - ("/themen/", "parse"), - ("/mediathek/", "parse"), - ("/lernen/", "parse"), - ("/kurz-knapp/", "parse"), - ] deny_list: list[str] = [ "/die-bpb/", "/impressum/", @@ -61,7 +55,7 @@ class BpbSpider(scrapy.Spider, LomBase): "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, "CONCURRENT_REQUESTS_PER_DOMAIN": 5, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 2, } DEBUG_DROPPED_ITEMS: list[str] = list() DEBUG_XML_COUNT: int = 0 From e62cbfb61bbf9965eae3b706b328d57f28811e49 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Feb 2024 14:11:13 +0100 Subject: [PATCH 453/590] logging: improve counters of expected (unique) URLs - both during startup and closing of the crawler, counters will be displayed for the number of: -- unique URLs that were parsed from the sitemaps and are expected to be passed into the "parse()"-method -- unique URLs that are expected to be filtered / dropped according to our deny_list, hash check etc. - this should make it more clear during later crawls how many URLs we expect to crawl and how many of those are filtered out --- converter/spiders/bpb_spider.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index 99057e4a..c28c0ea3 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -57,7 +57,8 @@ class BpbSpider(scrapy.Spider, LomBase): "CONCURRENT_REQUESTS_PER_DOMAIN": 5, "AUTOTHROTTLE_TARGET_CONCURRENCY": 2, } - DEBUG_DROPPED_ITEMS: list[str] = list() + DEBUG_DROPPED_ITEMS: set[str] = set() + DEBUG_UNIQUE_URLS_TO_BE_CRAWLED: set[str] = set() DEBUG_XML_COUNT: int = 0 def __init__(self, **kwargs): @@ -70,14 +71,17 @@ def close(self, reason: str) -> Union[Deferred, None]: self.logger.info(f"Closing spider (reason: {reason} )...") if self.DEBUG_XML_COUNT: self.logger.info( - f"Summary: The sitemap index contained {self.DEBUG_XML_COUNT} (unfiltered) items in total." + f"Summary: The sitemap index contained {self.DEBUG_XML_COUNT} (unfiltered) XML elements in total." ) + if self.DEBUG_UNIQUE_URLS_TO_BE_CRAWLED: + self.logger.info(f"Summary: Unique URLs to be crawled: {len(self.DEBUG_UNIQUE_URLS_TO_BE_CRAWLED)}") if self.DEBUG_DROPPED_ITEMS: self.logger.info( - f"Summary: Items dropped (due to sitemap rules / robot meta tags etc.): " + f"Summary: Items filtered / dropped (due to sitemap rules " + f"(see: 'deny_list'-variable / robot meta tags etc.): " f"{len(self.DEBUG_DROPPED_ITEMS)}" ) - return None + return def start_requests(self) -> Iterable[Request]: for url in self.start_urls: @@ -112,14 +116,18 @@ def parse_sitemap(self, response: Response): # debugging purposes. drop_item_flag = True # self.logger.debug(f"Dropping item {item_url} due to sitemap rules.") # this one is spammy! 
- self.DEBUG_DROPPED_ITEMS.append(item_url) + self.DEBUG_DROPPED_ITEMS.add(item_url) if not drop_item_flag: - yield scrapy.Request(url=item_url, callback=self.parse, meta={ - "dont_merge_cookies": True - }) + self.DEBUG_UNIQUE_URLS_TO_BE_CRAWLED.add(item_url) + yield scrapy.Request(url=item_url, callback=self.parse, meta={"dont_merge_cookies": True}) # the flag 'dont_merge_cookies' is necessary because bpb.de apparently uses Drupal's BigPipe # implementation, which sets a "no-js"-cookie. After receiving that cookie, all subsequent requests # are 404s and invalid. Invalid responses contain a "/big_pipe/no-js?destination=" path in their URL + if self.DEBUG_UNIQUE_URLS_TO_BE_CRAWLED: + self.logger.info( + f"Unique URLs to be crawled after parsing sitemap {response.url} : " + f"{len(self.DEBUG_UNIQUE_URLS_TO_BE_CRAWLED)}" + ) @staticmethod def get_json_ld_property(json_lds: list[dict], property_name: str) -> Any | None: @@ -283,7 +291,7 @@ async def parse(self, response: Response, **kwargs: Any) -> Any: drop_item_flag = self.check_if_item_should_be_dropped(response, json_lds, opengraph_dict) if drop_item_flag: - self.DEBUG_DROPPED_ITEMS.append(response.url) + self.DEBUG_DROPPED_ITEMS.add(response.url) return base_itemloader: BaseItemLoader = BaseItemLoader() From e83a9a8ca70af84ad296b1aebe35b2be6d049295 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 20 Feb 2024 12:21:29 +0100 Subject: [PATCH 454/590] change: extend URL filter - implemented an additional URL check that catches URLs ending with known "Impressum"-like substrings -- while the deny_list looks at URL paths that could appear anywhere in the URL, this additional check explicitly only looks for specific substrings at the end of a URL (that would not be picked up by the previous deny_list) --- converter/spiders/bpb_spider.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index c28c0ea3..f3130e5f 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -48,7 +48,8 @@ class BpbSpider(scrapy.Spider, LomBase): "/shop/", "/veranstaltungen/", # ToDo: implement custom handling for events in a future version ] - version = "0.2.1" # last update: 2024-02-14 + deny_list_endswith: list[str] = ["/impressum", "/kontakt", "/redaktion"] + version = "0.2.1" # last update: 2024-02-20 # (first version of the crawler after bpb.de completely relaunched their website in 2022-02) custom_settings = { "WEB_TOOLS": WebEngine.Playwright, @@ -117,6 +118,12 @@ def parse_sitemap(self, response: Response): drop_item_flag = True # self.logger.debug(f"Dropping item {item_url} due to sitemap rules.") # this one is spammy! self.DEBUG_DROPPED_ITEMS.add(item_url) + for url_ending_with_str in self.deny_list_endswith: + if item_url and item_url.endswith(url_ending_with_str): + # URLs that end with "/impressum", "/kontakt" or "/redaktion" are "Impressum"-like pages which + # need to be checked separately from the deny_list. 
+ drop_item_flag = True + self.DEBUG_DROPPED_ITEMS.add(item_url) if not drop_item_flag: self.DEBUG_UNIQUE_URLS_TO_BE_CRAWLED.add(item_url) yield scrapy.Request(url=item_url, callback=self.parse, meta={"dont_merge_cookies": True}) From bca0565293a9a2b8a00776a7cba3137e1eeb1a8f Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 20 Feb 2024 13:19:59 +0100 Subject: [PATCH 455/590] change: add missing (legacy) licenses - while debugging bpb_spider there were several license URL edge-cases which weren't handled properly by the license pipeline yet -- while CC 1.0 licenses shouldn't be used anymore and URLs pointing to those deeds are considered legacy URLs (see: https://creativecommons.org/licenses/), the license pipeline should recognize these URLs anyway and save them accordingly - tests: added two test-cases for license URLs from bpb.de --- converter/constants.py | 25 +++++++++++++++++++++++++ converter/es_connector.py | 24 ++++++++++++++++++++++++ converter/util/test_license_mapper.py | 2 ++ 3 files changed, 51 insertions(+) diff --git a/converter/constants.py b/converter/constants.py index 0c9cbe30..f76eb59d 100644 --- a/converter/constants.py +++ b/converter/constants.py @@ -2,23 +2,31 @@ class Constants: + LICENSE_CC_BY_10: Final[str] = "https://creativecommons.org/licenses/by/1.0/" LICENSE_CC_BY_20: Final[str] = "https://creativecommons.org/licenses/by/2.0/" LICENSE_CC_BY_25: Final[str] = "https://creativecommons.org/licenses/by/2.5/" LICENSE_CC_BY_30: Final[str] = "https://creativecommons.org/licenses/by/3.0/" LICENSE_CC_BY_40: Final[str] = "https://creativecommons.org/licenses/by/4.0/" + LICENSE_CC_BY_NC_10: Final[str] = "https://creativecommons.org/licenses/by-nc/1.0/" + LICENSE_CC_BY_NC_20: Final[str] = "https://creativecommons.org/licenses/by-nc/2.0/" + LICENSE_CC_BY_NC_25: Final[str] = "https://creativecommons.org/licenses/by-nc/2.5/" LICENSE_CC_BY_NC_30: Final[str] = "https://creativecommons.org/licenses/by-nc/3.0/" LICENSE_CC_BY_NC_40: Final[str] = "https://creativecommons.org/licenses/by-nc/4.0/" LICENSE_CC_BY_NC_ND_20: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/2.0/" LICENSE_CC_BY_NC_ND_25: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/2.5/" LICENSE_CC_BY_NC_ND_30: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/3.0/" LICENSE_CC_BY_NC_ND_40: Final[str] = "https://creativecommons.org/licenses/by-nc-nd/4.0/" + LICENSE_CC_BY_NC_SA_10: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/1.0/" LICENSE_CC_BY_NC_SA_20: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/2.0/" LICENSE_CC_BY_NC_SA_25: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/2.5/" LICENSE_CC_BY_NC_SA_30: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/3.0/" LICENSE_CC_BY_NC_SA_40: Final[str] = "https://creativecommons.org/licenses/by-nc-sa/4.0/" + LICENSE_CC_BY_ND_10: Final[str] = "https://creativecommons.org/licenses/by-nd/1.0/" LICENSE_CC_BY_ND_20: Final[str] = "https://creativecommons.org/licenses/by-nd/2.0/" + LICENSE_CC_BY_ND_25: Final[str] = "https://creativecommons.org/licenses/by-nd/2.5/" LICENSE_CC_BY_ND_30: Final[str] = "https://creativecommons.org/licenses/by-nd/3.0/" LICENSE_CC_BY_ND_40: Final[str] = "https://creativecommons.org/licenses/by-nd/4.0/" + LICENSE_CC_BY_SA_10: Final[str] = "https://creativecommons.org/licenses/by-sa/1.0/" LICENSE_CC_BY_SA_20: Final[str] = "https://creativecommons.org/licenses/by-sa/2.0/" LICENSE_CC_BY_SA_25: Final[str] = 
"https://creativecommons.org/licenses/by-sa/2.5/" LICENSE_CC_BY_SA_30: Final[str] = "https://creativecommons.org/licenses/by-sa/3.0/" @@ -27,23 +35,31 @@ class Constants: LICENSE_PDM: Final[str] = "https://creativecommons.org/publicdomain/mark/1.0/" VALID_LICENSE_URLS: list[str | Any] = [ + LICENSE_CC_BY_10, LICENSE_CC_BY_20, LICENSE_CC_BY_25, LICENSE_CC_BY_30, LICENSE_CC_BY_40, + LICENSE_CC_BY_NC_10, + LICENSE_CC_BY_NC_20, + LICENSE_CC_BY_NC_25, LICENSE_CC_BY_NC_30, LICENSE_CC_BY_NC_40, LICENSE_CC_BY_NC_ND_20, LICENSE_CC_BY_NC_ND_25, LICENSE_CC_BY_NC_ND_30, LICENSE_CC_BY_NC_ND_40, + LICENSE_CC_BY_NC_SA_10, LICENSE_CC_BY_NC_SA_20, LICENSE_CC_BY_NC_SA_25, LICENSE_CC_BY_NC_SA_30, LICENSE_CC_BY_NC_SA_40, + LICENSE_CC_BY_ND_10, LICENSE_CC_BY_ND_20, + LICENSE_CC_BY_ND_25, LICENSE_CC_BY_ND_30, LICENSE_CC_BY_ND_40, + LICENSE_CC_BY_SA_10, LICENSE_CC_BY_SA_20, LICENSE_CC_BY_SA_25, LICENSE_CC_BY_SA_30, @@ -52,22 +68,30 @@ class Constants: LICENSE_PDM, ] LICENSE_MAPPINGS: dict[str, str] = { + "https://creativecommons.org/licenses/by/1.0/": LICENSE_CC_BY_10, "https://creativecommons.org/licenses/by/2.0/": LICENSE_CC_BY_20, "https://creativecommons.org/licenses/by/2.5/": LICENSE_CC_BY_25, "https://creativecommons.org/licenses/by/3.0/": LICENSE_CC_BY_30, "https://creativecommons.org/licenses/by/4.0/": LICENSE_CC_BY_40, + "https://creativecommons.org/licenses/by-nc/1.0/": LICENSE_CC_BY_NC_10, + "https://creativecommons.org/licenses/by-nc/2.0/": LICENSE_CC_BY_NC_20, + "https://creativecommons.org/licenses/by-nc/2.5/": LICENSE_CC_BY_NC_25, "https://creativecommons.org/licenses/by-nc/3.0/": LICENSE_CC_BY_NC_30, "https://creativecommons.org/licenses/by-nc/4.0/": LICENSE_CC_BY_NC_40, "https://creativecommons.org/licenses/by-nc-nd/2.0/": LICENSE_CC_BY_NC_ND_20, "https://creativecommons.org/licenses/by-nc-nd/3.0/": LICENSE_CC_BY_NC_ND_30, "https://creativecommons.org/licenses/by-nc-nd/4.0/": LICENSE_CC_BY_NC_ND_40, + "https://creativecommons.org/licenses/by-nc-sa/1.0/": LICENSE_CC_BY_NC_SA_10, "https://creativecommons.org/licenses/by-nc-sa/2.0/": LICENSE_CC_BY_NC_SA_20, "https://creativecommons.org/licenses/by-nc-sa/2.5/": LICENSE_CC_BY_NC_SA_25, "https://creativecommons.org/licenses/by-nc-sa/3.0/": LICENSE_CC_BY_NC_SA_30, "https://creativecommons.org/licenses/by-nc-sa/4.0/": LICENSE_CC_BY_NC_SA_40, + "https://creativecommons.org/licenses/by-nd/1.0/": LICENSE_CC_BY_ND_10, "https://creativecommons.org/licenses/by-nd/2.0/": LICENSE_CC_BY_ND_20, + "https://creativecommons.org/licenses/by-nd/2.5/": LICENSE_CC_BY_ND_25, "https://creativecommons.org/licenses/by-nd/3.0/": LICENSE_CC_BY_ND_30, "https://creativecommons.org/licenses/by-nd/4.0/": LICENSE_CC_BY_ND_40, + "https://creativecommons.org/licenses/by-sa/1.0/": LICENSE_CC_BY_SA_10, "https://creativecommons.org/licenses/by-sa/2.0/": LICENSE_CC_BY_SA_20, "https://creativecommons.org/licenses/by-sa/2.5/": LICENSE_CC_BY_SA_25, "https://creativecommons.org/licenses/by-sa/3.0/": LICENSE_CC_BY_SA_30, @@ -77,6 +101,7 @@ class Constants: "https://creativecommons.org/licenses/pdm/": LICENSE_PDM, "https://creativecommons.org/publicdomain/mark/1.0/": LICENSE_PDM, } + # ToDo: LICENSE_MAPPINGS is only used once in pipelines.py and should be refactored asap LICENSE_MAPPINGS_INTERNAL: dict[str, list[str]] = { "CC_0": [LICENSE_CC_ZERO_10], "CC_BY": [LICENSE_CC_BY_40, LICENSE_CC_BY_30], diff --git a/converter/es_connector.py b/converter/es_connector.py index 4371695b..3ca0700b 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -245,6 +245,9 @@ def 
map_license(self, spaces, license): if "url" in license: match license["url"]: # ToDo: refactor this ungodly method asap + case Constants.LICENSE_CC_BY_10: + spaces["ccm:commonlicense_key"] = "CC_BY" + spaces["ccm:commonlicense_cc_version"] = "1.0" case Constants.LICENSE_CC_BY_20: spaces["ccm:commonlicense_key"] = "CC_BY" spaces["ccm:commonlicense_cc_version"] = "2.0" @@ -257,6 +260,15 @@ def map_license(self, spaces, license): case Constants.LICENSE_CC_BY_40: spaces["ccm:commonlicense_key"] = "CC_BY" spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_NC_10: + spaces["ccm:commonlicense_key"] = "CC_BY_NC" + spaces["ccm:commonlicense_cc_version"] = "1.0" + case Constants.LICENSE_CC_BY_NC_20: + spaces["ccm:commonlicense_key"] = "CC_BY_NC" + spaces["ccm:commonlicense_cc_version"] = "2.0" + case Constants.LICENSE_CC_BY_NC_25: + spaces["ccm:commonlicense_key"] = "CC_BY_NC" + spaces["ccm:commonlicense_cc_version"] = "2.5" case Constants.LICENSE_CC_BY_NC_30: spaces["ccm:commonlicense_key"] = "CC_BY_NC" spaces["ccm:commonlicense_cc_version"] = "3.0" @@ -275,6 +287,9 @@ def map_license(self, spaces, license): case Constants.LICENSE_CC_BY_NC_ND_40: spaces["ccm:commonlicense_key"] = "CC_BY_NC_ND" spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_NC_SA_10: + spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" + spaces["ccm:commonlicense_cc_version"] = "1.0" case Constants.LICENSE_CC_BY_NC_SA_20: spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" spaces["ccm:commonlicense_cc_version"] = "2.0" @@ -287,15 +302,24 @@ def map_license(self, spaces, license): case Constants.LICENSE_CC_BY_NC_SA_40: spaces["ccm:commonlicense_key"] = "CC_BY_NC_SA" spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_ND_10: + spaces["ccm:commonlicense_key"] = "CC_BY_ND" + spaces["ccm:commonlicense_cc_version"] = "1.0" case Constants.LICENSE_CC_BY_ND_20: spaces["ccm:commonlicense_key"] = "CC_BY_ND" spaces["ccm:commonlicense_cc_version"] = "2.0" + case Constants.LICENSE_CC_BY_ND_25: + spaces["ccm:commonlicense_key"] = "CC_BY_ND" + spaces["ccm:commonlicense_cc_version"] = "2.5" case Constants.LICENSE_CC_BY_ND_30: spaces["ccm:commonlicense_key"] = "CC_BY_ND" spaces["ccm:commonlicense_cc_version"] = "3.0" case Constants.LICENSE_CC_BY_ND_40: spaces["ccm:commonlicense_key"] = "CC_BY_ND" spaces["ccm:commonlicense_cc_version"] = "4.0" + case Constants.LICENSE_CC_BY_SA_10: + spaces["ccm:commonlicense_key"] = "CC_BY_SA" + spaces["ccm:commonlicense_cc_version"] = "1.0" case Constants.LICENSE_CC_BY_SA_20: spaces["ccm:commonlicense_key"] = "CC_BY_SA" spaces["ccm:commonlicense_cc_version"] = "2.0" diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py index 4263a895..04c0c257 100644 --- a/converter/util/test_license_mapper.py +++ b/converter/util/test_license_mapper.py @@ -43,6 +43,8 @@ class TestLicenseMapper: ("https://creativecommons.org/licenses/by/3.0/es/", Constants.LICENSE_CC_BY_30), ("https://creativecommons.org/licenses/by/3.0/fr", Constants.LICENSE_CC_BY_30), ("http://creativecommons.org/licenses/by-nc-nd/2.5/ch/deed.en", Constants.LICENSE_CC_BY_NC_ND_25), + ("https://creativecommons.org/licenses/by/1.0/deed.de", Constants.LICENSE_CC_BY_10), + ("https://creativecommons.org/licenses/by-sa/1.0/deed.de", Constants.LICENSE_CC_BY_SA_10), ], ) def test_get_license_url(self, test_input, expected_result): From 6173908873983131279759851ab5d3cae784fb9e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: 
Wed, 6 Mar 2024 12:45:51 +0100 Subject: [PATCH 456/590] change: use class-specific logger instead of 'root' logging --- converter/util/license_mapper.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/converter/util/license_mapper.py b/converter/util/license_mapper.py index 52988026..3829196b 100644 --- a/converter/util/license_mapper.py +++ b/converter/util/license_mapper.py @@ -3,6 +3,8 @@ from converter.constants import Constants +logger = logging.getLogger(__name__) + class LicenseMapper: """ @@ -48,7 +50,7 @@ def get_license_url(self, license_string: str = None) -> str | None: if license_string: return self.identify_cc_license(license_string) else: - logging.debug(f"LicenseMapper ('url'): The provided '{license_string}' does not seem to be a valid string.") + logger.debug(f"LicenseMapper ('url'): The provided '{license_string}' does not seem to be a valid string.") return None def get_license_internal_key(self, license_string: str = None) -> str | None: @@ -67,9 +69,9 @@ def get_license_internal_key(self, license_string: str = None) -> str | None: if internal_hit: return internal_hit else: - logging.debug( - f"LicenseMapper ('internal'): Could not map '{license_string}' to 'license.internal'-key since it doesn't " - f"seem to be a valid string." + logger.debug( + f"LicenseMapper ('internal'): Could not map '{license_string}' to 'license.internal'-key since it " + f"doesn't seem to be a valid string." ) return None @@ -112,12 +114,12 @@ def fallback_to_license_internal_key(self, license_string: str = None) -> str | cc_zero = result_dict.get("CC_ZERO") public_domain = result_dict.get("PDM") if cc_zero: - logging.debug( + logger.debug( f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " f"CC_0" ) return "CC_0" if public_domain: - logging.debug( + logger.debug( f"Licensemapper: Fallback to 'license.internal' for '{license_string}' successful: " f"Public Domain " ) @@ -128,13 +130,13 @@ def fallback_to_license_internal_key(self, license_string: str = None) -> str | cc_string_internal = cc_string_internal.replace("-", "_") cc_string_internal = cc_string_internal.replace(" ", "_") if cc_string_internal in Constants.LICENSE_MAPPINGS_INTERNAL: - logging.debug( + logger.debug( f"LicenseMapper: Fallback to 'license.internal' for '{license_string}' successful: " f"{cc_string_internal}" ) return cc_string_internal else: - logging.debug( + logger.debug( f"LicenseMapper: Fallback to 'license.internal' failed for string " f"'{license_string}' . 
The extracted string_internal value was: " f"{cc_string_internal}" @@ -151,7 +153,7 @@ def identify_cc_license(self, license_string: str) -> str | None: license_string_original: str = license_string if self.identify_if_string_contains_url_pattern(license_string_original): license_url_candidate = license_string_original.lower() - logging.debug(f"LicenseMapper: The string '{license_url_candidate}' was recognized as a URL.") + logger.debug(f"LicenseMapper: The string '{license_url_candidate}' was recognized as a URL.") if "http://" in license_url_candidate: license_url_candidate = license_url_candidate.replace("http://", "https://") if "deed" in license_url_candidate: @@ -180,7 +182,7 @@ def identify_cc_license(self, license_string: str) -> str | None: return valid_license_url elif license_string: license_string = license_string.lower() - logging.debug(f"LicenseMapper: Received license string '{license_string}'") + logger.debug(f"LicenseMapper: Received license string '{license_string}'") if self.cc_pattern.search(license_string): result_dict: dict = self.cc_pattern.search(license_string).groupdict() cc_type = result_dict.get("CC_TYPE") @@ -194,10 +196,10 @@ def identify_cc_license(self, license_string: str) -> str | None: f"/{str(result_dict.get('CC_TYPE')).lower().strip()}" f"/{str(result_dict.get('CC_VERSION')).lower().strip()}/" ) - logging.debug(f"partial_url: {partial_url}") + logger.debug(f"partial_url: {partial_url}") for valid_license_url in Constants.VALID_LICENSE_URLS: if partial_url in valid_license_url: - logging.debug( + logger.debug( f"LicenseMapper: License string '{license_string}' was recognized as " f"{valid_license_url}" ) @@ -205,13 +207,13 @@ def identify_cc_license(self, license_string: str) -> str | None: if public_domain: return Constants.LICENSE_PDM elif cc_type: - logging.debug( + logger.debug( f"LicenseMapper: Couldn't recognize a (valid) CC Version within {license_string} - " f"Trying fallback method for 'license.internal' next..." 
                )
                return None
        else:
-            logging.debug(f"LicenseMapper: Couldn't detect a CC license within {license_string}")
+            logger.debug(f"LicenseMapper: Couldn't detect a CC license within {license_string}")
            return None

From 482755b86118514ae3d78b36564dd26042813eaa Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Wed, 6 Mar 2024 12:48:14 +0100
Subject: [PATCH 457/590] tests: add edge-case from DiLerTube to LicenseMapper test-suite

---
 converter/util/test_license_mapper.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/converter/util/test_license_mapper.py b/converter/util/test_license_mapper.py
index 04c0c257..1540b0ad 100644
--- a/converter/util/test_license_mapper.py
+++ b/converter/util/test_license_mapper.py
@@ -45,6 +45,7 @@ class TestLicenseMapper:
             ("http://creativecommons.org/licenses/by-nc-nd/2.5/ch/deed.en", Constants.LICENSE_CC_BY_NC_ND_25),
             ("https://creativecommons.org/licenses/by/1.0/deed.de", Constants.LICENSE_CC_BY_10),
             ("https://creativecommons.org/licenses/by-sa/1.0/deed.de", Constants.LICENSE_CC_BY_SA_10),
+            ("Creative Commons (CC) CC0 gemeinfrei (public domain - no rights reserved)", Constants.LICENSE_CC_ZERO_10)
         ],
     )
     def test_get_license_url(self, test_input, expected_result):
@@ -72,6 +73,7 @@
             ("Frei nutzbares Material", None),
             (" ", None),
             ("", None),
+            ("Creative Commons (CC) CC0 gemeinfrei (public domain - no rights reserved)", "CC_0")
         ],
     )
     def test_get_license_internal_key(self, test_input, expected_result):

From 8222d5fb88c9a0cf86717ebecf27969e8ff82e8d Mon Sep 17 00:00:00 2001
From: criamos <981166+Criamos@users.noreply.github.com>
Date: Wed, 6 Mar 2024 12:52:12 +0100
Subject: [PATCH 458/590] dilertube_spider v0.0.3

- fix: handling for CC0 edge-cases where the string would not get picked up by the crawler-specific RegEx
-- if the crawler-specific RegEx fails to parse/detect a CC pattern, we'll use the (less precise) fallback method of LicenseMapper for string detection
---
 converter/spiders/dilertube_spider.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py
index 5eff3964..c5bd7144 100644
--- a/converter/spiders/dilertube_spider.py
+++ b/converter/spiders/dilertube_spider.py
@@ -31,7 +31,7 @@ class DiLerTubeSpider(CrawlSpider, LomBase):
     name = "dilertube_spider"
     friendlyName = "DiLerTube"
     start_urls = ["https://www.dilertube.de/sitemap.xml"]
-    version = "0.0.2"  # last update: 2024-02-08
+    version = "0.0.3"  # last update: 2024-03-06
     custom_settings = {
         "ROBOTSTXT_OBEY": False,
         "AUTOTHROTTLE_ENABLED": True,
@@ -194,6 +194,7 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade
         cc_pattern: re.Pattern = re.compile(
             r"\((?P<CC>C{2})\)\s" r"(?P<CC_TYPE>\D{2}(-\D{2})*)" r".*" r"(?<=\s)(?P<CC_VERSION>\d\.\d)?(?=\s)"
         )
+        license_mapper = LicenseMapper()
         if cc_pattern.search(license_description):
             # the LicenseMapper does not recognize this string yet, which is why we need to trim it down in the
             # crawler first and then let the LicenseMapper do the rest
@@ -203,10 +204,15 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade
                 f"{cc_pattern_result_dict.get('CC_TYPE')} " f"{cc_pattern_result_dict.get('CC_VERSION')}"
             )
-            license_mapper = LicenseMapper()
             mapped_license_url: str | None = license_mapper.get_license_url(cc_string_prepared_for_mapping)
             if mapped_license_url:
                 video_info_dict.update({"cc_url": mapped_license_url})
+        else:
+            # fallback
to string recognition by our license mapper for edge-cases where the above RegEx fails + # e.g. "Creative Commons (CC) CC0 gemeinfrei (public domain - no rights reserved)" + license_internal_mapped = license_mapper.get_license_internal_key(license_description) + if license_internal_mapped: + video_info_dict.update({"license_internal": license_internal_mapped}) video_info_box: list[str] = response.xpath( '//ul[@class="list-group mx-0 my-0"]//div[@class="card-body"]/div[@class="mb-2"]' @@ -357,6 +363,9 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade lic.add_value("description", video_info_dict.get("license_description")) if "cc_url" in video_info_dict: lic.add_value("url", video_info_dict.get("cc_url")) + elif "license_internal" in video_info_dict: + # fallback for edge-cases when no CC license could be parsed + lic.add_value("internal", video_info_dict.get("license_internal")) if "author" in video_info_dict: lic.add_value("author", video_info_dict.get("author")) base.add_value("license", lic.load_item()) From 68a133bb0ea6b7a745fa0a846956d5adf36a43d6 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:11:34 +0100 Subject: [PATCH 459/590] docs: add missing DocStrings for "ResponseItem" properties - after a short consultation with Torsten, added the missing DocStrings with regard to ResponseItem properties (especially: 'full text' extraction related information) -- at the moment the only field that's actively used / stored within edu-sharing is 'ResponseItem.text' (which should be used for 'full text' extraction) -- the other fields ('cookies', 'headers', 'har', 'html', 'status', 'url') have never been connected / mapped to individual edu-sharing properties and are therefore (at the moment) not (yet) in use or might be obsolete --- converter/items.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/converter/items.py b/converter/items.py index c637fc52..ea0d08eb 100644 --- a/converter/items.py +++ b/converter/items.py @@ -214,12 +214,22 @@ class ResponseItem(Item): """ cookies = Field() + # ToDo: 'cookies' are not stored in edu-sharing. This field might be obsolete. headers = Field() + # ToDo: 'headers' are not stored in edu-sharing. This field might be obsolete. har = Field() + # ToDo: 'har' logs are not stored in edu-sharing. This field might be obsolete. html = Field() + # ToDo: The 'raw' HTML body is not stored in edu-sharing at the moment. This field might become relevant in the + # future, but as of 2024-03-15 we can only store one "textContent" per item via the edu-sharing API. + # (see: 'ResponseItem.text') status = Field() + # ToDo: the HTTP status code is not stored in edu-sharing. This field might be obsolete. text = Field() + """Corresponding ElasticSearch (!) property: 'content.fulltext'. (The 'full text' of an item is only used for + indexing purposes and not readily available as an edu-sharing property!)""" url = Field() + # ToDo: This field might be obsolete. URL(s) of items are stored within 'LomTechnicalItem.location'! class ValuespaceItem(Item): @@ -333,7 +343,8 @@ class BaseItem(Item): """A field for custom data which can be used by the target transformer to store data in the native format (i.e. 
'ccm:'/'cclom:'-properties in edu-sharing).""" fulltext = Field() - """The 'fulltext'-attribute gets populated by a 'response.text'-call in the pipelines.""" + """The 'fulltext'-attribute gets populated by a 'ResponseItem.text'-call in the pipelines and is stored in the + ElasticSearch index within the 'content.fulltext' property.""" hash = Field() """Corresponding edu-sharing property: 'ccm:replicationsourcehash'""" lastModified = Field() From 6ba7be0dfd432bff5b7e3f8a44621af6cf32efb4 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:21:10 +0100 Subject: [PATCH 460/590] docs: update DocStrings with regard to 'full text' metadata --- converter/spiders/sample_spider_alternative.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 567593fe..14cd649f 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -66,6 +66,10 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade # - publisher optional # - binary optional (only needed if you're working with binary files (e.g. .pdf-files), # if you want to see an example, check out "niedersachsen_abi_spider.py") + # - fulltext optional (if 'full text' content is provided by a source (e.g. raw HTML or a + # human readable string of text) store its within the 'fulltext' field.) + # If no 'fulltext' value was provided, the pipelines will try to fetch + # 'full text' content from "ResponseItem.text" and save it here. base.add_value('sourceId', response.url) # if the source doesn't have a "datePublished" or "lastModified"-value in its header or JSON_LD, # you might have to help yourself with a unique string consisting of the datetime of the crawl + self.version @@ -232,12 +236,12 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade # response_loader = super().mapResponse(response) # TODO: if necessary, add/replace values for the following "response"-keys # - url required - # - status optional - # - html optional - # - text optional - # - headers optional - # - cookies optional - # - har optional + # - status unused + # - html unused + # - text optional (use this field for 'full text' data) + # - headers unused + # - cookies unused + # - har unused # once we've added all available values to the necessary keys in our LomGeneralItemLoader, # we call the load_item()-method to return a (now filled) LomGeneralItem to the LomBaseItemLoader. 
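
The two DocString patches above describe how 'full text' content travels from a crawler into the ElasticSearch index ('ResponseItem.text' -> 'BaseItem.fulltext' -> 'content.fulltext'). The following minimal sketch is not part of any patch in this series; it only illustrates how a crawler could fill 'BaseItem.fulltext' explicitly instead of relying on the pipelines' fallback to 'ResponseItem.text'. The spider name, the start URL and the XPath expression are made-up placeholders, and importing BaseItemLoader from 'converter/items.py' is an assumption based on the diffs above.

import scrapy

from converter.items import BaseItemLoader  # assumed import path (see 'converter/items.py' diffs above)


class FulltextSketchSpider(scrapy.Spider):
    """Hypothetical, stripped-down example: explicitly storing 'full text' content for indexing."""

    name = "fulltext_sketch_spider"  # placeholder name
    start_urls = ["https://example.org/article"]  # placeholder URL

    def parse(self, response: scrapy.http.Response, **kwargs):
        base: BaseItemLoader = BaseItemLoader()
        base.add_value("sourceId", response.url)
        # join all text nodes of the (hypothetical) article body into one human-readable string
        fulltext_candidate: str = " ".join(response.xpath("//article//text()").getall()).strip()
        if fulltext_candidate:
            # ends up in the ElasticSearch index as 'content.fulltext' (see 'BaseItem.fulltext' DocString)
            base.add_value("fulltext", fulltext_candidate)
        # if 'fulltext' were left empty, the pipelines would try to fall back to 'ResponseItem.text'
        yield base.load_item()
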
From 3ae37e1404e8e99d8792f730f81ef28df46f16b8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 20 Mar 2024 15:59:22 +0100 Subject: [PATCH 461/590] dilertube_spider v0.0.4 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - change: update 'discipline'-mapping for the following DiLerTube categories: -- "Gesundheit und Soziales (GuS)" -- "Informatik & Medienbildung" -- "Technik" - feat: use keywords (see: "tags" from https://www.dilertube.de/component/tags/) to: -- match "grundschule" items (-> 'educationalContext') -- match "methoden und erklärvideos" (-> 'new_lrt') - perf: slightly increase Scrapy's Autothrottle "target concurrency" setting --- converter/spiders/dilertube_spider.py | 33 +++++++++++++++++++-------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py index c5bd7144..a6cb2d66 100644 --- a/converter/spiders/dilertube_spider.py +++ b/converter/spiders/dilertube_spider.py @@ -31,12 +31,12 @@ class DiLerTubeSpider(CrawlSpider, LomBase): name = "dilertube_spider" friendlyName = "DiLerTube" start_urls = ["https://www.dilertube.de/sitemap.xml"] - version = "0.0.3" # last update: 2024-03-06 + version = "0.0.4" # last update: 2024-03-20 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 3, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 4, "WEB_TOOLS": WebEngine.Playwright, } @@ -46,12 +46,12 @@ class DiLerTubeSpider(CrawlSpider, LomBase): "Berufsorientierung": "040", # "Berufliche Bildung" "Bildende Kunst": "060", # Kunst "Gemeinschaftskunde": "48005", # Gesellschaftskunde / Sozialkunde - "Geographie": "220", # Geografie - "Gesundheit und Soziales": "", # ToDO: cannot be mapped - "Informatik und Medienbildung": "900", # Medienbildung + "Geographie": "220", # Geografie # ToDo: remove this temporary mapping as soon as the vocabs are updated + "Gesundheit und Soziales (GuS)": "260", # Gesundheit + "Informatik & Medienbildung": ["320", "900"], # Informatik; Medienbildung "Lateinisch": "20005", # Latein - "Materie Natur Technik (MNT)": "", # ToDo: cannot be mapped - "Technik": "020", # Arbeitslehrer + # "Materie Natur Technik (MNT)": "", # ToDo: cannot be mapped + "Technik": "020", # Arbeitslehre } CATEGORY_IS_ACTUALLY_A_KEYWORD = [ "DiLer Tutorials", @@ -62,7 +62,6 @@ class DiLerTubeSpider(CrawlSpider, LomBase): "Naturphänomene", "Sonstige", "Schülerprojekte", - "Technik", ] def __init__(self, **kwargs): @@ -342,10 +341,24 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade base.add_value("lom", lom.load_item()) vs: ValuespaceItemLoader = ValuespaceItemLoader() + # ToDo: use keywords for 'educationalContext' mapping + # see: https://www.dilertube.de/component/tags/ + if keywords and isinstance(keywords, list): + # cast keywords to lowercase to make mapping easier: + kw_lower: list[str] = [kw.lower() for kw in keywords] + if "grundschule" in kw_lower: + vs.add_value("educationalContext", "grundschule") + if "methoden & erklärvideos" in kw_lower: + vs.add_value("new_lrt", "a0218a48-a008-4975-a62a-27b1a83d454f") # Erklárvideo und + # gefilmtes Experiment for category_item in categories: if category_item in self.DISCIPLINE_MAPPING.keys(): - discipline = self.DISCIPLINE_MAPPING.get(category_item) - vs.add_value("discipline", discipline) + discipline_mapped: str | list[str] = self.DISCIPLINE_MAPPING.get(category_item) 
+ if isinstance(discipline_mapped, list): + for discipline in discipline_mapped: + vs.add_value("discipline", discipline) + if isinstance(discipline_mapped, str): + vs.add_value("discipline", discipline_mapped) else: vs.add_value("discipline", category_item) vs.add_value("new_lrt", "7a6e9608-2554-4981-95dc-47ab9ba924de") # Video (Material) From b6f19218ab92f563dff9b977d1697f7b54854fff Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 21 Mar 2024 11:26:41 +0100 Subject: [PATCH 462/590] dilertube_spider v0.0.5 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - fix: "erklärvideo" mapping now looks for the value within the lowercase keyword string (instead of checking for string equality) - code cleanup / docs --- converter/spiders/dilertube_spider.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/converter/spiders/dilertube_spider.py b/converter/spiders/dilertube_spider.py index a6cb2d66..0519b026 100644 --- a/converter/spiders/dilertube_spider.py +++ b/converter/spiders/dilertube_spider.py @@ -31,7 +31,7 @@ class DiLerTubeSpider(CrawlSpider, LomBase): name = "dilertube_spider" friendlyName = "DiLerTube" start_urls = ["https://www.dilertube.de/sitemap.xml"] - version = "0.0.4" # last update: 2024-03-20 + version = "0.0.5" # last update: 2024-03-21 custom_settings = { "ROBOTSTXT_OBEY": False, "AUTOTHROTTLE_ENABLED": True, @@ -341,16 +341,18 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade base.add_value("lom", lom.load_item()) vs: ValuespaceItemLoader = ValuespaceItemLoader() - # ToDo: use keywords for 'educationalContext' mapping - # see: https://www.dilertube.de/component/tags/ if keywords and isinstance(keywords, list): - # cast keywords to lowercase to make mapping easier: - kw_lower: list[str] = [kw.lower() for kw in keywords] - if "grundschule" in kw_lower: - vs.add_value("educationalContext", "grundschule") - if "methoden & erklärvideos" in kw_lower: - vs.add_value("new_lrt", "a0218a48-a008-4975-a62a-27b1a83d454f") # Erklárvideo und - # gefilmtes Experiment + # the complete list of keywords (called "tags" on DiLerTube) can be seen here: + # https://www.dilertube.de/component/tags/ + # (attention: DiLerTube tags are freetext strings that can be set by the individual video uploader) + keywords_lc: list[str] = [kw.lower() for kw in keywords] + # first, we need to cast keywords to lowercase to make mapping individual parts of a string more robust + for keyword_lowercase in keywords_lc: + if "grundschule" in keyword_lowercase: + vs.add_value("educationalContext", "grundschule") + if "erklärvideo" in keyword_lowercase: + vs.add_value("new_lrt", "a0218a48-a008-4975-a62a-27b1a83d454f") # Erklärvideo und + # gefilmtes Experiment for category_item in categories: if category_item in self.DISCIPLINE_MAPPING.keys(): discipline_mapped: str | list[str] = self.DISCIPLINE_MAPPING.get(category_item) From 1882c091f12b88c4a108579bdfcd303476ed7423 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 9 Apr 2024 12:53:28 +0200 Subject: [PATCH 463/590] bpb_spider v0.2.2 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - fix: use a more precise XPath Selector for license URLs to retrieve the article license -- this fixes the edge-cases where there where multiple license URLs within an article (e.g. 
PDFs or images with their own license) - feat: title fallback for ambiguous titles / headlines -- during the "Rohdatenprüfung" with Anja we observed articles where titles wouldn't be helpful to users --- e.g. "Literatur" or "Weiterführende Links" -- if we encounter such useless titles, we'll try to use the breadcrumbs navigation bar and build a more precise title from those elements --- example: https://www.bpb.de/themen/medien-journalismus/krieg-in-den-medien/130755/weiterfuehrende-links/ ---- the ambiguous title "Weiterführende Links" would become "Themen > Politik > Medien & Digitales > Medien & Journalismus > Krieg in den Medien > Weiterführende Links" instead --- converter/spiders/bpb_spider.py | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index f3130e5f..945456fa 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -49,7 +49,7 @@ class BpbSpider(scrapy.Spider, LomBase): "/veranstaltungen/", # ToDo: implement custom handling for events in a future version ] deny_list_endswith: list[str] = ["/impressum", "/kontakt", "/redaktion"] - version = "0.2.1" # last update: 2024-02-20 + version = "0.2.2" # last update: 2024-04-09 # (first version of the crawler after bpb.de completely relaunched their website in 2022-02) custom_settings = { "WEB_TOOLS": WebEngine.Playwright, @@ -349,6 +349,29 @@ async def parse(self, response: Response, **kwargs: Any) -> Any: elif og_title: general_itemloader.add_value("title", og_title) + ambiguous_titles: list[str] = [ + "glossar", + "links", + "links zum thema", + "literatur", + "weiterführende links", + ] + retrieved_titles: list[str] | None = general_itemloader.get_collected_values("title") + if retrieved_titles and isinstance(retrieved_titles, list): + for retrieved_title in retrieved_titles: + if retrieved_title and isinstance(retrieved_title, str): + retrieved_title_lc = retrieved_title.lower() + if retrieved_title_lc and retrieved_title_lc in ambiguous_titles: + # There are edge-cases where the title is too ambiguous to be useful for an end-user. 
+ # If we encounter such "useless" titles, + # we'll try to use the breadcrumbs as a fallback and build a string + breadcrumbs_raw: list[str] = response.xpath( + "//nav[@class='breadcrumbs']//li[" "@class='breadcrumbs__item']//a/text()" + ).getall() + if breadcrumbs_raw and isinstance(breadcrumbs_raw, list): + breadcrumbs_title: str = " > ".join(breadcrumbs_raw) + general_itemloader.replace_value("title", breadcrumbs_title) + keywords: list[str] | None = self.get_keywords(response=response, json_lds=json_lds) if keywords: general_itemloader.add_value("keyword", keywords) @@ -482,7 +505,7 @@ async def parse(self, response: Response, **kwargs: Any) -> Any: license_itemloader: LicenseItemLoader = LicenseItemLoader() if json_ld_author: license_itemloader.add_value("author", json_ld_author) - license_url: str = response.xpath("//a[@rel='license']/@href").get() + license_url: str = response.xpath("//div[@class='article-license']//a[@rel='license']/@href").get() if license_url: license_itemloader.add_value("url", license_url) From 24d773817aa9356720efe9171009a71bbab49cb1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 22 Mar 2024 17:55:26 +0100 Subject: [PATCH 464/590] docs/style: add ToDos for YouTube captions API (fulltext extraction feature) - docs: lay out necessary steps to be able to handle YouTube captions - style: fix 9 weak warnings by code formatting via black and refactoring method names to be more pythonic --- converter/spiders/youtube_spider.py | 189 +++++++++++++++------------- 1 file changed, 99 insertions(+), 90 deletions(-) diff --git a/converter/spiders/youtube_spider.py b/converter/spiders/youtube_spider.py index 82461fde..0bebb67b 100644 --- a/converter/spiders/youtube_spider.py +++ b/converter/spiders/youtube_spider.py @@ -24,6 +24,28 @@ # TODO: Find out whether `publishedAt` reflects modification # - Find another way to set `hash` if not # +# ToDo: "YouTube Handle" URLs: update URL parsing and implement handle-specific channel request parameter ('forHandle') +# - see: https://support.google.com/youtube/answer/6180214 +# (Example: "youtube.com/@youtubecreators" requires different API parameters when querying "Channels: list"-endpoint +# https://developers-dot-devsite-v2-prod.appspot.com/youtube/v3/docs/channels/list ) +# +# ToDo: YouTube API - Captions for fulltext extraction +# - see: https://developers.google.com/youtube/v3/docs/captions +# - PREREQUISITE: YouTube's "captions.download"-method REQUIRES OAuth 2.0 authentication (!!!) +# - (the current approach with a YT API Key DOES NOT work for this API endpoint! +# see: https://developers.google.com/youtube/v3/docs/captions/download#auth) +# - query 'captions'-API-endpoint with individual YT 'videoId' to receive a list of caption ids +# (see: https://developers.google.com/youtube/v3/docs/captions/list) +# IMPORTANT: API quota cost: 50 units per query! +# From the API response: +# - decide which specific language (order: German > English > ?) to download +# - decide which of the available subtitle formats (which format should we prefer? .ttml or .srt?) to download +# - query Captions API endpoint to download caption file +# (see: https://developers.google.com/youtube/v3/docs/captions/download) +# IMPORTANT: API quota cost: 200 units per query! 
+# - save captions to 'BaseItem.fulltext' +# (to make the fulltext available within edu-sharing's ElasticSearch index (within the 'content.fulltext' property)) + class YoutubeSpider(Spider): """ @@ -55,9 +77,7 @@ def update_url_query(url: str, params: dict) -> str: @staticmethod def get_csv_rows(filename: str) -> Generator[dict, None, None]: - csv_file_path = os.path.realpath( - os.path.join(os.path.dirname(__file__), "..", "..", "csv", filename) - ) + csv_file_path = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "..", "csv", filename)) with open(csv_file_path, newline="", encoding="utf-8") as csv_file: reader = csv.DictReader(csv_file) for row in reader: @@ -74,39 +94,49 @@ def start_requests(self): return if env.get(key="YOUTUBE_LIMITED_CRAWL_URL", allow_null=True, default=None) == "": # If no value is set, this serves as a reminder that you can disable the '.env'-variable altogether - logging.debug("The '.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' was detected, but no URL was set. \n" - "If you meant to start a LIMITED crawl, please check your '.env'-file and restart the " - "crawler. The crawler is now commencing with a COMPLETE crawl according to the " - "'csv/youtube.csv'-table.") + logging.debug( + "The '.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' was detected, but no URL was set. \n" + "If you meant to start a LIMITED crawl, please check your '.env'-file and restart the " + "crawler. The crawler is now commencing with a COMPLETE crawl according to the " + "'csv/youtube.csv'-table." + ) if env.get(key="YOUTUBE_LIMITED_CRAWL_URL", allow_null=True, default=None): # the OPTIONAL .env parameter is used to crawl from a SINGULAR URL ONLY - logging.debug("'.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' recognized. LIMITED crawling mode activated!\n" - "(This mode WILL NOT crawl the complete 'csv/youtube.csv'-file, but only a SINGLE YouTube " - "channel or playlist!)\n" - "If you actually wanted to start a complete/full crawl, please disable the variable in your " - "'.env'-file.") + logging.debug( + "'.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' recognized. LIMITED crawling mode activated!\n" + "(This mode WILL NOT crawl the complete 'csv/youtube.csv'-file, but only a SINGLE YouTube " + "channel or playlist!)\n" + "If you actually wanted to start a complete/full crawl, please disable the variable in your " + "'.env'-file." + ) singular_crawl_target_url: str = env.get(key="YOUTUBE_LIMITED_CRAWL_URL", default=None) if singular_crawl_target_url: - logging.debug(f"'.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' is set to: {singular_crawl_target_url} \n" - f"Searching for {singular_crawl_target_url} within 'csv/youtube.csv' for metadata values.") + logging.debug( + f"'.env'-variable 'YOUTUBE_LIMITED_CRAWL_URL' is set to: {singular_crawl_target_url} \n" + f"Searching for {singular_crawl_target_url} within 'csv/youtube.csv' for metadata values." + ) match_found: bool = False for row in YoutubeSpider.get_csv_rows("youtube.csv"): if row["url"] == singular_crawl_target_url: - # ToDo (optional): several YouTube URLs (youtu.be, youtube.com / youtube.de) can point to the same - # channel or playlist. Providing some leniency by resolving an URL to the "real" target might - # provide some Quality of Life while using this feature. + # ToDo (optional): several YouTube URLs (youtu.be, youtube.com / youtube.de) + # can point to the same channel or playlist. Providing some leniency by resolving an URL to + # the "real" target might provide some Quality of Life while using this feature. 
match_found = True - logging.debug(f"Match found in 'csv/youtube.csv' for {singular_crawl_target_url}! Commencing" - f"SINGULAR crawl process.") + logging.debug( + f"Match found in 'csv/youtube.csv' for {singular_crawl_target_url}! Commencing" + f"SINGULAR crawl process." + ) request = self.request_row(row) if request: # we are expecting exactly one result, therefore we can stop looking after the first match yield request break if match_found is False: - logging.error(f"Could not find a match for {singular_crawl_target_url} within 'csv/youtube.csv'. " - f"Please confirm that the EXACT specified URL can be found in a row of the CSV and " - f"restart the crawler.") + logging.error( + f"Could not find a match for {singular_crawl_target_url} within 'csv/youtube.csv'. " + f"Please confirm that the EXACT specified URL can be found in a row of the CSV and " + f"restart the crawler." + ) return else: # this is where the COMPLETE crawl happens: requests are yielded row-by-row from 'csv/youtube.csv' @@ -134,18 +164,17 @@ def request_row(self, row: dict) -> Request: # All of these lead to an ordinary channel, but we need to read its ID from the page # body. return Request( - row["url"], meta={"row": row}, callback=self.parse_custom_url, + row["url"], + meta={"row": row}, + callback=self.parse_custom_url, ) def request_channel(self, channel_id: str, meta: dict) -> Request: part = ["snippet", "contentDetails", "statistics"] # see: https://developers.google.com/youtube/v3/docs/channels - request_url = ( - "https://www.googleapis.com/youtube/v3/channels" - + "?part={}&id={}&key={}".format( + request_url = "https://www.googleapis.com/youtube/v3/channels" + "?part={}&id={}&key={}".format( "%2C".join(part), channel_id, env.get("YOUTUBE_API_KEY", False) ) - ) return Request(url=request_url, meta=meta, callback=self.parse_channel) def parse_channel(self, response: Response) -> Request: @@ -160,11 +189,10 @@ def parse_channel(self, response: Response) -> Request: def request_playlist(self, playlist_id: str, meta: dict) -> Request: part = ["snippet"] # see: https://developers.google.com/youtube/v3/docs/playlists - request_url = ( - "https://www.googleapis.com/youtube/v3/playlists" - + "?part={}&id={}&key={}".format( - "%2C".join(part), playlist_id, env.get("YOUTUBE_API_KEY"), - ) + request_url = "https://www.googleapis.com/youtube/v3/playlists" + "?part={}&id={}&key={}".format( + "%2C".join(part), + playlist_id, + env.get("YOUTUBE_API_KEY"), ) return Request(request_url, meta=meta, callback=self.parse_playlist) @@ -178,11 +206,10 @@ def parse_playlist(self, response: Response): def request_playlist_items(self, playlist_id: str, meta: dict) -> Request: part = ["snippet"] # see: https://developers.google.com/youtube/v3/docs/playlistItems - request_url = ( - "https://www.googleapis.com/youtube/v3/playlistItems" - + "?part={}&playlistId={}&key={}".format( - "%2C".join(part), playlist_id, env.get("YOUTUBE_API_KEY"), - ) + request_url = "https://www.googleapis.com/youtube/v3/playlistItems" + "?part={}&playlistId={}&key={}".format( + "%2C".join(part), + playlist_id, + env.get("YOUTUBE_API_KEY"), ) return Request(request_url, meta=meta, callback=self.parse_playlist_items) @@ -192,21 +219,16 @@ def parse_playlist_items(self, response: Response): ids = [item["snippet"]["resourceId"]["videoId"] for item in body["items"]] yield self.request_videos(ids, response.meta) if "nextPageToken" in body: - request_url = YoutubeSpider.update_url_query( - response.url, {"pageToken": body["nextPageToken"]} - ) - yield response.follow( 
- request_url, meta=response.meta, callback=self.parse_playlist_items - ) + request_url = YoutubeSpider.update_url_query(response.url, {"pageToken": body["nextPageToken"]}) + yield response.follow(request_url, meta=response.meta, callback=self.parse_playlist_items) def request_videos(self, ids: List[str], meta: dict): part = ["snippet", "status", "contentDetails"] # see: https://developers.google.com/youtube/v3/docs/videos - request_url = ( - "https://www.googleapis.com/youtube/v3/videos" - + "?part={}&id={}&key={}".format( - "%2C".join(part), "%2C".join(ids), env.get("YOUTUBE_API_KEY"), - ) + request_url = "https://www.googleapis.com/youtube/v3/videos" + "?part={}&id={}&key={}".format( + "%2C".join(part), + "%2C".join(ids), + env.get("YOUTUBE_API_KEY"), ) return Request(request_url, meta=meta, callback=self.parse_videos) @@ -231,7 +253,7 @@ class YoutubeLomLoader(LomBase): # The `response.meta` field is populated as follows: # - `row`: The row of the CSV file containing the channel or playlist to be scraped with some # additional information regarding all found videos. - # - `item`: Information about the video, obtained from the Youtube API. + # - `item`: Information about the video, obtained from the YouTube API. # - `channel`: Information about the YouTube channel, obtained from the YouTube API. Only # populated if an entire channel was given in the CSV row. # - `playlist`: Information about the YouTube playlist, obtained from the YouTube API. This @@ -268,22 +290,20 @@ def getBase(self, response: Response) -> items.BaseItemLoader: base = LomBase.getBase(self, response) base.add_value("origin", response.meta["row"]["sourceTitle"].strip()) base.add_value("lastModified", response.meta["item"]["snippet"]["publishedAt"]) - base.add_value("thumbnail", self.getThumbnailUrl(response)) - base.add_value("fulltext", self.getFulltext(response)) + base.add_value("thumbnail", self.get_thumbnail_url(response)) + base.add_value("fulltext", self.get_fulltext(response)) return base - def getThumbnailUrl(self, response: Response) -> str: + def get_thumbnail_url(self, response: Response) -> str: thumbnails = response.meta["item"]["snippet"]["thumbnails"] thumbnail = ( thumbnails["maxres"] if "maxres" in thumbnails - else thumbnails["standard"] - if "standard" in thumbnails - else thumbnails["high"] + else thumbnails["standard"] if "standard" in thumbnails else thumbnails["high"] ) return thumbnail["url"] - def getFulltext(self, response: Response) -> str: + def get_fulltext(self, response: Response) -> str: item = response.meta["item"]["snippet"] # If `channel` is populated, it has more relevant information than `playlist` (see comments # to `meta` field above). 
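
# Side note (sketch, not project code): the "videos.list"-endpoint accepts only a limited number
# of comma-separated ids per request (commonly documented as 50), which is why the ids gathered
# from a single playlistItems page (max. 50 entries) can be passed along as one batch. If ids
# ever came from a larger source, they would need to be chunked first, e.g.:
def chunk_video_ids(video_ids: list[str], chunk_size: int = 50) -> list[list[str]]:
    return [video_ids[i : i + chunk_size] for i in range(0, len(video_ids), chunk_size)]
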
@@ -295,7 +315,11 @@ def getFulltext(self, response: Response) -> str: else: playlist = response.meta["playlist"]["snippet"] fulltext = "\n\n".join( - [playlist["channelTitle"], playlist["title"], playlist["description"],], + [ + playlist["channelTitle"], + playlist["title"], + playlist["description"], + ], ) return fulltext @@ -303,18 +327,14 @@ def getFulltext(self, response: Response) -> str: def getLOMGeneral(self, response: Response) -> items.LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"]["snippet"]["title"]) - general.add_value("description", self.getDescription(response)) - general.add_value( - "keyword", self.parse_csv_field(response.meta["row"]["keyword"]) - ) + general.add_value("description", self.get_description(response)) + general.add_value("keyword", self.parse_csv_field(response.meta["row"]["keyword"])) if "tags" in response.meta["item"]["snippet"]: general.add_value("keyword", response.meta["item"]["snippet"]["tags"]) - general.add_value( - "language", self.parse_csv_field(response.meta["row"]["language"]) - ) + general.add_value("language", self.parse_csv_field(response.meta["row"]["language"])) return general - def getDescription(self, response: Response) -> str: + def get_description(self, response: Response) -> str: return ( response.meta["item"]["snippet"]["description"] # Fall back to playlist title when no description was given. @@ -325,26 +345,16 @@ def getDescription(self, response: Response) -> str: def getLOMTechnical(self, response: Response) -> items.LomTechnicalItemLoader: technical = LomBase.getLOMTechnical(self, response) technical.add_value("format", "text/html") - technical.add_value( - "location", YoutubeSpider.get_video_url(response.meta["item"]) - ) - technical.add_value( - "duration", response.meta["item"]["contentDetails"]["duration"] - ) + technical.add_value("location", YoutubeSpider.get_video_url(response.meta["item"])) + technical.add_value("duration", response.meta["item"]["contentDetails"]["duration"]) return technical @overrides # LomBase def getLOMEducational(self, response): educational = LomBase.getLOMEducational(self, response) tar = items.LomAgeRangeItemLoader() - tar.add_value( - "fromRange", - self.parse_csv_field(response.meta["row"][CSVBase.COLUMN_TYPICAL_AGE_RANGE_FROM]) - ) - tar.add_value( - "toRange", - self.parse_csv_field(response.meta["row"][CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO]) - ) + tar.add_value("fromRange", self.parse_csv_field(response.meta["row"][CSVBase.COLUMN_TYPICAL_AGE_RANGE_FROM])) + tar.add_value("toRange", self.parse_csv_field(response.meta["row"][CSVBase.COLUMN_TYPICAL_AGE_RANGE_TO])) educational.add_value("typicalAgeRange", tar.load_item()) return educational @@ -352,17 +362,15 @@ def getLOMEducational(self, response): def getLOMLifecycle(self, response: Response) -> items.LomLifecycleItemloader: lifecycle = LomBase.getLOMLifecycle(self, response) lifecycle.add_value("role", "author") - lifecycle.add_value( - "organization", response.meta["item"]["snippet"]["channelTitle"] - ) - lifecycle.add_value("url", self.getChannelUrl(response)) + lifecycle.add_value("organization", response.meta["item"]["snippet"]["channelTitle"]) + lifecycle.add_value("url", self.get_channel_url(response)) yield lifecycle lifecycle = LomBase.getLOMLifecycle(self, response) lifecycle.add_value("role", "publisher") lifecycle.add_value("date", response.meta["item"]["snippet"]["publishedAt"]) yield lifecycle - def getChannelUrl(self, response: Response) -> str: + def 
get_channel_url(self, response: Response) -> str: channel_id = response.meta["item"]["snippet"]["channelId"] return "https://www.youtube.com/channel/{}".format(channel_id) @@ -372,9 +380,7 @@ def getLicense(self, response: Response) -> items.LicenseItemLoader: # there are only two possible values according to https://developers.google.com/youtube/v3/docs/videos: # "youtube", "creativeCommon" if response.meta["item"]["status"]["license"] == "creativeCommon": - license_loader.add_value( - "url", Constants.LICENSE_CC_BY_30 - ) + license_loader.add_value("url", Constants.LICENSE_CC_BY_30) elif response.meta["item"]["status"]["license"] == "youtube": license_loader.replace_value("internal", Constants.LICENSE_CUSTOM) license_loader.add_value("description", "Youtube-Standardlizenz") @@ -387,14 +393,17 @@ def getValuespaces(self, response: Response) -> items.ValuespaceItemLoader: valuespaces = LomBase.getValuespaces(self, response) row = response.meta["row"] valuespaces.add_value( - "learningResourceType", self.parse_csv_field(row["learningResourceType"]), + "learningResourceType", + self.parse_csv_field(row["learningResourceType"]), ) valuespaces.add_value("discipline", self.parse_csv_field(row["discipline"])) valuespaces.add_value( - "intendedEndUserRole", self.parse_csv_field(row["intendedEndUserRole"]), + "intendedEndUserRole", + self.parse_csv_field(row["intendedEndUserRole"]), ) valuespaces.add_value( - "educationalContext", self.parse_csv_field(row[CSVBase.COLUMN_EDUCATIONAL_CONTEXT]), + "educationalContext", + self.parse_csv_field(row[CSVBase.COLUMN_EDUCATIONAL_CONTEXT]), ) if "fskRating" in response.meta["item"]["contentDetails"]: # the majority of videos doesn't have a fskRating, but if they do, we try to map the YT values to our vocab: From c6bd7172aa119ca7f944c08fc875b9d27d58d8dd Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 22 Mar 2024 18:06:23 +0100 Subject: [PATCH 465/590] add YouTube channel suggestions from ITSJOINTLY-1323 to youtube.csv - added channels: "Sehen & Verstehen - Experimente und meeehr", "MathemaTrick", "Christian Spannagel" - fix: change custom_url from YT channel "Sehen & Verstehen" to "YouTube Handle"-URL -- the custom URL format "https://www.youtube.com/c/sehenverstehenexperimenteundmeeehr/" is no longer supported by our YouTube crawler --- by clicking on "Home" / "Videos" once within a browser, YouTube redirects to the new "YouTube Handle" URL: https://www.youtube.com/@Unkauf_MC --- csv/youtube.csv | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/csv/youtube.csv b/csv/youtube.csv index 38f4ad5c..9ea82cde 100755 --- a/csv/youtube.csv +++ b/csv/youtube.csv @@ -61,6 +61,7 @@ https://www.youtube.com/channel/UComfd9z6KFVP3nggiME6-7w/featured,German as a Fo https://www.youtube.com/channel/UC7mZyCH5ppdYdJrHuxjJtkw/featured,Educational Robotics,video,"320, 04005",,teacher; learner,16,99,de; en,, ,,,,,,,,,, ,,,,,,,,,, +,,,,,,,,,, Playlists,,,,,,,,,, https://www.youtube.com/playlist?list=PLC9D2mzTyJeXYa6E1y_d0fc_7-V7BJnSq,DigiFernunterricht,video,720,,teacher,18,99,de,, https://www.youtube.com/playlist?list=PLFhPjADeGDodbVSSL8LE00SNjQIPiyamr,Webinare Deutsches Lehrkräfteforum,video,720,,teacher,18,99,de,, @@ -205,4 +206,8 @@ https://www.youtube.com/channel/UCKjJ1nCoMFTHzQlUtHHBBsw,Akademie für Lerncoach https://www.youtube.com/channel/UCFSS2FtaFNKMei4jGQOVL3w,Chemie und Bio in der Schule,video,100; 080,Sekundarstufe 1; Sekundarstufe 2,"learner, teacher",,,de,, 
https://www.youtube.com/channel/UCk0aUAhu9RxfOX1iMXAJ-2g,Chemistry Kicksass,video,100,Sekundarstufe 1; Sekundarstufe 2,learner,,,de,, https://www.youtube.com/channel/UCWNvo3l-K-X6CPSBcP9NCNg,Chemie - simpleclub,video,100,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,,,de,, -https://www.youtube.com/channel/UC1a400owZ_Qa-3Ood22cMKg,Ecole Science,video,460,Sekundarstufe 1; Sekundarstufe 2,teacher; learner,10,99,de,, \ No newline at end of file +https://www.youtube.com/channel/UC1a400owZ_Qa-3Ood22cMKg,Ecole Science,video,460,Sekundarstufe 1; Sekundarstufe 2,teacher; learner,10,99,de,, +,,,,,,,,,, +https://www.youtube.com/@Unkauf_MC,Sehen & Verstehen - Experimente und meeehr,video,100; 04003; 080; 460,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,99,de,,15.03.2024 +https://www.youtube.com/@MathemaTrick,MathemaTrick,video,380,Sekundarstufe 1; Sekundarstufe 2,learner; teacher; parent,10,99,de,,15.03.2024 +https://www.youtube.com/@pharithmetik,Christian Spannagel,video,380,Sekundarstufe 1; Sekundarstufe 2,learner; teacher,10,99,de,,15.03.2024 \ No newline at end of file From 78a3b4bce01b1fd8302ce10a44f3b7388e018021 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 22 Mar 2024 20:35:55 +0100 Subject: [PATCH 466/590] youtube_spider v0.2.3 ("YouTube Handle" URLs) - feat: reworked the "request_row()"-method to enable parsing of the "YouTube Handle" URL format -- see: https://support.google.com/youtube/answer/6180214?hl=en&sjid=8649083492401077263-EU and https://support.google.com/youtube/answer/11585688?hl=en&sjid=1154139518236355177-EU - change/remove: the previous "parse_custom_url()"-method relied on a HTTP response body that is no longer (reliably) available, causing crawls to fail silently -- observing youtube_spider in the debugger showed that YouTube redirected our HTTP Requests for custom URLs to a data protection / cookie consent pre-page, which does not contain the necessary channel_id information (which was REQUIRED for subsequent requests) -- before adding custom URLs to csv/youtube.csv always make sure that a "YouTube Handle" URL is used instead! (The crawler will throw a warning if a custom URL is detected that couldn't be handled) - style: fix whitespace in logging message --- converter/spiders/youtube_spider.py | 104 +++++++++++++++++++--------- 1 file changed, 70 insertions(+), 34 deletions(-) diff --git a/converter/spiders/youtube_spider.py b/converter/spiders/youtube_spider.py index 0bebb67b..971eb3f2 100644 --- a/converter/spiders/youtube_spider.py +++ b/converter/spiders/youtube_spider.py @@ -24,11 +24,6 @@ # TODO: Find out whether `publishedAt` reflects modification # - Find another way to set `hash` if not # -# ToDo: "YouTube Handle" URLs: update URL parsing and implement handle-specific channel request parameter ('forHandle') -# - see: https://support.google.com/youtube/answer/6180214 -# (Example: "youtube.com/@youtubecreators" requires different API parameters when querying "Channels: list"-endpoint -# https://developers-dot-devsite-v2-prod.appspot.com/youtube/v3/docs/channels/list ) -# # ToDo: YouTube API - Captions for fulltext extraction # - see: https://developers.google.com/youtube/v3/docs/captions # - PREREQUISITE: YouTube's "captions.download"-method REQUIRES OAuth 2.0 authentication (!!!) 
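
A note on the URL pattern added in the hunk below: the angle-bracketed names of the regex's named
groups appear to have been stripped from this patch text (the "(?P...)" constructs show up empty).
Reconstructed from the groupdict() keys that the surrounding code reads ("handle_url",
"playlist_id", "custom_url", "channel_id"), the pattern presumably looks like the following
sketch; treat the exact group names as an assumption. A short usage example is included:

import re

yt_url_pattern = re.compile(
    r"""youtube.com/"""
    r"""((?P<handle_url>@?[a-zA-Z._-]{3,30}$)"""
    r"""|playlist\?list=(?P<playlist_id>[\w_-]+$)"""
    r"""|c/(?P<custom_url>[\w_-]+)(/featured)?/$"""
    r"""|channel/(?P<channel_id>[\w_-]+)(?:/featured)?$)"""
)

yt_url_pattern.search("https://www.youtube.com/@MathemaTrick").groupdict()["handle_url"]
# -> "@MathemaTrick"
yt_url_pattern.search("https://www.youtube.com/channel/UC1a400owZ_Qa-3Ood22cMKg").groupdict()["channel_id"]
# -> "UC1a400owZ_Qa-3Ood22cMKg"
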
@@ -59,7 +54,7 @@ class YoutubeSpider(Spider): name = "youtube_spider" friendlyName = "Youtube" url = "https://www.youtube.com/" - version = "0.2.2" # last update: 2022-12-15 + version = "0.2.3" # last update: 2022-04-09 @staticmethod def get_video_url(item: dict) -> str: @@ -123,7 +118,7 @@ def start_requests(self): # the "real" target might provide some Quality of Life while using this feature. match_found = True logging.debug( - f"Match found in 'csv/youtube.csv' for {singular_crawl_target_url}! Commencing" + f"Match found in 'csv/youtube.csv' for {singular_crawl_target_url}! Commencing " f"SINGULAR crawl process." ) request = self.request_row(row) @@ -147,26 +142,60 @@ def start_requests(self): def request_row(self, row: dict) -> Request: if row["url"].startswith("https://www.youtube.com"): - url = urlparse(row["url"]) - if url.path == "/playlist": - playlist_id = dict(parse_qsl(url.query))["list"] - return self.request_playlist(playlist_id, meta={"row": row}) - elif url.path.startswith("/channel/"): - channel_id = url.path.split("/")[2] - return self.request_channel(channel_id, meta={"row": row}) + # There can be several types of YouTube URLs which might require different query parameters in + # subsequent requests. + # (Legacy) Username URLs: + # - https://www.youtube.com/user/ + # - https://www.youtube.com/ + # (New) YouTube Handle URLs: + # - https://www.youtube.com/@ + # YouTube also offered custom URLs to (popular) channels in two different forms in the past. + # Both of these custom URL patterns are considered legacy URLs + # and automatically redirect to YouTube Handles as of 2024. + # 1) Custom channel URLs: + # - https://www.youtube.com/c/ + # 2) Custom Channel Names: + # - https://www.youtube.com/ + yt_url_pattern = re.compile( + r"""youtube.com/""" + r"""((?P@?[a-zA-Z._-]{3,30}$)""" + r"""|playlist\?list=(?P[\w_-]+$)""" + r"""|c/(?P[\w_-]+)(/featured)?/$""" + r"""|channel/(?P[\w_-]+)(?:/featured)?$)""" + ) + re_match: re.Match = yt_url_pattern.search(row["url"]) + if re_match: + # see: https://support.google.com/youtube/answer/6180214?hl=en&sjid=8649083492401077263-EU + re_match_dict: dict = re_match.groupdict() + if "channel_id" in re_match_dict and re_match_dict["channel_id"]: + channel_id: str = re_match_dict["channel_id"] + return self.request_channel(channel_id, meta={"row": row}) + if "playlist_id" in re_match_dict and re_match_dict["playlist_id"]: + playlist_id = re_match_dict["playlist_id"] + return self.request_playlist(playlist_id, meta={"row": row}) + if "handle_url" in re_match_dict and re_match_dict["handle_url"]: + yt_handle: str = re_match_dict["handle_url"] + return self.request_channel_by_handle(yt_handle=yt_handle, meta={"row": row}) + if "custom_url" in re_match_dict and re_match_dict["custom_url"]: + # As of 2024-02 we cannot resolve channel_ids by directly querying a custom_url anymore. + # (YouTube automatically redirects to a Cookie Consent Banner, + # causing response.text to not have the necessary channel_id information). + # see: https://support.google.com/youtube/answer/2657968 + self.logger.warning( + f"Failed to extract channel_id because a YouTube custom URL detected! " + f"Please update the .csv entry for {row['url']} to point towards a valid " + f"YouTube handle instead." 
+ ) + # We cannot query custom-URLs reliably anymore -> these need to be converted by hand: + # 1) open the custom URL in your browser + # 2) click on "Home" or "Videos" + # 3) Copypaste the URL which should now be in the format "https://www.youtube.com/@" to + # the YouTube table (Google Docs) and export it to csv/youtube.csv else: - # YouTube offers custom URLs to popular channels of the form - # - https://www.youtube.com/c/ - # - https://www.youtube.com/ - # - https://www.youtube.com/user/ - # - https://www.youtube.com/ - # - # All of these lead to an ordinary channel, but we need to read its ID from the page - # body. - return Request( - row["url"], - meta={"row": row}, - callback=self.parse_custom_url, + self.logger.debug( + f"Failed to RegEx parse URL {row['url']} . " + f"(Please check if the RegEx URL pattern needs an update in the " + f"'request_row()'-method!)" ) def request_channel(self, channel_id: str, meta: dict) -> Request: @@ -177,6 +206,21 @@ def request_channel(self, channel_id: str, meta: dict) -> Request: ) return Request(url=request_url, meta=meta, callback=self.parse_channel) + def request_channel_by_handle(self, yt_handle: str, meta: dict) -> Request: + # see: https://developers-dot-devsite-v2-prod.appspot.com/youtube/v3/docs/channels/list + # -> use the 'forHandle'-query-parameter to retrieve channel information from a YouTube Handle. + # Attention: YouTube Handles and YouTube usernames are two completely different things! + # see: https://support.google.com/youtube/answer/11585688?hl=en&sjid=1154139518236355177-EU + + api_url: str = "https://www.googleapis.com/youtube/v3/channels" + query_param_part: str = "snippet,contentDetails,statistics" + request_url: str = ( + f"{api_url}?part={query_param_part}" + f"&forHandle={yt_handle}" + f"&key={env.get('YOUTUBE_API_KEY', allow_null=False)}" + ) + return Request(url=request_url, meta=meta, callback=self.parse_channel) + def parse_channel(self, response: Response) -> Request: body = json.loads(response.body) assert body["kind"] == "youtube#channelListResponse" @@ -240,14 +284,6 @@ async def parse_videos(self, response: Response): response_copy.meta["item"] = item yield await self.lomLoader.parse(response_copy) - def parse_custom_url(self, response: Response) -> Request: - match = re.search('', response.text) - if match is not None: - channel_id = match.group(1) - return self.request_channel(channel_id, meta=response.meta) - else: - logging.warning("Could not extract channel id for {}".format(response.url)) - class YoutubeLomLoader(LomBase): # The `response.meta` field is populated as follows: From a33b88a1d28bdbc56dedb2252af7b81a2d4c0a47 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 10 Apr 2024 10:00:32 +0200 Subject: [PATCH 467/590] disable "robots.txt" parsing for youtube_spider - this fixes a UnicodeDecodeError thrown by Scrapy's "robots.txt"-parser when trying to download the robots.txt file from YouTube's image host ("i.ytimg.com") at the start of a crawl process --- converter/spiders/youtube_spider.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/converter/spiders/youtube_spider.py b/converter/spiders/youtube_spider.py index 971eb3f2..2f3ff275 100644 --- a/converter/spiders/youtube_spider.py +++ b/converter/spiders/youtube_spider.py @@ -54,7 +54,10 @@ class YoutubeSpider(Spider): name = "youtube_spider" friendlyName = "Youtube" url = "https://www.youtube.com/" - version = "0.2.3" # last update: 2022-04-09 + version = "0.2.3" # last update: 
2022-04-10 + custom_settings = { + "ROBOTSTXT_OBEY": False + } @staticmethod def get_video_url(item: dict) -> str: From b57aa5231463430b5c4f64599c7a8bc7e99f9225 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 10 Apr 2024 13:08:19 +0200 Subject: [PATCH 468/590] bpb_spider v0.2.3 - fix: breadcrumbs-title-fallback omitted the last word of the breadcrumbs list -- title strings assembled from the breadcrumbs list were missing the last word ("Glossar", "Links" etc.) because the last breadcrumbs item uses a different CSS class than the rest of the strings - decrease log level from "getId()"-method from 'warning' to 'debug' -- (lots of items do not provide a stable ID -> throwing a warning for each of them is too spammy in the Kubernetes logs) --- converter/spiders/bpb_spider.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/converter/spiders/bpb_spider.py b/converter/spiders/bpb_spider.py index 945456fa..b79032a8 100644 --- a/converter/spiders/bpb_spider.py +++ b/converter/spiders/bpb_spider.py @@ -49,7 +49,7 @@ class BpbSpider(scrapy.Spider, LomBase): "/veranstaltungen/", # ToDo: implement custom handling for events in a future version ] deny_list_endswith: list[str] = ["/impressum", "/kontakt", "/redaktion"] - version = "0.2.2" # last update: 2024-04-09 + version = "0.2.3" # last update: 2024-04-10 # (first version of the crawler after bpb.de completely relaunched their website in 2022-02) custom_settings = { "WEB_TOOLS": WebEngine.Playwright, @@ -204,7 +204,7 @@ def getId(self, response: Response = None, json_lds: list[dict] = None, opengrap if item_url: return item_url elif response: - self.logger.warning( + self.logger.debug( f"Item {response.url} did not provide a stable ID (url). Falling back to response.url ..." ) return response.url @@ -365,11 +365,20 @@ async def parse(self, response: Response, **kwargs: Any) -> Any: # There are edge-cases where the title is too ambiguous to be useful for an end-user. # If we encounter such "useless" titles, # we'll try to use the breadcrumbs as a fallback and build a string - breadcrumbs_raw: list[str] = response.xpath( - "//nav[@class='breadcrumbs']//li[" "@class='breadcrumbs__item']//a/text()" + breadcrumbs_clickable_raw: list[str] | None = response.xpath( + "//nav[@class='breadcrumbs']//li[@class='breadcrumbs__item']//a/text()" ).getall() - if breadcrumbs_raw and isinstance(breadcrumbs_raw, list): - breadcrumbs_title: str = " > ".join(breadcrumbs_raw) + # clickable breadcrumbs items have a different CSS class than the last word and need to be + # extracted separately + breadcrumbs_last_word: str | None = response.xpath( + "//ol[@class='breadcrumbs__list']//li[last()]//*[last()]/text()" + ).get() + if breadcrumbs_clickable_raw and isinstance(breadcrumbs_clickable_raw, list): + breadcrumbs_title: str = " > ".join(breadcrumbs_clickable_raw) + if breadcrumbs_last_word and isinstance(breadcrumbs_last_word, str): + # assemble the final string by appending the last, non-clickable word, e.g.: + # "Themen > Politik > ... 
> Glossar" + breadcrumbs_title = f"{breadcrumbs_title} > {breadcrumbs_last_word}" general_itemloader.replace_value("title", breadcrumbs_title) keywords: list[str] | None = self.get_keywords(response=response, json_lds=json_lds) From dd4502dd3388e9ade52f4dc30bbf2eec4b3e8b13 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Apr 2024 13:48:35 +0200 Subject: [PATCH 469/590] Fix SkoHub "altLabel" processing in pipelines.py - the most recent SkoHub update (see: https://github.com/openeduhub/oeh-metadata-vocabs/pull/65) introduced some changes in the generated vocab.json files: - "altLabel" values are no longer strings only, but a list[str] for each language - the "educationalContext"-vocab was one of those cases where calling .casefold() resulted in Errors because the "altLabel"-value wasn't a string object, but a list object --- converter/pipelines.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 30f0b98c..c2e151b0 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -319,12 +319,20 @@ def process_item(self, raw_item, spider): mapped = [] for entry in json[key]: _id = {} - valuespace = self.valuespaces.data[key] + valuespace: list[dict] = self.valuespaces.data[key] found = False for v in valuespace: labels = list(v["prefLabel"].values()) if "altLabel" in v: - labels = labels + list(v["altLabel"].values()) + # the Skohub update on 2024-04-19 generates altLabels as a list[str] per language ("de", "en) + # (for details, see: https://github.com/openeduhub/oeh-metadata-vocabs/pull/65) + alt_labels: list[list[str]] = list(v["altLabel"].values()) + if alt_labels and isinstance(alt_labels, list): + for alt_label in alt_labels: + if alt_label and isinstance(alt_label, list): + labels.extend(alt_label) + if alt_label and isinstance(alt_label, str): + labels.append(alt_label) labels = list(map(lambda x: x.casefold(), labels)) if v["id"].endswith(entry) or entry.casefold() in labels: _id = v["id"] From de747586d513bfa270f8f0255db83cb0da0bd627 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Apr 2024 16:04:39 +0200 Subject: [PATCH 470/590] logging: log transformed item for easier debugging in "edu-sharing"-mode - to increase the ease of use of the "edu-sharing"-crawling-mode (= when the EduSharingStorePipeline is active and es_connector.py is called), the es_connector will log the transformed item before it gets uploaded - this should make debugging while the "edu-sharing"-mode is active a bit easier to understand --- converter/es_connector.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 3ca0700b..759d535a 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -2,6 +2,7 @@ import base64 import json import logging +import pprint import time import uuid from asyncio import Semaphore @@ -553,6 +554,7 @@ def transform_item(self, uuid, spider, item): if not type(spaces[key]) is list: spaces[key] = [spaces[key]] + log.debug(f"Transformed item:\n{pprint.pformat(spaces)}") return spaces def create_groups_if_not_exists(self, groups, type: CreateGroupType): From 2c97de1caba33209a0f58f2de9c03da880d6ea94 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 16 Apr 2024 18:44:12 +0200 Subject: [PATCH 471/590] oersi_spider v0.1.9 - feat: BIRD-related PoC implementation of "vhb" metadata extraction (directly 
from the source, in this case: vhb) -- feat: extract "language" metadata for vhb courses - change: move ".load_item()" methods to the end of "parse()" because we need to access the ItemLoaders within "vhb"-specific method calls - fix: check "creator.id" for 'null'-Type before trying to parse identifier URL in "lifecycle_determine_type_of_identifier_url"-method - style: code formatting / code cleanup --- converter/spiders/oersi_spider.py | 97 ++++++++++++++++++++++++++----- 1 file changed, 81 insertions(+), 16 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 68e57094..229dcf3d 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -38,7 +38,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.8" # last update: 2023-12-20 + version = "0.1.9" # last update: 2024-04-16 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -122,6 +122,8 @@ class OersiSpider(scrapy.Spider, LomBase): # "peerTutor": "", # ToDo: find mapping # "professional": "", # ToDo: find mapping } + # BIRD-related: "vhb" response dict (from https://open.vhb.org/oersi.json) + vhb_oersi_json: dict | None = None def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) @@ -259,6 +261,12 @@ def elastic_fetch_all_provider_pages(self): f"Recognized multiple providers within OERSI_METADATA_PROVIDER .env setting:" f"{provider_list}" ) self.ELASTIC_PROVIDERS_TO_CRAWL = provider_list + if "vhb" in self.ELASTIC_PROVIDERS_TO_CRAWL: + # experimental BIRD-Hook for "vhb"-courses! + # ToDo: refactor this implementation into its own (sub-)class ASAP! + # (WARNING: This PoC will not scale well for over >50 Metadata-Providers within OERSI + # and REQUIRES a separate infrastructure!) + self.fetch_vhb_data() has_next_page = True for provider_name in self.ELASTIC_PROVIDERS_TO_CRAWL: @@ -278,9 +286,14 @@ def elastic_fetch_all_provider_pages(self): total_count = current_page_json_response.get("hits").get("total").get("value") logging.debug(f"Expecting {total_count} items for the current API Pagination of {provider_name}") if "hits" in current_page_json_response.get("hits"): - provider_items: list = current_page_json_response.get("hits").get("hits") + provider_items: list[dict] = current_page_json_response.get("hits").get("hits") if provider_items: logging.debug(f"The provider_items list has {len(provider_items)} entries") + for provider_item in provider_items: + # we need to keep track of the metadata provider because the ElasticSearch query parameter + # will oftentimes NOT be the same string that we receive as the provider metadata value + # from "mainEntityOfPage.provider.name" + provider_item.update({"OERSI_QUERY_PROVIDER_NAME": provider_name}) all_items.extend(provider_items) last_entry: dict = provider_items[-1] # ToDo: pagination documentation @@ -300,6 +313,20 @@ def elastic_fetch_all_provider_pages(self): break return all_items + def fetch_vhb_data(self): + vhb_response: requests.Response = requests.get(url="https://open.vhb.org/oersi.json") + self.logger.info(f"BIRD: Fetching 'course'-data from vhb: {vhb_response.url} ...") + vhb_response_dict: dict = vhb_response.json() + if vhb_response_dict and isinstance(vhb_response_dict, dict): + if "data" in vhb_response_dict: + vhb_course_items = vhb_response_dict["data"] + self.logger.info( + f"BIRD: Successfully retrieved {len(vhb_course_items)} items " f"from {vhb_response.url} ." 
+ ) + self.vhb_oersi_json = vhb_response_dict + else: + logging.warning(f"BIRD: Failed to retrieve 'course'-data from 'vhb' sourceOrganization.") + def getId(self, response=None, elastic_item: dict = dict) -> str: """ Uses OERSI's ElasticSearch "_id"-field to collect an uuid. See: @@ -734,7 +761,8 @@ def lifecycle_determine_type_of_identifier_and_save_uri( saved to an identifier-specific field of LomLifecycleItemLoader. If the URI string of "id" could not be recognized, it will save the value to 'lifecycle.url' as a fallback. """ - if "id" in item_dictionary: + if "id" in item_dictionary and isinstance(item_dictionary["id"], str): + # "creator.id" can be 'null', therefore we need to explicitly check its type before trying to parse it uri_string: str = item_dictionary.get("id") if ( "orcid.org" in uri_string @@ -833,8 +861,10 @@ def parse(self, response=None, **kwargs): if thumbnail_url: base.add_value("thumbnail", thumbnail_url) except KeyError: - logging.debug(f"OERSI Item {elastic_item['_id']} " - f"(name: {elastic_item_source['name']}) did not provide a thumbnail.") + logging.debug( + f"OERSI Item {elastic_item['_id']} " + f"(name: {elastic_item_source['name']}) did not provide a thumbnail." + ) if "image" in elastic_item_source: thumbnail_url = elastic_item_source.get("image") # thumbnail if thumbnail_url: @@ -861,9 +891,6 @@ def parse(self, response=None, **kwargs): for language_value in in_languages: general.add_value("language", language_value) - # noinspection DuplicatedCode - lom.add_value("general", general.load_item()) - technical = LomTechnicalItemLoader() try: identifier_url: str = self.get_item_url(elastic_item=elastic_item) @@ -875,7 +902,6 @@ def parse(self, response=None, **kwargs): if identifier_url: general.replace_value("identifier", identifier_url) technical.add_value("location", identifier_url) - lom.add_value("technical", technical.load_item()) organizations_from_affiliation_fields: set[str] = set() # this (temporary) set of strings is used to make a decision for OERSI's "sourceOrganization" field: @@ -926,13 +952,8 @@ def parse(self, response=None, **kwargs): if in_languages: for language_value in in_languages: educational.add_value("language", language_value) - # noinspection DuplicatedCode - lom.add_value("educational", educational.load_item()) classification = LomClassificationItemLoader() - lom.add_value("classification", classification.load_item()) - - base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() vs.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) @@ -1094,8 +1115,6 @@ def parse(self, response=None, **kwargs): audience_key = self.MAPPING_AUDIENCE_TO_INTENDED_END_USER_ROLE.get(audience_key) vs.add_value("intendedEndUserRole", audience_key) - base.add_value("valuespaces", vs.load_item()) - license_loader = LicenseItemLoader() if "license" in elastic_item_source: license_url: str = elastic_item_source.get("license").get("id") @@ -1104,7 +1123,53 @@ def parse(self, response=None, **kwargs): license_url_mapped = license_mapper.get_license_url(license_string=license_url) if license_url_mapped: license_loader.add_value("url", license_url_mapped) + + if "OERSI_QUERY_PROVIDER_NAME" in elastic_item: + # BIRD-related requirement: merge item with additional metadata retrieved directly from the source + if elastic_item["OERSI_QUERY_PROVIDER_NAME"]: + # checking if the "metadata provider name" that was used for the ElasticSearch query needs to be handled + query_parameter_provider_name: str = elastic_item["OERSI_QUERY_PROVIDER_NAME"] + if 
query_parameter_provider_name and query_parameter_provider_name == "vhb": + if self.vhb_oersi_json: + if "data" in self.vhb_oersi_json: + try: + vhb_items: list[dict] = self.vhb_oersi_json["data"] + vhb_item_matched: dict | None = None + for vhb_item in vhb_items: + # since the vhb_item has a different "id", the only way to match the OERSI item + # against the vhb item is by comparing their URLs: + vhb_course_url: str = vhb_item["attributes"]["url"] + if vhb_course_url and vhb_course_url == identifier_url: + self.logger.debug( + f"BIRD: Matched 'vhb'-item {vhb_course_url} with OERSI " + f"ElasticSearch item {elastic_item['_id']}!" + ) + vhb_item_matched = vhb_item + if vhb_item_matched: + # if we found a match, we're now trying to enrich the item with metadata from both + # sources + if "attributes" in vhb_item_matched: + if not in_languages and "languages" in vhb_item_matched["attributes"]: + # beware: the vhb 'languages'-property is a string value! + vhb_language: str | None = vhb_item_matched["attributes"]["languages"] + if vhb_language and isinstance(vhb_language, str): + general.add_value("language", vhb_language) + elif vhb_language: + self.logger.warning( + f"Received unexpected vhb 'languages'-type! " + f"(Type: {type(vhb_language)}" + ) + # ToDo: vhb "workload" / "learningObjectives" next? + except KeyError as ke: + raise ke + # noinspection DuplicatedCode + lom.add_value("general", general.load_item()) + lom.add_value("technical", technical.load_item()) + lom.add_value("educational", educational.load_item()) + lom.add_value("classification", classification.load_item()) + base.add_value("lom", lom.load_item()) + base.add_value("valuespaces", vs.load_item()) base.add_value("license", license_loader.load_item()) permissions = super().getPermissions(response) From b0094b142666cb17c673884bdb6e4a503eca97b9 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 18 Apr 2024 18:34:49 +0200 Subject: [PATCH 472/590] add 3 "CourseItem" properties to data model (work-in-progress) - implements the following BIRD-related properties for courses: - 'course_duration' - 'course_learningoutcome' - 'course_workload' --- converter/es_connector.py | 21 ++++++++++++++++----- converter/items.py | 21 +++++++++++++++++++++ 2 files changed, 37 insertions(+), 5 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 759d535a..fc81dea1 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -535,11 +535,22 @@ def transform_item(self, uuid, spider, item): if "toRange" in tar: spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] - # intendedEndUserRole = Field(output_processor=JoinMultivalues()) - # discipline = Field(output_processor=JoinMultivalues()) - # educationalContext = Field(output_processor=JoinMultivalues()) - # learningResourceType = Field(output_processor=JoinMultivalues()) - # sourceContentType = Field(output_processor=JoinMultivalues()) + if "course" in item: + # ToDo: activate these fields AFTER confirming that the edu-sharing properties are correct + # ToDo: implement a CourseItemPipeline in pipelines.py BEFORE activating these fields! 
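
# A minimal sketch of the CourseItemPipeline mentioned in the ToDo above (hypothetical, not yet
# implemented in pipelines.py): such a pipeline could validate and normalize the "course"-fields
# before they reach the es_connector. The class name and behavior below are assumptions.
class CourseItemPipeline:
    def process_item(self, item, spider):
        if "course" not in item:
            return item
        course = item["course"]
        if "course_duration" in course:
            try:
                # 'cclom:typicallearningtime' expects the duration in seconds -> enforce an int value
                course["course_duration"] = int(course["course_duration"])
            except (TypeError, ValueError):
                spider.logger.warning(f"Invalid 'course_duration' value: {course['course_duration']!r}")
                del course["course_duration"]
        for text_field in ("course_learningoutcome", "course_description_short"):
            if text_field in course and isinstance(course[text_field], str):
                course[text_field] = course[text_field].strip()
        return item
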
+ if "course_duration" in item["course"]: + # ToDo + # spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] + pass + if "course_learningoutcome" in item["course"]: + # ToDo + # spaces["ccm:learninggoal"] = item["course"]["course_learningoutcome"] + pass + if "course_workload" in item["course"]: + # ToDo: which edu-sharing property should be used for workload per week? (and: which time unit?) + pass + pass + mdsId = env.get("EDU_SHARING_METADATASET", allow_null=True, default="mds_oeh") if mdsId != "default": spaces["cm:edu_metadataset"] = mdsId diff --git a/converter/items.py b/converter/items.py index ea0d08eb..880cbe40 100644 --- a/converter/items.py +++ b/converter/items.py @@ -319,6 +319,21 @@ class PermissionItem(Item): """Determines if this item should be 'public' (= accessible by anyone)""" +class CourseItem(Item): + """ + BIRD-specific metadata properties intended only for courses. + """ + course_duration = Field() + # ToDo: edu-sharing expects the course duration in seconds (as long as 'cclom:typicallearningtime' is used!) + """Corresponding edu-sharing property: 'cclom:typicallearningtime'""" + course_learningoutcome = Field() + """Describes "Lernergebnisse" or "learning objectives". (Expects a string, with or without HTML-formatting!) + Corresponding edu-sharing property: 'ccm:learninggoal'""" + course_workload = Field() + """Describes the workload per week.""" + # ToDo: confirm where "workload" values should be saved within edu-sharing + + class BaseItem(Item): """ BaseItem provides the basic data structure for any crawled item. @@ -339,6 +354,7 @@ class BaseItem(Item): """Binary data which should be uploaded to edu-sharing (= raw data, e.g. ".pdf"-files).""" collection = Field(output_processor=JoinMultivalues()) """id of edu-sharing collections this entry should be placed into""" + course = Field(serializer=CourseItem) custom = Field() """A field for custom data which can be used by the target transformer to store data in the native format (i.e. 'ccm:'/'cclom:'-properties in edu-sharing).""" @@ -398,6 +414,11 @@ class BaseItemLoader(ItemLoader): default_output_processor = TakeFirst() +class CourseItemLoader(ItemLoader): + default_item_class = CourseItem + default_output_processor = TakeFirst() + + class MutlilangItemLoader(ItemLoader): default_item_class = MutlilangItem default_output_processor = TakeFirst() From 90c6351cf17ca30f0f44944f34aca85a897afa08 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 18 Apr 2024 18:36:50 +0200 Subject: [PATCH 473/590] oersi_spider v0.2.0 (work-in-progress) - change: getHash() uses 'v' as a separator between time and crawler version to make reading its values (in the front-end or via Kibana) easier legible - implements the following BIRD-related properties: - "course_learningoutcome" - "course_url_video" - "course_description_short" new metadata properties for "vhb" course items: - feat: fetch "vhb"-metadata from MOOCHub for - VHB "abstract" (-> "CourseItem.course_description_short"): MOOCHub spec v2.x contained short descriptions of courses - (attention: "abstract" is not part of MOOCHub v3 and might become unavailable in the future!) 
- VHB "video.url" (-> "CourseItem.course_url_video"): contains an URL of a course teaser-/trailer-video - VHB "learningObjectives" (-> "CourseItem.course_learningoutcome"): contains a string describing the learning objectives of a course - currently inactive metadata properties (until the correct mapping edu-sharing can be confirmed): - 'course_duration' - 'course_workload' --- converter/spiders/oersi_spider.py | 139 ++++++++++++++++++++++++++---- 1 file changed, 120 insertions(+), 19 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 229dcf3d..5a7e3a78 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -1,6 +1,7 @@ import datetime import logging import random +import re from typing import Optional import requests @@ -20,6 +21,7 @@ ValuespaceItemLoader, LicenseItemLoader, ResponseItemLoader, + CourseItemLoader, ) from converter.spiders.base_classes import LomBase from converter.util.license_mapper import LicenseMapper @@ -38,7 +40,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.1.9" # last update: 2024-04-16 + version = "0.2.0" # last update: 2024-04-16 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -349,11 +351,11 @@ def getHash(self, response=None, elastic_item_source: dict = dict) -> str: if "dateCreated" in elastic_item_source: date_created: str = elastic_item_source["dateCreated"] if date_published: - hash_temp: str = f"{date_published}{self.version}" + hash_temp: str = f"{date_published}v{self.version}" elif date_created: - hash_temp: str = f"{date_created}{self.version}" + hash_temp: str = f"{date_created}v{self.version}" else: - hash_temp: str = f"{datetime.datetime.now().isoformat()}{self.version}" + hash_temp: str = f"{datetime.datetime.now().isoformat()}v{self.version}" return hash_temp @staticmethod @@ -1130,6 +1132,9 @@ def parse(self, response=None, **kwargs): # checking if the "metadata provider name" that was used for the ElasticSearch query needs to be handled query_parameter_provider_name: str = elastic_item["OERSI_QUERY_PROVIDER_NAME"] if query_parameter_provider_name and query_parameter_provider_name == "vhb": + # Reminder: "VHB" (= "Virtuelle Hochschule Bayern") uses MOOCHub for their JSON export! + # The following implementation is therefore MOOCHub-specific + # and NEEDS to be refactored into a separate class hook ASAP! if self.vhb_oersi_json: if "data" in self.vhb_oersi_json: try: @@ -1145,23 +1150,119 @@ def parse(self, response=None, **kwargs): f"ElasticSearch item {elastic_item['_id']}!" ) vhb_item_matched = vhb_item - if vhb_item_matched: - # if we found a match, we're now trying to enrich the item with metadata from both - # sources - if "attributes" in vhb_item_matched: - if not in_languages and "languages" in vhb_item_matched["attributes"]: - # beware: the vhb 'languages'-property is a string value! - vhb_language: str | None = vhb_item_matched["attributes"]["languages"] - if vhb_language and isinstance(vhb_language, str): - general.add_value("language", vhb_language) - elif vhb_language: - self.logger.warning( - f"Received unexpected vhb 'languages'-type! " - f"(Type: {type(vhb_language)}" - ) - # ToDo: vhb "workload" / "learningObjectives" next? 
except KeyError as ke: raise ke + if vhb_item_matched: + # if we found a match, we're now trying to enrich the item with metadata from both + # sources + course_itemloader: CourseItemLoader = CourseItemLoader() + if "attributes" in vhb_item_matched: + if not in_languages and "languages" in vhb_item_matched["attributes"]: + # beware: the vhb 'languages'-property is a string value! + vhb_language: str | None = vhb_item_matched["attributes"]["languages"] + if vhb_language and isinstance(vhb_language, str): + general.add_value("language", vhb_language) + elif vhb_language: + self.logger.warning( + f"Received unexpected vhb 'languages'-type! " + f"(Type: {type(vhb_language)}" + ) + if "abstract" in vhb_item_matched["attributes"]: + vhb_abstract: str = vhb_item_matched["attributes"]["abstract"] + if vhb_abstract and isinstance(vhb_abstract, str): + course_itemloader.add_value("course_description_short", vhb_abstract) + if "video" in vhb_item_matched["attributes"]: + video_item: dict = vhb_item_matched["attributes"]["video"] + if video_item: + if "url" in video_item: + vhb_course_video_url: str = video_item["url"] + if vhb_course_video_url: + course_itemloader.add_value( + "course_url_video", vhb_course_video_url + ) + # ToDo: "video.licenses" is of type list[dict] + # each "license"-dict can have an "id"- and "url"-property + if "workload" in vhb_item_matched["attributes"]: + vhb_workload_raw: str = vhb_item_matched["attributes"]["workload"] + if vhb_workload_raw and isinstance(vhb_workload_raw, str): + # vhb "workload"-values are described as a natural lange (German) + # " "-string, e.g.: "5 Stunden" or "60 Stunden". + # Since edu-sharing expects seconds in "cclom:typicallearningtime", + # we need to parse the string and convert it to seconds. + vhb_workload: str = vhb_workload_raw.strip() + duration_pattern = re.compile( + r"""(?P\d+)\s*(?P\w*)""" + ) + # ToDo: refactor into "course duration" parser method + duration_match: re.Match | None = duration_pattern.search(vhb_workload) + duration_delta: datetime.timedelta = datetime.timedelta() + if duration_match: + duration_result = duration_match.groupdict() + if "duration_number" in duration_result: + duration_number_raw: str = duration_result["duration_number"] + duration_number: int = int(duration_number_raw) + if "duration_unit" in duration_result: + duration_unit: str = duration_result["duration_unit"] + duration_unit = duration_unit.lower() + match duration_unit: + case "sekunden": + duration_delta = duration_delta + datetime.timedelta( + seconds=duration_number + ) + case "minuten": + duration_delta = duration_delta + datetime.timedelta( + minutes=duration_number + ) + case "stunden": + duration_delta = duration_delta + datetime.timedelta( + hours=duration_number + ) + case "tage": + duration_delta = duration_delta + datetime.timedelta( + days=duration_number + ) + case "wochen": + duration_delta = duration_delta + datetime.timedelta( + weeks=duration_number + ) + case "monate": + # timedelta has no parameter for months + # -> X months = X * (4 weeks) + duration_delta = duration_delta + ( + duration_number * datetime.timedelta(weeks=4) + ) + case _: + self.logger.warning( + f"Failed to parse 'workload' time unit" + f"from vhb course: " + f"{vhb_item_matched}" + ) + if duration_delta: + # full seconds is as precise as we need to be, + # therefore we convert the seconds to int values + workload_in_seconds: int = int( + duration_delta.total_seconds() + ) + if workload_in_seconds: + # ToDo: confirm that course_duration is correct + # (BIRD 
"course_workload" seems to be a closer match) + # course_itemloader.add_value( + # "course_duration", workload_in_seconds + # ) + # ToDo: choose only 1 of these 2 possible properties + # course_itemloader.add_value( + # "course_workload", workload_in_seconds + # ) + pass + if "learningObjectives" in vhb_item_matched["attributes"]: + vhb_learning_objectives: str = vhb_item_matched["attributes"][ + "learningObjectives" + ] + if vhb_learning_objectives and isinstance(vhb_learning_objectives, str): + course_itemloader.add_value( + "course_learningoutcome", vhb_learning_objectives + ) + base.add_value("course", course_itemloader.load_item()) # noinspection DuplicatedCode lom.add_value("general", general.load_item()) From e31cdec2d339b5e57ab8acb6593a157e7968e8c8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Apr 2024 20:05:45 +0200 Subject: [PATCH 474/590] feat: add BIRD metadata properties "course_description_short" and "course_url_video" to CourseItem - "course_description_short": a (shorter than 'general.description') string that describes the course - can either be a raw string or HTML formatted - this property is mapped to "ccm:oeh_course_description_short" - "course_url_video": a URL pointing towards a teaser- or trailer-video of a course - this property is mapped to "ccm:oeh_course_url_video" --- converter/items.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/converter/items.py b/converter/items.py index 880cbe40..6f4e0643 100644 --- a/converter/items.py +++ b/converter/items.py @@ -332,6 +332,11 @@ class CourseItem(Item): course_workload = Field() """Describes the workload per week.""" # ToDo: confirm where "workload" values should be saved within edu-sharing + course_description_short = Field() + """Corresponding edu-sharing property: 'ccm:oeh_course_description_short'""" + course_url_video = Field() + """URL of a course-specific trailer- or teaser-video. + Corresponding edu-sharing property: 'ccm:oeh_course_url_video'""" class BaseItem(Item): From 1958a5d05af306c260f105dfa929cade6bbb4e02 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Apr 2024 20:10:14 +0200 Subject: [PATCH 475/590] feat: activate BIRD metadata properties "course_learningoutcome", "course_description_short" and "course_url_video" - activated "course_learningoutcome": describes the learning objectives of a course - this property is mapped to edu-sharing property "ccm:learninggoal" - added "course_description_short": a (shorter than 'general.description') string that describes the course - can either be a raw string or HTML formatted - this property is mapped to "ccm:oeh_course_description_short" - added "course_url_video": a URL pointing towards a teaser- or trailer-video of a course - this property is mapped to "ccm:oeh_course_url_video" --- converter/es_connector.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index fc81dea1..36c10aa7 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -538,14 +538,16 @@ def transform_item(self, uuid, spider, item): if "course" in item: # ToDo: activate these fields AFTER confirming that the edu-sharing properties are correct # ToDo: implement a CourseItemPipeline in pipelines.py BEFORE activating these fields! 
+ if "course_description_short" in item["course"]: + spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] if "course_duration" in item["course"]: # ToDo # spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] pass if "course_learningoutcome" in item["course"]: - # ToDo - # spaces["ccm:learninggoal"] = item["course"]["course_learningoutcome"] - pass + spaces["ccm:learninggoal"] = item["course"]["course_learningoutcome"] + if "course_url_video" in item["course"]: + spaces["ccm:oeh_course_url_video"] = item["course"]["course_url_video"] if "course_workload" in item["course"]: # ToDo: which edu-sharing property should be used for workload per week? (and: which time unit?) pass From c8c04edae19518fc3e5ef81e69898103a3f81a6d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 25 Apr 2024 09:55:56 +0200 Subject: [PATCH 476/590] BREAKING: oersi_spider v0.2.1 - breaking change: instead of using OERSI's ElasticSearch "_id"-property for identifying unique objects, we'll be using "_source.id" from now on in the getId() method - background for this decision: while developing oersi_spider "_id" seemed to be a reasonable choice to receive a stable (and unique) identifier for a document (see: https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-id-field.html), but this property was not as stable as we hoped it to be: - an object might that was previously crawled with a specific "_id" might disappear from OERSIs ElasticSearch index and reappear with a completely different "_id" between two crawls - (this might happen during (re-)builds of the ElasticSearch index, object merges and deletes on OERSI's side) ATTENTION: - running oersi_spider v0.2.1+ REQUIRES a clean slate in edu-sharing's "SYNC_OBJ/oersi_spider/" directory! learning objects of str: """ - Uses OERSI's ElasticSearch "_id"-field to collect an uuid. See: - https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-id-field.html + Uses OERSI's "_source.id"-property to collect a URI. 
According to the AMB Specifications, the URI can be either: + - a (direct) URL to the educational resource + - a URL pointing towards a landing page (describing the educational resource) + + See: https://dini-ag-kim.github.io/amb/latest/#id """ - return elastic_item["_id"] + return elastic_item["_source"]["id"] def getHash(self, response=None, elastic_item_source: dict = dict) -> str: """ From 741f90d9c9813733b11ea8336163b97f0a988de5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 25 Apr 2024 18:04:42 +0200 Subject: [PATCH 477/590] oersi_spider v0.2.2 - feat: fetch dynamic list of "MetadataProviders" from OERSI's ElasticSearch API if no limited crawl setting is enabled: - if the class variable "ELASTIC_PROVIDERS_TO_CRAWL" list is empty at the beginning of a crawl process, the crawler will automatically fetch the most up-to-date list of strings from OERSI - you can still use this setting during debugging if you want to test against specific providers - the .env setting "OERSI_METADATA_PROVIDER" still takes precedence and has the highest priority to control the program flow - logging: remove obsolete debugger messages and replace "logging"-calls with spider-specific logger - fix: fixed weak warnings (code formatting) --- converter/spiders/oersi_spider.py | 301 +++++++++++++++++++----------- 1 file changed, 192 insertions(+), 109 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index ebbd3f8f..6d27f209 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -1,7 +1,7 @@ import datetime -import logging import random import re +from collections import Counter from typing import Optional import requests @@ -40,7 +40,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.2.1" # last update: 2024-04-25 + version = "0.2.2" # last update: 2024-04-25 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -60,50 +60,53 @@ class OersiSpider(scrapy.Spider, LomBase): ) ELASTIC_PIT_ID: dict = dict() - # the provider-filter at https://oersi.org/resources/ shows you which String values can be used as a provider-name - # ToDo: regularly check if new providers need to be added to the list below (and insert/sort them alphabetically!) + + # the "Provider"-filter in the frontend of https://oersi.org/resources/ shows you which string values + # can be used as a query-parameter for ElasticSearch (names are case-sensitive and need to be matches!) + # You can use the ELASTIC_PROVIDERS_TO_CRAWL list to manually override the crawling targets. If the list is empty, + # the crawler will query the ElasticSearch API and fill the list at the beginning of a crawl! 
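
# Illustrative sketch only: the dynamic provider list described in the commit message above boils
# down to a standard ElasticSearch "terms"-aggregation against the same '_search'-endpoint used by
# this spider. The "size" value below is an assumption (the concrete value used by the crawler is
# not shown here); the response shape follows the standard ElasticSearch aggregation format.
import requests

def fetch_provider_names_sketch() -> list[str]:
    payload = {
        "_source": False,
        "size": 0,
        "aggs": {
            "MetadataProviders": {
                "terms": {"field": "mainEntityOfPage.provider.name", "size": 500}
            }
        },
    }
    response = requests.post(
        "https://oersi.org/resources/api-internal/search/oer_data/_search", json=payload
    )
    buckets: list[dict] = response.json()["aggregations"]["MetadataProviders"]["buckets"]
    return [bucket["key"] for bucket in buckets]
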
ELASTIC_PROVIDERS_TO_CRAWL: list = [ - "BC Campus", # BC Campus website cannot be crawled at the moment, needs further investigation + # "BC Campus", # BC Campus website cannot be crawled at the moment, needs further investigation # "ComeIn", # should not be crawled, datasets were exported to OERSI from WLO - "detmoldMusicTools", - "digiLL", - "DuEPublico", - "eaDNURT", - "eCampusOntario", - "eGov-Campus", - "Finnish Library of Open Educational Resources", # URLs of this metadata-provider cannot be resolved - "GitHub", - "GitLab", - "Helmholtz Codebase", - "HessenHub", - "HHU Mediathek", - "HOOU", - "iMoox", - "KI Campus", - "langSci Press", # new provider as of 2023-04-27 - "lecture2go (Hamburg)", # new provider as of 2023-12-14 - "MIT OpenCourseWare", + # "detmoldMusicTools", + # "digiLL", + # "DuEPublico", + # "eaDNURT", + # "eCampusOntario", + # "eGov-Campus", + # "Finnish Library of Open Educational Resources", # URLs of this metadata-provider cannot be resolved + # "GitHub", + # "GitLab", + # "Helmholtz Codebase", + # "HessenHub", + # "HHU Mediathek", + # "HOOU", + # "iMoox", + # "KI Campus", + # "langSci Press", # new provider as of 2023-04-27 + # "lecture2go (Hamburg)", # new provider as of 2023-12-14 + # "MIT OpenCourseWare", # "OEPMS", # new provider as of 2023-04-27 # ToDo: cannot be crawled - "OER Portal Uni Graz", - "oncampus", # (temporarily) not available? (2023-12-14) - "Open Music Academy", - "Open Textbook Library", - "Opencast Universität Osnabrück", - "openHPI", - "OpenLearnWare", - "OpenRub", - "ORCA.nrw", - "Phaidra Uni Wien", - "Pressbooks Directory", # new provider as of 2023-12-14 - "RWTH Aachen GitLab", - "TIB AV-Portal", - "TU Delft OpenCourseWare", - "twillo", - "Universität Innsbruck OER Repositorium", - "VCRP", - "vhb", - "Virtual Linguistics Campus", - "ZOERR", + # "OER Portal Uni Graz", + # "oncampus", # (temporarily) not available? (2023-12-14) + # "Open Music Academy", + # "Open Textbook Library", + # "Opencast Universität Osnabrück", + # "openHPI", + # "OpenLearnWare", + # "OpenRub", + # "ORCA.nrw", + # "Phaidra Uni Wien", + # "Pressbooks Directory", # new provider as of 2023-12-14 + # "RWTH Aachen GitLab", + # "TIB AV-Portal", + # "TU Delft OpenCourseWare", + # "twillo", + # "Universität Innsbruck OER Repositorium", + # "VCRP", + # "vhb", + # "Virtual Linguistics Campus", + # "ZOERR", ] ELASTIC_ITEMS_ALL = list() @@ -133,11 +136,15 @@ def __init__(self, **kwargs): self.ELASTIC_PIT_ID = self.elastic_pit_get_id(self.elastic_pit_create()) # querying the ElasticSearch API for metadata-sets of specific providers, this allows us to control which # providers we want to include/exclude by using the "ELASTIC_PROVIDERS_TO_CRAWL"-list + if not self.ELASTIC_PROVIDERS_TO_CRAWL: + # if no crawling targets were set (e.g. 
during debugging), the default behavior is to query all + # metadata providers from OERSI's ElasticSearch + self.elastic_fetch_list_of_provider_names() self.ELASTIC_ITEMS_ALL = self.elastic_fetch_all_provider_pages() # after all items have been collected, delete the ElasticSearch PIT json_response = self.elastic_pit_delete() if json_response: - logging.info(f"ElasticSearch API response (upon PIT delete): {json_response}") + self.logger.info(f"ElasticSearch API response (upon PIT delete): {json_response}") def start_requests(self): # yield dummy request, so that Scrapy's start_item method requirement is satisfied, @@ -147,6 +154,30 @@ def start_requests(self): def handle_collected_elastic_items(self, response: scrapy.http.Response): random.shuffle(self.ELASTIC_ITEMS_ALL) # shuffling the list of ElasticSearch items to improve concurrency and # distribute the load between several target domains. + + # counting duplicates across "metadata provider"-queries: + urls_all: list = [x["_source"]["id"] for x in self.ELASTIC_ITEMS_ALL] + urls_counted = Counter(urls_all) + duplicates: set = set() + for item in urls_counted: + # if items occur more than once, we'll add their URL to the duplicate set (to compare it later) + if urls_counted[item] > 1: + duplicates.add(item) + duplicate_dict = dict() + for elastic_item in self.ELASTIC_ITEMS_ALL: + _source_id: str = elastic_item["_source"]["id"] + if _source_id in duplicates: + # if an object appears in more than one "MetadataProvider"-query response, we'll create a dictionary, + # where the "key" is the URL and the "value" is a list of duplicate objects (Type: list[dict]) + if _source_id in duplicate_dict: + duplicate_list: list = duplicate_dict[_source_id] + duplicate_list.append(elastic_item) + duplicate_dict.update({_source_id: duplicate_list}) + else: + duplicate_dict.update({_source_id: [elastic_item]}) + # Dumping duplicates to local .json for further analysis: + # with open("oersi_duplicates.json", "w") as fp: + # json.dump(duplicate_dict, fp) for elastic_item in self.ELASTIC_ITEMS_ALL: yield from self.check_item_and_yield_to_parse_method(elastic_item) @@ -159,15 +190,15 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req item_url: str = self.get_item_url(elastic_item) if item_url: if self.shouldImport(response=None) is False: - logging.debug( + self.logger.debug( "Skipping entry {} because shouldImport() returned false".format( str(self.getId(response=None, elastic_item=elastic_item)) ) ) return None if ( - self.getId(response=None, elastic_item=elastic_item) is not None - and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None + self.getId(response=None, elastic_item=elastic_item) is not None + and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None ): if not self.hasChanged(None, elastic_item=elastic_item): return None @@ -205,14 +236,70 @@ def elastic_pit_delete(self) -> dict: """ url = f"https://oersi.org/resources/api-internal/search/_pit" delete_request = requests.delete(url=url, json=self.ELASTIC_PIT_ID) - logging.debug(f"Deleting ElasticSearch PIT: {self.ELASTIC_PIT_ID}") + self.logger.debug(f"Deleting ElasticSearch PIT: {self.ELASTIC_PIT_ID}") return delete_request.json() + def elastic_fetch_list_of_provider_names(self): + _url = "https://oersi.org/resources/api-internal/search/oer_data/_search" + + _payload = { + "_source": False, + "size": 0, + "aggs": {"MetadataProviders": {"terms": {"field": "mainEntityOfPage.provider.name", "size": 
500}}}, + } + # remember to increase the "size"-parameter if the list of metadata-providers reaches > 500 results + _headers = {"Content-Type": "application/json", "accept": "application/json"} + response = requests.request("POST", _url, json=_payload, headers=_headers) + if response.ok: + response_json: dict = response.json() + if "aggregations" in response_json: + aggregations: dict = response_json["aggregations"] + try: + buckets: list[dict] = aggregations["MetadataProviders"]["buckets"] + metadata_provider_count_total: int = 0 + if buckets and isinstance(buckets, list): + self.logger.debug( + f"OERSI 'MetadataProviders'-query returned {len(buckets)} metadata providers." + ) + self.logger.debug(f"{buckets}") + for bucket_item in buckets: + if "key" in bucket_item: + metadata_provider_name: str = bucket_item["key"] + if metadata_provider_name and isinstance(metadata_provider_name, str): + self.ELASTIC_PROVIDERS_TO_CRAWL.append(metadata_provider_name) + if "doc_count" in bucket_item: + metadata_provider_count: int = bucket_item["doc_count"] + if metadata_provider_count and isinstance(metadata_provider_count, int): + metadata_provider_count_total += metadata_provider_count + if self.ELASTIC_PROVIDERS_TO_CRAWL: + self.logger.info( + f"Successfully retrieved the following metadata providers for future API " + f"requests:\n" + f"{self.ELASTIC_PROVIDERS_TO_CRAWL}" + ) + if metadata_provider_count_total: + self.logger.info(f"Expecting {metadata_provider_count_total} ElasticSearch objects in " + f"total.") + except KeyError as ke: + self.logger.error( + f"Failed to retrieve 'buckets'-list of metadata providers from OERSI " + f"ElasticSearch response (please check (with a debugger) if the property " + f"'aggregations.MetadataProviders.buckets' was part of the API response!)" + ) + raise ke + else: + self.logger.error( + f"Failed to retrieve list of metadata providers from OERSI's ElasticSearch API. " + f"(The response object did not return a 'aggregations'-object. Please check the API!)" + ) + def elastic_query_provider_metadata(self, provider_name, search_after=None): """ - Queries OERSI's ElasticSearch API for a metadata from a specific provider. + Queries OERSI's ElasticSearch API for metadata items from a specific metadata provider, as specified by the + "provider_name"-string. - See: https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#paginate-search-results + See: + https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#paginate-search-results """ url = "https://oersi.org/resources/api-internal/search/_search" if search_after is None: @@ -240,7 +327,6 @@ def elastic_query_provider_metadata(self, provider_name, search_after=None): } headers = {"Content-Type": "application/json", "accept": "application/json"} response = requests.post(url=url, json=payload, headers=headers) - # logging.debug(response.text) return response.json() def elastic_fetch_all_provider_pages(self): @@ -255,11 +341,13 @@ def elastic_fetch_all_provider_pages(self): # 1:1 identical to the metadata-provider string values on OERSI.org. provider_target_from_env: str = env.get(key="OERSI_METADATA_PROVIDER", allow_null=True, default=None) if provider_target_from_env: - logging.info(f"Recognized OERSI_METADATA_PROVIDER .env setting. Value: {provider_target_from_env}") + self.logger.info( + f"Recognized OERSI_METADATA_PROVIDER .env setting. 
Limiting crawl to the following target(s): " + f"{provider_target_from_env}") self.ELASTIC_PROVIDERS_TO_CRAWL = [provider_target_from_env] if ";" in provider_target_from_env: provider_list: list[str] = provider_target_from_env.split(";") - logging.info( + self.logger.info( f"Recognized multiple providers within OERSI_METADATA_PROVIDER .env setting:" f"{provider_list}" ) self.ELASTIC_PROVIDERS_TO_CRAWL = provider_list @@ -280,17 +368,14 @@ def elastic_fetch_all_provider_pages(self): if "pit_id" in current_page_json_response: if current_page_json_response.get("pit_id") != self.ELASTIC_PIT_ID.get("id"): self.ELASTIC_PIT_ID = current_page_json_response.get("pit_id") - logging.info( + self.logger.info( f"ElasticSearch: pit_id changed between queries, using the new pit_id " f"{current_page_json_response.get('pit_id')} for subsequent queries." ) - if "hits" in current_page_json_response: - total_count = current_page_json_response.get("hits").get("total").get("value") - logging.debug(f"Expecting {total_count} items for the current API Pagination of {provider_name}") if "hits" in current_page_json_response.get("hits"): provider_items: list[dict] = current_page_json_response.get("hits").get("hits") if provider_items: - logging.debug(f"The provider_items list has {len(provider_items)} entries") + self.logger.debug(f"The provider_items list has {len(provider_items)} entries") for provider_item in provider_items: # we need to keep track of the metadata provider because the ElasticSearch query parameter # will oftentimes NOT be the same string that we receive as the provider metadata value @@ -308,7 +393,7 @@ def elastic_fetch_all_provider_pages(self): has_next_page = False break else: - logging.info( + self.logger.info( f"Reached the end of the ElasticSearch results for '{provider_name}' // " f"Total amount of items collected (across all metadata-providers): {len(all_items)}" ) @@ -327,7 +412,7 @@ def fetch_vhb_data(self): ) self.vhb_oersi_json = vhb_response_dict else: - logging.warning(f"BIRD: Failed to retrieve 'course'-data from 'vhb' sourceOrganization.") + self.logger.warning(f"BIRD: Failed to retrieve 'course'-data from 'vhb' sourceOrganization.") def getId(self, response=None, elastic_item: dict = dict) -> str: """ @@ -361,18 +446,16 @@ def getHash(self, response=None, elastic_item_source: dict = dict) -> str: hash_temp: str = f"{datetime.datetime.now().isoformat()}v{self.version}" return hash_temp - @staticmethod - def get_uuid(elastic_item: dict): + def get_uuid(self, elastic_item: dict): """ Builds a UUID string from the to-be-parsed target URL and returns it. """ # The "getUUID"-method of LomBase couldn't be cleanly overridden because at the point of time when we do this # check, there is no "Response"-object available yet. - item_url = OersiSpider.get_item_url(elastic_item=elastic_item) + item_url = self.get_item_url(elastic_item=elastic_item) return EduSharing.build_uuid(item_url) - @staticmethod - def get_item_url(elastic_item: dict) -> str | None: + def get_item_url(self, elastic_item: dict) -> str | None: """ Retrieves the to-be-parsed URL from OERSI's '_source.id'-field. If that (REQUIRED) field was not available, returns None. @@ -381,7 +464,7 @@ def get_item_url(elastic_item: dict) -> str | None: if item_url: return item_url else: - logging.warning(f"OERSI Item {elastic_item['_id']} did not provide a URL string. Dropping item.") + self.logger.warning(f"OERSI Item {elastic_item['_id']} did not provide a URL string. 
Dropping item.") return None def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: @@ -390,27 +473,27 @@ def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: return True if self.uuid: if self.get_uuid(elastic_item=elastic_item) == self.uuid: - logging.info(f"matching requested id: {self.uuid}") + self.logger.info(f"matching requested id: {self.uuid}") return True return False if self.remoteId: if str(self.getId(response, elastic_item=elastic_item)) == self.remoteId: - logging.info(f"matching requested id: {self.remoteId}") + self.logger.info(f"matching requested id: {self.remoteId}") return True return False db = EduSharing().find_item(self.getId(response, elastic_item=elastic_item), self) changed = db is None or db[1] != self.getHash(response, elastic_item_source=elastic_item["_source"]) if not changed: - logging.info(f"Item {self.getId(response, elastic_item=elastic_item)} (uuid: {db[0]}) has not changed") + self.logger.info(f"Item {self.getId(response, elastic_item=elastic_item)} (uuid: {db[0]}) has not changed") return changed def get_lifecycle_author( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - date_created: Optional[str] = None, - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + date_created: Optional[str] = None, + date_published: Optional[str] = None, ): """ If a "creator"-field is available in the OERSI API for a specific '_source'-item, creates an 'author'-specific @@ -478,11 +561,11 @@ def get_lifecycle_author( return authors def get_affiliation_and_save_to_lifecycle( - self, - affiliation_dict: dict, - lom_base_item_loader: LomBaseItemloader, - organization_fallback: set[str], - lifecycle_role: str, + self, + affiliation_dict: dict, + lom_base_item_loader: LomBaseItemloader, + organization_fallback: set[str], + lifecycle_role: str, ): """ Retrieves metadata from OERSI's "affiliation"-field (which is typically found within a "creator"- or @@ -527,8 +610,7 @@ def get_affiliation_and_save_to_lifecycle( ) lom_base_item_loader.add_value("lifecycle", lifecycle_affiliated_org.load_item()) - @staticmethod - def validate_academic_title_string(honorific_prefix: str) -> str: + def validate_academic_title_string(self, honorific_prefix: str) -> str: """ Some metadata-providers provide weird values for the 'honorificPrefix'-attribute within a "creator"- or "contributor"-item. This method checks for known edge-cases and drops the string if necessary. @@ -539,7 +621,7 @@ def validate_academic_title_string(honorific_prefix: str) -> str: # ORCA.nrw: "http://hbz-nrw.de/regal#academicDegree/unkown", "unknown", # Open Textbook Library: single backticks if "unknown" in honorific_prefix or "unkown" in honorific_prefix or len(honorific_prefix) == 1: - logging.debug( + self.logger.debug( f"'honorificPrefix'-validation: The string {honorific_prefix} was recognized as an invalid " f"edge-case value. Deleting string..." 
) @@ -547,11 +629,11 @@ def validate_academic_title_string(honorific_prefix: str) -> str: return honorific_prefix.strip() def get_lifecycle_contributor( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - author_list: Optional[list[str]] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + author_list: Optional[list[str]] = None, ): """ Collects metadata from the OERSI "contributor"-field and stores it within a LomLifecycleItemLoader. @@ -653,11 +735,11 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) def get_lifecycle_publisher( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organizations_from_publisher_fields: set[str], - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organizations_from_publisher_fields: set[str], + date_published: Optional[str] = None, ): """ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. Successfully @@ -693,7 +775,7 @@ def get_lifecycle_publisher( lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) def get_lifecycle_organization_from_source_organization_fallback( - self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] + self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] ): # ATTENTION: the "sourceOrganization"-field is not part of the AMB draft, therefore this method is currently # used a fallback, so we don't lose any useful metadata (even if that metadata is not part of the AMB spec). @@ -735,7 +817,8 @@ def get_lifecycle_organization_from_source_organization_fallback( lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) def get_lifecycle_publisher_from_source_organization( - self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, previously_collected_publishers: set[str] + self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, + previously_collected_publishers: set[str] ): source_organizations: list[dict] = elastic_item_source.get("sourceOrganization") for so in source_organizations: @@ -755,10 +838,10 @@ def get_lifecycle_publisher_from_source_organization( lifecycle_org.add_value("url", org_url) lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) - @staticmethod - def lifecycle_determine_type_of_identifier_and_save_uri( - item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader - ): + def lifecycle_determine_type_of_identifier_and_save_uri(self, + item_dictionary: dict, + lifecycle_item_loader: LomLifecycleItemloader + ): """ OERSI's "creator"/"contributor"/"affiliation" items might contain an 'id'-field which (optionally) provides URI-identifiers that reference GND / ORCID / Wikidata / ROR. 
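The classification that the following hunk reformats boils down to substring checks on the URI. Condensed into a helper (field names follow the LomLifecycleItemloader fields used in the hunk: id_gnd, id_orcid, id_ror, id_wikidata, with 'url' as fallback):

    def classify_lifecycle_uri(uri: str) -> tuple[str, str]:
        """Map a creator/contributor/affiliation 'id'-URI onto the matching lifecycle field."""
        if "/gnd/" in uri:
            return "id_gnd", uri
        if "orcid.org" in uri:
            return "id_orcid", uri
        if "ror.org" in uri:
            return "id_ror", uri
        if "wikidata.org" in uri:
            return "id_wikidata", uri
        # unrecognized identifiers are kept as a plain URL
        return "url", uri

    # classify_lifecycle_uri("https://orcid.org/0000-0002-1825-0097") -> ("id_orcid", "https://orcid.org/0000-0002-1825-0097")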
@@ -770,10 +853,10 @@ def lifecycle_determine_type_of_identifier_and_save_uri( # "creator.id" can be 'null', therefore we need to explicitly check its type before trying to parse it uri_string: str = item_dictionary.get("id") if ( - "orcid.org" in uri_string - or "/gnd/" in uri_string - or "wikidata.org" in uri_string - or "ror.org" in uri_string + "orcid.org" in uri_string + or "/gnd/" in uri_string + or "wikidata.org" in uri_string + or "ror.org" in uri_string ): if "/gnd/" in uri_string: lifecycle_item_loader.add_value("id_gnd", uri_string) @@ -784,7 +867,7 @@ def lifecycle_determine_type_of_identifier_and_save_uri( if "wikidata.org" in uri_string: lifecycle_item_loader.add_value("id_wikidata", uri_string) else: - logging.info( + self.logger.info( f"The URI identifier '{uri_string}' was not recognized. " f"Fallback: Saving its value to 'lifecycle.url'." ) @@ -866,7 +949,7 @@ def parse(self, response=None, **kwargs): if thumbnail_url: base.add_value("thumbnail", thumbnail_url) except KeyError: - logging.debug( + self.logger.debug( f"OERSI Item {elastic_item['_id']} " f"(name: {elastic_item_source['name']}) did not provide a thumbnail." ) @@ -902,7 +985,7 @@ def parse(self, response=None, **kwargs): # this URL is REQUIRED and should always be available # see https://dini-ag-kim.github.io/amb/draft/#id except KeyError: - logging.warning(f"Item {elastic_item['_id']} did not have an item URL (AMB 'id' was missing)!") + self.logger.warning(f"Item {elastic_item['_id']} did not have an item URL (AMB 'id' was missing)!") return if identifier_url: general.replace_value("identifier", identifier_url) @@ -1070,7 +1153,7 @@ def parse(self, response=None, **kwargs): if about_id_key: vs.add_value("hochschulfaechersystematik", about_id_key) else: - logging.debug( + self.logger.debug( f"The value of OERSI 'about.id' was not recognized during mapping to " f"valuespaces 'hochschulfaechersystematik': {about_id} ." ) @@ -1150,7 +1233,7 @@ def parse(self, response=None, **kwargs): if vhb_course_url and vhb_course_url == identifier_url: self.logger.debug( f"BIRD: Matched 'vhb'-item {vhb_course_url} with OERSI " - f"ElasticSearch item {elastic_item['_id']}!" 
+ f"ElasticSearch item {elastic_item['_id']}" ) vhb_item_matched = vhb_item except KeyError as ke: @@ -1232,7 +1315,7 @@ def parse(self, response=None, **kwargs): # timedelta has no parameter for months # -> X months = X * (4 weeks) duration_delta = duration_delta + ( - duration_number * datetime.timedelta(weeks=4) + duration_number * datetime.timedelta(weeks=4) ) case _: self.logger.warning( From abe42fe4c2069aa255867b22548a61662603d64c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 26 Apr 2024 12:57:42 +0200 Subject: [PATCH 478/590] fix: typecheck "affiliation"-objects before trying to parse them - this fixes TypeErrors when encountering empty "affiliation"-dictionaries (edge-case) --- converter/spiders/oersi_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 6d27f209..25836c44 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -591,7 +591,7 @@ def get_affiliation_and_save_to_lifecycle( # (for future reference: # vCard v3: https://datatracker.ietf.org/doc/html/rfc2426 # vCard v4: https://www.rfc-editor.org/rfc/rfc6350.html#section-6.6.6 ) - if "name" in affiliation_dict: + if affiliation_dict and isinstance(affiliation_dict, dict) and "name" in affiliation_dict: affiliation_name = affiliation_dict.get("name") lifecycle_affiliated_org = LomLifecycleItemloader() if affiliation_name: From 9b867cc36e063d5608ec0b71c49e1ea6117481f1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 8 May 2024 13:53:41 +0200 Subject: [PATCH 479/590] add BIRD CourseItem properties ("course_availability_from" and "course_availability_to") - docs: update DocStrings for "course_duration"-property --- converter/items.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/converter/items.py b/converter/items.py index 6f4e0643..4a9e927c 100644 --- a/converter/items.py +++ b/converter/items.py @@ -167,6 +167,7 @@ class LomEducationalItem(Item): """See LomAgeRangeItem. Corresponding edu-sharing properties: 'ccm:educationaltypicalagerange_from' & 'ccm:educationaltypicalagerange_to'""" typicalLearningTime = Field() + """Corresponding edu-sharing property: 'cclom:typicallearningtime' (expects values in ms!)""" # please use the seperate license data @@ -323,9 +324,13 @@ class CourseItem(Item): """ BIRD-specific metadata properties intended only for courses. """ + course_availability_from = Field() + """Corresponding edu-sharing property: 'ccm:oeh_event_begin' (date)""" + course_availability_to = Field() + """Corresponding edu-sharing property: 'ccm:oeh_event_end' (date)""" course_duration = Field() - # ToDo: edu-sharing expects the course duration in seconds (as long as 'cclom:typicallearningtime' is used!) - """Corresponding edu-sharing property: 'cclom:typicallearningtime'""" + """Corresponding edu-sharing property: 'cclom:typicallearningtime' + (edu-sharing expects 'cclom:typicallearningtime' values in milliseconds!)""" course_learningoutcome = Field() """Describes "Lernergebnisse" or "learning objectives". (Expects a string, with or without HTML-formatting!) 
Corresponding edu-sharing property: 'ccm:learninggoal'""" From 8051028c55ec1e05832a836b73ffaf76108164d6 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 8 May 2024 13:55:47 +0200 Subject: [PATCH 480/590] feat: connect BIRD CourseItem properties ("course_availability_from" and "course_availability_to") to edu-sharing - "course_availability_from" -> "ccm:oeh_event_begin" - "course_availability_to" -> "ccm:oeh_event_end" - change: activate "course_duration" property (-> "cclom:typicallearningtime") --- converter/es_connector.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 36c10aa7..ca00fa78 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -538,12 +538,17 @@ def transform_item(self, uuid, spider, item): if "course" in item: # ToDo: activate these fields AFTER confirming that the edu-sharing properties are correct # ToDo: implement a CourseItemPipeline in pipelines.py BEFORE activating these fields! + if "course_availability_from" in item["course"]: + # ToDo: confirm is this field should store a datetime or a date (and implement check in pipelines.py) + spaces["ccm:oeh_event_begin"] = item["course"]["course_availability_from"] + if "course_availability_to" in item["course"]: + # ToDo: confirm if this field should store a datetime or a date (and implement check in pipelines.py) + spaces["ccm:oeh_event_end"] = item["course"]["course_availability_to"] if "course_description_short" in item["course"]: spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] if "course_duration" in item["course"]: - # ToDo - # spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] - pass + # edu-sharing property 'cclom:typicallearningtime' expects values in ms! 
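Because 'cclom:typicallearningtime' expects integer milliseconds, every incoming duration has to be normalized before the assignment below. A small helper along those lines, a sketch rather than project code, covering the 'hh:mm:ss' strings mentioned in the pipeline ToDos as well as timedelta and plain second values:

    import datetime

    def to_learning_time_ms(value: datetime.timedelta | int | float | str) -> int | None:
        """Normalize a duration to integer milliseconds for 'cclom:typicallearningtime'."""
        if isinstance(value, datetime.timedelta):
            return int(value.total_seconds() * 1000)
        if isinstance(value, (int, float)):
            # assumption: bare numbers are seconds
            return int(value * 1000)
        if isinstance(value, str) and value.count(":") == 2:
            hours, minutes, seconds = (int(part) for part in value.split(":"))
            return (hours * 3600 + minutes * 60 + seconds) * 1000
        return None

    # to_learning_time_ms("01:30:00") -> 5400000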
+ spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] if "course_learningoutcome" in item["course"]: spaces["ccm:learninggoal"] = item["course"]["course_learningoutcome"] if "course_url_video" in item["course"]: From 85165909c07ab76c5d3171fda1a080de99bbede2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 8 May 2024 14:09:43 +0200 Subject: [PATCH 481/590] oersi_spider v0.2.3 - fix: the "vhb"-hook only triggered in the restricted crawling mode (limited via .env setting), but not in the "complete"-crawling mode of oersi_spider (where all metadata-providers are queried) Metadata Changelog: - feat: vhb MOOCHub "workload" -> "CourseItem.duration" (-> edu-sharing "cclom:typicallearningtime") -> BIRD "duration" - this is a unique case since vhb's "workload" property is understood as a typicalLearningTime value (LOM) and not in the sense of "workload per day/week/month" - feat: vhb MOOCHub "startDate" -> "CourseItem.course_availability_from" -> edu-sharing "ccm:oeh_event_begin" -> BIRD "course_availability_from" --- converter/spiders/oersi_spider.py | 53 +++++++++++++++++++------------ 1 file changed, 33 insertions(+), 20 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 25836c44..1258f1e6 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -4,6 +4,7 @@ from collections import Counter from typing import Optional +import dateparser import requests import scrapy @@ -40,7 +41,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.2.2" # last update: 2024-04-25 + version = "0.2.3" # last update: 2024-05-08 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -351,12 +352,12 @@ def elastic_fetch_all_provider_pages(self): f"Recognized multiple providers within OERSI_METADATA_PROVIDER .env setting:" f"{provider_list}" ) self.ELASTIC_PROVIDERS_TO_CRAWL = provider_list - if "vhb" in self.ELASTIC_PROVIDERS_TO_CRAWL: - # experimental BIRD-Hook for "vhb"-courses! - # ToDo: refactor this implementation into its own (sub-)class ASAP! - # (WARNING: This PoC will not scale well for over >50 Metadata-Providers within OERSI - # and REQUIRES a separate infrastructure!) - self.fetch_vhb_data() + if "vhb" in self.ELASTIC_PROVIDERS_TO_CRAWL: + # experimental BIRD-Hook for "vhb"-courses! + # ToDo: refactor this implementation into its own (sub-)class ASAP! + # (WARNING: This PoC will not scale well for over >50 Metadata-Providers within OERSI + # and REQUIRES a separate infrastructure!) 
+ self.fetch_vhb_data() has_next_page = True for provider_name in self.ELASTIC_PROVIDERS_TO_CRAWL: @@ -1279,7 +1280,7 @@ def parse(self, response=None, **kwargs): duration_pattern = re.compile( r"""(?P\d+)\s*(?P\w*)""" ) - # ToDo: refactor into "course duration" parser method + # ToDo: refactor into "MOOCHub workload to BIRD course duration" method duration_match: re.Match | None = duration_pattern.search(vhb_workload) duration_delta: datetime.timedelta = datetime.timedelta() if duration_match: @@ -1324,22 +1325,18 @@ def parse(self, response=None, **kwargs): f"{vhb_item_matched}" ) if duration_delta: - # full seconds is as precise as we need to be, - # therefore we convert the seconds to int values workload_in_seconds: int = int( duration_delta.total_seconds() ) if workload_in_seconds: - # ToDo: confirm that course_duration is correct - # (BIRD "course_workload" seems to be a closer match) - # course_itemloader.add_value( - # "course_duration", workload_in_seconds - # ) - # ToDo: choose only 1 of these 2 possible properties - # course_itemloader.add_value( - # "course_workload", workload_in_seconds - # ) - pass + # the edu-sharing property 'cclom:typicallearningtime' + # expects values in ms: + workload_in_ms: int = ( + workload_in_seconds * 1000 + ) + course_itemloader.add_value( + "course_duration", workload_in_ms + ) if "learningObjectives" in vhb_item_matched["attributes"]: vhb_learning_objectives: str = vhb_item_matched["attributes"][ "learningObjectives" @@ -1348,6 +1345,22 @@ def parse(self, response=None, **kwargs): course_itemloader.add_value( "course_learningoutcome", vhb_learning_objectives ) + if "startDate" in vhb_item_matched["attributes"]: + start_date_raw: str = vhb_item_matched["attributes"]["startDate"] + if start_date_raw and isinstance(start_date_raw, str): + # parsing the date string first to check its validity + sdt_parsed: datetime = dateparser.parse(start_date_raw) + if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): + # just to make sure that we don't parse bogus data, we run the string + # through the dateparser module first and convert it to iso 8601 + sd_parsed_iso: str = sdt_parsed.date().isoformat() + # ToDo: confirm if this field should be saved as datetime or date + course_itemloader.add_value("course_availability_from", sd_parsed_iso) + else: + self.logger.warning( + f"Could not parse vhb 'start_date' value {start_date_raw} " + f"to datetime. 
(Please check for new edge-cases " + f"and update the crawler!)") base.add_value("course", course_itemloader.load_item()) # noinspection DuplicatedCode From 7014d3c8f7dd8b5a2f83d2a613abd8f9a704d94e Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 May 2024 17:42:38 +0200 Subject: [PATCH 482/590] introduce "course_schedule"-property to CourseItem and es_connector - style: ordered CourseItem properties alphabetically - docs: updated code comments regarding "ccm:oeh_event_begin" and "ccm:oeh_event_end" to reflect the recent (2024-05-14) changes (from type 'date' to 'datetime') --- converter/es_connector.py | 8 ++++++-- converter/items.py | 13 ++++++++----- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index ca00fa78..f9c52094 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -539,10 +539,12 @@ def transform_item(self, uuid, spider, item): # ToDo: activate these fields AFTER confirming that the edu-sharing properties are correct # ToDo: implement a CourseItemPipeline in pipelines.py BEFORE activating these fields! if "course_availability_from" in item["course"]: - # ToDo: confirm is this field should store a datetime or a date (and implement check in pipelines.py) + # as of 2024-05-14: "ccm:oeh_event_begin" expects a datetime value + # ToDo: implement datetime typecheck for this property in pipelines.py spaces["ccm:oeh_event_begin"] = item["course"]["course_availability_from"] if "course_availability_to" in item["course"]: - # ToDo: confirm if this field should store a datetime or a date (and implement check in pipelines.py) + # as of 2024-05-14: "ccm:oeh_event_end" expects a datetime value + # Todo: implement datetime typecheck for this property in pipelines.py spaces["ccm:oeh_event_end"] = item["course"]["course_availability_to"] if "course_description_short" in item["course"]: spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] @@ -551,6 +553,8 @@ def transform_item(self, uuid, spider, item): spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] if "course_learningoutcome" in item["course"]: spaces["ccm:learninggoal"] = item["course"]["course_learningoutcome"] + if "course_schedule" in item["course"]: + spaces["ccm:oeh_course_schedule"] = item["course"]["course_schedule"] if "course_url_video" in item["course"]: spaces["ccm:oeh_course_url_video"] = item["course"]["course_url_video"] if "course_workload" in item["course"]: diff --git a/converter/items.py b/converter/items.py index 4a9e927c..a526cb99 100644 --- a/converter/items.py +++ b/converter/items.py @@ -328,20 +328,23 @@ class CourseItem(Item): """Corresponding edu-sharing property: 'ccm:oeh_event_begin' (date)""" course_availability_to = Field() """Corresponding edu-sharing property: 'ccm:oeh_event_end' (date)""" + course_description_short = Field() + """Corresponding edu-sharing property: 'ccm:oeh_course_description_short'""" course_duration = Field() """Corresponding edu-sharing property: 'cclom:typicallearningtime' (edu-sharing expects 'cclom:typicallearningtime' values in milliseconds!)""" course_learningoutcome = Field() """Describes "Lernergebnisse" or "learning objectives". (Expects a string, with or without HTML-formatting!) 
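The ToDo notes in this patch ask for a CourseItemPipeline that type-checks 'course_availability_from' and 'course_availability_until' before they are written to 'ccm:oeh_event_begin' / 'ccm:oeh_event_end'. One possible shape for such a pipeline, a hypothetical sketch that is not (yet) part of pipelines.py and only keeps values that parse as ISO 8601 datetimes:

    import datetime

    class CourseItemPipeline:
        """Sketch of the pipeline requested in the ToDos above (hypothetical, not project code)."""

        DATETIME_FIELDS = ("course_availability_from", "course_availability_until")

        def process_item(self, item, spider):
            course = item.get("course") or {}
            for field in self.DATETIME_FIELDS:
                raw_value = course.get(field)
                if raw_value is None:
                    continue
                try:
                    # 'ccm:oeh_event_begin' / 'ccm:oeh_event_end' expect datetime values,
                    # so only ISO-8601-parsable strings are kept
                    datetime.datetime.fromisoformat(raw_value)
                except (TypeError, ValueError):
                    spider.logger.warning(f"Dropping invalid {field} value: {raw_value!r}")
                    del course[field]
            return item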
Corresponding edu-sharing property: 'ccm:learninggoal'""" - course_workload = Field() - """Describes the workload per week.""" - # ToDo: confirm where "workload" values should be saved within edu-sharing - course_description_short = Field() - """Corresponding edu-sharing property: 'ccm:oeh_course_description_short'""" + course_schedule = Field() + """Describes the schedule of a course ("Kursablauf"). (Expects a string, with or without HTML-formatting!) + Corresponding edu-sharing property: 'ccm:oeh_course_schedule'.""" course_url_video = Field() """URL of a course-specific trailer- or teaser-video. Corresponding edu-sharing property: 'ccm:oeh_course_url_video'""" + course_workload = Field() + """Describes the workload per week.""" + # ToDo: confirm where "workload" values should be saved within edu-sharing class BaseItem(Item): From 490423b256f092babec185b840224dc6b322ba9b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 14 May 2024 17:47:05 +0200 Subject: [PATCH 483/590] oersi_spider v0.2.4 - change: vhb "startDate" is stored as a "datetime" (instead of "date") - feat: vhb "outline" -> "CourseItem.course_schedule" -> BIRD "course_schedule" - style/refactor: process "vhb"-attributes in an alphabetical manner to increase code readability --- converter/spiders/oersi_spider.py | 61 ++++++++++++++++++------------- 1 file changed, 35 insertions(+), 26 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 1258f1e6..955e4fde 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -41,7 +41,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.2.3" # last update: 2024-05-08 + version = "0.2.4" # last update: 2024-05-14 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -1258,6 +1258,39 @@ def parse(self, response=None, **kwargs): vhb_abstract: str = vhb_item_matched["attributes"]["abstract"] if vhb_abstract and isinstance(vhb_abstract, str): course_itemloader.add_value("course_description_short", vhb_abstract) + if "learningObjectives" in vhb_item_matched["attributes"]: + vhb_learning_objectives: str = vhb_item_matched["attributes"][ + "learningObjectives" + ] + if vhb_learning_objectives and isinstance(vhb_learning_objectives, str): + course_itemloader.add_value( + "course_learningoutcome", vhb_learning_objectives + ) + if "outline" in vhb_item_matched["attributes"]: + outline_raw: str = vhb_item_matched["attributes"]["outline"] + if outline_raw and isinstance(outline_raw, str): + # ToDo: vhb "outline" -> course_schedule -> "ccm:oeh_course_schedule" + # the vhb attribute "outline" describes a course's schedule (Kursablauf) + # IMPORTANT: "outline" is not part of MOOCHub v2.x nor 3.x! 
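The 'startDate'-handling a few lines further down runs every incoming string through dateparser before re-emitting it as ISO 8601, so that no bogus values reach 'ccm:oeh_event_begin'. Reduced to a helper, the pattern looks like this (a sketch; dateparser.parse() returns None for unparsable input):

    import datetime

    import dateparser

    def moochub_date_to_iso(raw_date: str) -> str | None:
        """Validate a MOOCHub date string and return it as an ISO 8601 datetime string."""
        parsed: datetime.datetime | None = dateparser.parse(raw_date)
        if parsed and isinstance(parsed, datetime.datetime):
            return parsed.isoformat()
        return None

    # moochub_date_to_iso("2024-10-01") -> "2024-10-01T00:00:00"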
+ course_itemloader.add_value("course_schedule", outline_raw) + else: + self.logger.warning(f"Received vhb 'outline'-property of unexpected type: " + f"{outline_raw}") + if "startDate" in vhb_item_matched["attributes"]: + start_date_raw: str = vhb_item_matched["attributes"]["startDate"] + if start_date_raw and isinstance(start_date_raw, str): + # parsing the date string first to check its validity + sdt_parsed: datetime = dateparser.parse(start_date_raw) + if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): + # just to make sure that we don't parse bogus data, we run the string + # through the dateparser module first and convert it to iso 8601 + sd_parsed_iso: str = sdt_parsed.isoformat() + course_itemloader.add_value("course_availability_from", sd_parsed_iso) + else: + self.logger.warning( + f"Could not parse vhb 'start_date' value {start_date_raw} " + f"to datetime. (Please check for new edge-cases " + f"and update the crawler!)") if "video" in vhb_item_matched["attributes"]: video_item: dict = vhb_item_matched["attributes"]["video"] if video_item: @@ -1268,7 +1301,7 @@ def parse(self, response=None, **kwargs): "course_url_video", vhb_course_video_url ) # ToDo: "video.licenses" is of type list[dict] - # each "license"-dict can have an "id"- and "url"-property + # each "license"-dict can have an "id"- and "url"-property if "workload" in vhb_item_matched["attributes"]: vhb_workload_raw: str = vhb_item_matched["attributes"]["workload"] if vhb_workload_raw and isinstance(vhb_workload_raw, str): @@ -1337,30 +1370,6 @@ def parse(self, response=None, **kwargs): course_itemloader.add_value( "course_duration", workload_in_ms ) - if "learningObjectives" in vhb_item_matched["attributes"]: - vhb_learning_objectives: str = vhb_item_matched["attributes"][ - "learningObjectives" - ] - if vhb_learning_objectives and isinstance(vhb_learning_objectives, str): - course_itemloader.add_value( - "course_learningoutcome", vhb_learning_objectives - ) - if "startDate" in vhb_item_matched["attributes"]: - start_date_raw: str = vhb_item_matched["attributes"]["startDate"] - if start_date_raw and isinstance(start_date_raw, str): - # parsing the date string first to check its validity - sdt_parsed: datetime = dateparser.parse(start_date_raw) - if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): - # just to make sure that we don't parse bogus data, we run the string - # through the dateparser module first and convert it to iso 8601 - sd_parsed_iso: str = sdt_parsed.date().isoformat() - # ToDo: confirm if this field should be saved as datetime or date - course_itemloader.add_value("course_availability_from", sd_parsed_iso) - else: - self.logger.warning( - f"Could not parse vhb 'start_date' value {start_date_raw} " - f"to datetime. 
(Please check for new edge-cases " - f"and update the crawler!)") base.add_value("course", course_itemloader.load_item()) # noinspection DuplicatedCode From f7b5162aecd8cfb5db4be8c5567a2112bdbdeeec Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 16 May 2024 14:49:20 +0200 Subject: [PATCH 484/590] oersi_spider v0.2.5 (new public data API) - change: use OERSI'S (new) public data API (https://oersi.org/resources/api/search/) instead of the (soon to be deprecated) internal API (https://oersi.org/resources/api-internal/search/) - for further details, please see: https://oersi.org/resources/pages/de/docs/api/data/#public-data-api Sidenote: OERSI Weekly metadata dump - OERSI started offering a weekly metadata dump, which might become relevant in the future - see: https://oersi.org/resources/pages/de/docs/api/data/#metadata-dump - (Thank you, Manuel, for making me aware of this fact!) --- converter/spiders/oersi_spider.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 955e4fde..a9ce4c5d 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -41,7 +41,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.2.4" # last update: 2024-05-14 + version = "0.2.5" # last update: 2024-05-16 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -215,7 +215,7 @@ def elastic_pit_create(self) -> dict: See: https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html """ url = ( - f"https://oersi.org/resources/api-internal/search/oer_data/_pit?keep_alive=" + f"https://oersi.org/resources/api/search/oer_data/_pit?keep_alive=" f"{self.ELASTIC_PARAMETER_KEEP_ALIVE}&pretty" ) headers = {"accept": "application/json"} @@ -235,13 +235,13 @@ def elastic_pit_delete(self) -> dict: Deletes the ElasticSearch PIT once it's no longer needed for page iteration. 
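For reference, the create / iterate / delete cycle around the point-in-time API, against the public endpoints introduced in this patch, condenses to the sketch below. The 'keep_alive' value, page size and the query body (a match on 'mainEntityOfPage.provider.name', sorted by the '_shard_doc' tiebreaker) are illustrative assumptions, not necessarily the crawler's exact payload:

    import requests

    BASE = "https://oersi.org/resources/api/search"
    HEADERS = {"Content-Type": "application/json", "accept": "application/json"}

    def iterate_provider(provider_name: str, keep_alive: str = "5m", page_size: int = 100):
        """Yield every ElasticSearch hit of one metadata provider via PIT + search_after."""
        pit: dict = requests.post(
            f"{BASE}/oer_data/_pit?keep_alive={keep_alive}&pretty",
            headers={"accept": "application/json"},
        ).json()
        search_after = None
        try:
            while True:
                payload = {
                    "size": page_size,
                    "pit": {"id": pit["id"], "keep_alive": keep_alive},
                    "query": {"match": {"mainEntityOfPage.provider.name": provider_name}},
                    "sort": [{"_shard_doc": "asc"}],
                }
                if search_after is not None:
                    payload["search_after"] = search_after
                hits: list = requests.post(f"{BASE}/_search", json=payload, headers=HEADERS).json()["hits"]["hits"]
                if not hits:
                    break
                yield from hits
                search_after = hits[-1]["sort"]
        finally:
            requests.delete(f"{BASE}/_pit", json=pit)  # close the PIT once iteration is done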
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html#close-point-in-time-api """ - url = f"https://oersi.org/resources/api-internal/search/_pit" + url = f"https://oersi.org/resources/api/search/_pit" delete_request = requests.delete(url=url, json=self.ELASTIC_PIT_ID) self.logger.debug(f"Deleting ElasticSearch PIT: {self.ELASTIC_PIT_ID}") return delete_request.json() def elastic_fetch_list_of_provider_names(self): - _url = "https://oersi.org/resources/api-internal/search/oer_data/_search" + _url = "https://oersi.org/resources/api/search/oer_data/_search" _payload = { "_source": False, @@ -302,7 +302,7 @@ def elastic_query_provider_metadata(self, provider_name, search_after=None): See: https://www.elastic.co/guide/en/elasticsearch/reference/current/paginate-search-results.html#paginate-search-results """ - url = "https://oersi.org/resources/api-internal/search/_search" + url = "https://oersi.org/resources/api/search/_search" if search_after is None: payload = { "size": self.ELASTIC_PARAMETER_REQUEST_SIZE, From fab21367f70bb59675c0e50139ad97810e5f2f98 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 16 May 2024 15:10:11 +0200 Subject: [PATCH 485/590] todo: typicalLearningTime ToDos - while implementing "CourseItem.course_duration" I noticed that the current implementation of the ConvertTimePipeline lacks several checks and needs to be updated ASAP --- converter/pipelines.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index c2e151b0..3d09d1fc 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -267,6 +267,11 @@ def process_item(self, raw_item, spider): if "typicalLearningTime" in item["lom"]["educational"]: t = item["lom"]["educational"]["typicalLearningTime"] mapped = None + # ToDo: typecheck the provided value first and handle it accordingly! + # - strings: check commonly provided "duration" formats (e.g. "hh:mm:ss" or "12 Stunden") + # - convert to int: 'cclom:typicallearningtime' expects values to be in milliseconds! 
+ # - improve error-handling by reworking the bare "except"-clause + # - update es_connector.py and connect this property to the backend splitted = t.split(":") if len(splitted) == 3: mapped = ( From 72253b06e90c7aab35ecbededde57e1ff47e7156 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 16 May 2024 15:48:04 +0200 Subject: [PATCH 486/590] fix: add missing license constants to OER pipeline --- converter/pipelines.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 3d09d1fc..a0d75da3 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -213,10 +213,12 @@ def process_item(self, raw_item, spider): if "url" in item["license"] and "oer" not in item["license"]: match item["license"]["url"]: - case Constants.LICENSE_CC_BY_20 | \ + case Constants.LICENSE_CC_BY_10 | \ + Constants.LICENSE_CC_BY_20 | \ Constants.LICENSE_CC_BY_25 | \ Constants.LICENSE_CC_BY_30 | \ Constants.LICENSE_CC_BY_40 | \ + Constants.LICENSE_CC_BY_SA_10 | \ Constants.LICENSE_CC_BY_SA_20 | \ Constants.LICENSE_CC_BY_SA_25 | \ Constants.LICENSE_CC_BY_SA_30 | \ From a69a2092d4489dfe34de8d208cdc3d2788ca5a4c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 27 May 2024 17:58:55 +0200 Subject: [PATCH 487/590] change: rename "CourseItem.course_availability_to" to "..._until" - renamed this CourseItem property to keep in line with the BIRD naming scheme --- converter/es_connector.py | 4 ++-- converter/items.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index f9c52094..6150a9c3 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -542,10 +542,10 @@ def transform_item(self, uuid, spider, item): # as of 2024-05-14: "ccm:oeh_event_begin" expects a datetime value # ToDo: implement datetime typecheck for this property in pipelines.py spaces["ccm:oeh_event_begin"] = item["course"]["course_availability_from"] - if "course_availability_to" in item["course"]: + if "course_availability_until" in item["course"]: # as of 2024-05-14: "ccm:oeh_event_end" expects a datetime value # Todo: implement datetime typecheck for this property in pipelines.py - spaces["ccm:oeh_event_end"] = item["course"]["course_availability_to"] + spaces["ccm:oeh_event_end"] = item["course"]["course_availability_until"] if "course_description_short" in item["course"]: spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] if "course_duration" in item["course"]: diff --git a/converter/items.py b/converter/items.py index a526cb99..132a8507 100644 --- a/converter/items.py +++ b/converter/items.py @@ -326,7 +326,7 @@ class CourseItem(Item): """ course_availability_from = Field() """Corresponding edu-sharing property: 'ccm:oeh_event_begin' (date)""" - course_availability_to = Field() + course_availability_until = Field() """Corresponding edu-sharing property: 'ccm:oeh_event_end' (date)""" course_description_short = Field() """Corresponding edu-sharing property: 'ccm:oeh_course_description_short'""" From 0a8beaf409d5ab0f46d5ecb8fe5298d29ae7c882 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Mon, 27 May 2024 20:01:05 +0200 Subject: [PATCH 488/590] oersi_spider v0.2.6 (additional "iMoox"-metadata) - feat: query iMoox for additional metadata if "iMoox" is part of the OERSI query "iMoox"-specific (MOOCHub v3.x) metadata changes: - iMoox 
"startDate" -> "ccm:oeh_event_begin" -> BIRD "course_availability_from" - iMoox "endDate" -> "ccm:oeh_event_end" -> BIRD "course_availability_until" - iMoox "trailer.contentUrl" -> "ccm:oeh_course_url_video" -> BIRD "course_url_video" - iMoox "duration" & "workload" (combined) -> "cclom:typicallearningtime" (as ms) -> BIRD "course_duration" --- converter/spiders/oersi_spider.py | 158 +++++++++++++++++++++++++++++- 1 file changed, 154 insertions(+), 4 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index a9ce4c5d..4ce48974 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -1,3 +1,4 @@ +import copy import datetime import random import re @@ -41,7 +42,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.2.5" # last update: 2024-05-16 + version = "0.2.6" # last update: 2024-05-27 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -130,6 +131,8 @@ class OersiSpider(scrapy.Spider, LomBase): } # BIRD-related: "vhb" response dict (from https://open.vhb.org/oersi.json) vhb_oersi_json: dict | None = None + # BIRD-related "iMoox" response dict (from https://imoox.at/mooc/local/moochubs/classes/webservice.php) + imoox_json: dict | None = None def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) @@ -352,6 +355,9 @@ def elastic_fetch_all_provider_pages(self): f"Recognized multiple providers within OERSI_METADATA_PROVIDER .env setting:" f"{provider_list}" ) self.ELASTIC_PROVIDERS_TO_CRAWL = provider_list + # --- BIRD-related hooks --- + if "iMoox" in self.ELASTIC_PROVIDERS_TO_CRAWL: + self.fetch_imoox_data() if "vhb" in self.ELASTIC_PROVIDERS_TO_CRAWL: # experimental BIRD-Hook for "vhb"-courses! # ToDo: refactor this implementation into its own (sub-)class ASAP! @@ -401,6 +407,18 @@ def elastic_fetch_all_provider_pages(self): break return all_items + def fetch_imoox_data(self): + imoox_response: requests.Response = requests.get("https://imoox.at/mooc/local/moochubs/classes/webservice.php") + self.logger.info(f"BIRD: Fetching 'course'-data from iMoox: {imoox_response.url} ...") + imoox_response_dict: dict = imoox_response.json() + if imoox_response_dict and isinstance(imoox_response_dict, dict): + if "data" in imoox_response_dict: + imoox_course_items = imoox_response_dict["data"] + self.logger.info(f"BIRD: Successfully retrieved {len(imoox_course_items)} items from {imoox_response.url} .") + self.imoox_json = copy.deepcopy(imoox_response_dict) + else: + self.logger.warning(f"BIRD: Failed to retrieve 'course'-data from 'iMoox' sourceOrganization.") + def fetch_vhb_data(self): vhb_response: requests.Response = requests.get(url="https://open.vhb.org/oersi.json") self.logger.info(f"BIRD: Fetching 'course'-data from vhb: {vhb_response.url} ...") @@ -411,7 +429,7 @@ def fetch_vhb_data(self): self.logger.info( f"BIRD: Successfully retrieved {len(vhb_course_items)} items " f"from {vhb_response.url} ." ) - self.vhb_oersi_json = vhb_response_dict + self.vhb_oersi_json = copy.deepcopy(vhb_response_dict) else: self.logger.warning(f"BIRD: Failed to retrieve 'course'-data from 'vhb' sourceOrganization.") @@ -1213,11 +1231,141 @@ def parse(self, response=None, **kwargs): if license_url_mapped: license_loader.add_value("url", license_url_mapped) + # --- BIRD HOOKS START HERE! 
if "OERSI_QUERY_PROVIDER_NAME" in elastic_item: # BIRD-related requirement: merge item with additional metadata retrieved directly from the source if elastic_item["OERSI_QUERY_PROVIDER_NAME"]: # checking if the "metadata provider name" that was used for the ElasticSearch query needs to be handled query_parameter_provider_name: str = elastic_item["OERSI_QUERY_PROVIDER_NAME"] + # --- "iMoox" metadata hook starts here: + if query_parameter_provider_name and query_parameter_provider_name == "iMoox": + if self.imoox_json: + if "data" in self.imoox_json: + imoox_item_matched: dict | None = None + try: + imoox_items: list[dict] = self.imoox_json["data"] + for imoox_item in imoox_items: + imoox_course_url: str = imoox_item["attributes"]["url"] + if imoox_course_url and imoox_course_url == identifier_url: + self.logger.debug(f"BIRD: Matched 'iMoox'-item {imoox_course_url} with OERSI " + f"ElasticSearch item {elastic_item['_id']} " + f"({elastic_item_source['id']})") + imoox_item_matched = imoox_item + except KeyError as ke: + raise ke + if imoox_item_matched: + course_itemloader: CourseItemLoader = CourseItemLoader() + if "attributes" in imoox_item_matched: + imoox_attributes: dict = imoox_item_matched["attributes"] + # ToDo: MOOCHUb Spec v3 allows a list of (multiple, unique) date strings for + # - "startDate" + # - "endDate" + # -> "CourseItem" needs to be expanded to support multiple values for this field + # (this problem is theoretical in nature at the moment, + # since "iMoox" currently provides only 1 value per property, + # but this might change in the future!) + if "startDate" in imoox_attributes: + start_dates: list[str] = imoox_attributes["startDate"] + if start_dates and isinstance(start_dates, list): + for start_date_raw in start_dates: + if start_date_raw and isinstance(start_date_raw, str): + sdt_parsed: datetime = dateparser.parse(start_date_raw) + if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): + sd_parsed_iso: str = sdt_parsed.isoformat() + course_itemloader.add_value("course_availability_from", + sd_parsed_iso) + if "endDate" in imoox_attributes: + end_dates: list[str] = imoox_attributes["endDate"] + if end_dates and isinstance(end_dates, list): + for end_date_raw in end_dates: + if end_date_raw and isinstance(end_date_raw, str): + edt_parsed: datetime = dateparser.parse(end_date_raw) + if edt_parsed and isinstance(edt_parsed, datetime.datetime): + ed_parsed_iso: str = edt_parsed.isoformat() + course_itemloader.add_value("course_availability_until", + ed_parsed_iso) + if "trailer" in imoox_attributes: + # example data (as of 2024-05-27) + # "trailer": { + # "contentUrl": "https://www.youtube.com/watch?v=DljC8FPpE1s", + # "type": "VideoObject", + # "license": [ + # { + # "identifier": "CC-BY-SA-4.0", + # "url": "https://creativecommons.org/licenses/by-sa/4.0/" + # } + # ] + # }, + if "contentUrl" in imoox_attributes["trailer"]: + imoox_course_trailer_url: str = imoox_attributes["trailer"]["contentUrl"] + if imoox_course_trailer_url and isinstance(imoox_course_trailer_url, str): + course_itemloader.add_value("course_url_video", + imoox_course_trailer_url) + if "duration" in imoox_attributes and "workload" in imoox_attributes: + # ToDo: "duration" and "workload" can currently only be saved as a (coupled) + # value since the destination is "cclom:typicallearningtime" for both fields + # which expects a (total) duration in milliseconds + + # iMoox provides "duration" as ISO-8601 formatted duration (period) strings. 
+ # Typical "duration" values (as of 2024-05-27): "P7W", "P12W" etc. + # see MOOCHub v3 Schema: + # https://github.com/MOOChub/schema/blob/main/moochub-schema.json#L907-L912 + amount_of_weeks: int | None = None + duration_in_weeks_raw: str = imoox_attributes["duration"] + if duration_in_weeks_raw and isinstance(duration_in_weeks_raw, str): + duration_pattern: re.Pattern = re.compile( + r"""^P(?P\d+)W$""") + duration_result: re.Match | None = duration_pattern.search( + duration_in_weeks_raw) + if duration_result: + dura_dict: dict = duration_result.groupdict() + if "amount_of_weeks" in dura_dict: + amount_of_weeks = dura_dict["amount_of_weeks"] + # convert to Integer for further calculations + amount_of_weeks = int(amount_of_weeks) + # ATTENTION: iMoox uses a different structure for "workload"-objects than vhb! + # (due to different MOOCHub versions) + # example data (as of 2024-05-27): + # "workload": { + # "timeValue": 2, + # "timeUnit": "h/week" + # }, + # see MOOCHub v3 Schema - workload: + # (https://github.com/MOOChub/schema/blob/main/moochub-schema.json#L1634-L1662), + time_value: int | None = None + time_unit: str | None = None + if "timeUnit" in imoox_attributes["workload"]: + # "timeUnit" can be one of several values: + # "h/month", "h/week", "h/day" + time_unit: str = imoox_attributes["workload"]["timeUnit"] + if "timeValue" in imoox_attributes["workload"]: + time_value: int = imoox_attributes["workload"]["timeValue"] + if time_unit and time_value and amount_of_weeks: + # "iMoox" provides all their durations / workloads in a week-related way, + # while "cclom:typicallearningtime" expects ms. Therefore: + # 1) we extract the amount of weeks from "duration" + # 2) calculate: * = total duration in h + # 3) convert total duration from h to ms + if time_unit == "h/week": + total_duration_in_hours: int = amount_of_weeks * time_value + duration_delta = datetime.timedelta(hours=total_duration_in_hours) + if duration_delta: + total_duration_in_ms: int = int( + duration_delta.total_seconds() * 1000) + course_itemloader.add_value("course_duration", total_duration_in_ms) + self.logger.debug(f"BIRD: combined iMoox 'duration' " + f"( {duration_in_weeks_raw} ) and 'workload' " + f"( {time_value} {time_unit} ) to " + f"{total_duration_in_ms} ms.") + else: + # ToDo: convert "h/day" and "h/month" in a similar fashion + self.logger.warning(f"BIRD: iMoox provided a time unit {time_unit} " + f"which couldn't be handled. " + f"(Please update the crawler!)") + pass + base.add_value("course", course_itemloader.load_item()) + # --- iMoox hook ends here --- + # --- "vhb" metadata hook starts here: if query_parameter_provider_name and query_parameter_provider_name == "vhb": # Reminder: "VHB" (= "Virtuelle Hochschule Bayern") uses MOOCHub for their JSON export! # The following implementation is therefore MOOCHub-specific @@ -1313,11 +1461,12 @@ def parse(self, response=None, **kwargs): duration_pattern = re.compile( r"""(?P\d+)\s*(?P\w*)""" ) - # ToDo: refactor into "MOOCHub workload to BIRD course duration" method + # ToDo: refactor into + # "MOOCHub (v2?) 
workload to BIRD course_duration" method duration_match: re.Match | None = duration_pattern.search(vhb_workload) duration_delta: datetime.timedelta = datetime.timedelta() if duration_match: - duration_result = duration_match.groupdict() + duration_result: dict = duration_match.groupdict() if "duration_number" in duration_result: duration_number_raw: str = duration_result["duration_number"] duration_number: int = int(duration_number_raw) @@ -1371,6 +1520,7 @@ def parse(self, response=None, **kwargs): "course_duration", workload_in_ms ) base.add_value("course", course_itemloader.load_item()) + # --- BIRD HOOKS END HERE! # noinspection DuplicatedCode lom.add_value("general", general.load_item()) From 8e08e6c191e92a45dd7544f16449436214c894ba Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 28 May 2024 13:45:08 +0200 Subject: [PATCH 489/590] refactor: "iMoox" and "vhb" metadata enrichment - to increase the readability of the "parse()"-method, refactored the BIRD-related metadata hooks into their own methods - style: code formatting via black --- converter/spiders/oersi_spider.py | 652 +++++++++++++++--------------- 1 file changed, 330 insertions(+), 322 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 4ce48974..3af1e35d 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -42,7 +42,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.2.6" # last update: 2024-05-27 + version = "0.2.6" # last update: 2024-05-28 allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, @@ -201,8 +201,8 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req ) return None if ( - self.getId(response=None, elastic_item=elastic_item) is not None - and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None + self.getId(response=None, elastic_item=elastic_item) is not None + and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None ): if not self.hasChanged(None, elastic_item=elastic_item): return None @@ -282,8 +282,9 @@ def elastic_fetch_list_of_provider_names(self): f"{self.ELASTIC_PROVIDERS_TO_CRAWL}" ) if metadata_provider_count_total: - self.logger.info(f"Expecting {metadata_provider_count_total} ElasticSearch objects in " - f"total.") + self.logger.info( + f"Expecting {metadata_provider_count_total} ElasticSearch objects in " f"total." + ) except KeyError as ke: self.logger.error( f"Failed to retrieve 'buckets'-list of metadata providers from OERSI " @@ -347,7 +348,8 @@ def elastic_fetch_all_provider_pages(self): if provider_target_from_env: self.logger.info( f"Recognized OERSI_METADATA_PROVIDER .env setting. 
Limiting crawl to the following target(s): " - f"{provider_target_from_env}") + f"{provider_target_from_env}" + ) self.ELASTIC_PROVIDERS_TO_CRAWL = [provider_target_from_env] if ";" in provider_target_from_env: provider_list: list[str] = provider_target_from_env.split(";") @@ -414,7 +416,9 @@ def fetch_imoox_data(self): if imoox_response_dict and isinstance(imoox_response_dict, dict): if "data" in imoox_response_dict: imoox_course_items = imoox_response_dict["data"] - self.logger.info(f"BIRD: Successfully retrieved {len(imoox_course_items)} items from {imoox_response.url} .") + self.logger.info( + f"BIRD: Successfully retrieved {len(imoox_course_items)} items from {imoox_response.url} ." + ) self.imoox_json = copy.deepcopy(imoox_response_dict) else: self.logger.warning(f"BIRD: Failed to retrieve 'course'-data from 'iMoox' sourceOrganization.") @@ -507,12 +511,12 @@ def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: return changed def get_lifecycle_author( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - date_created: Optional[str] = None, - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + date_created: Optional[str] = None, + date_published: Optional[str] = None, ): """ If a "creator"-field is available in the OERSI API for a specific '_source'-item, creates an 'author'-specific @@ -580,11 +584,11 @@ def get_lifecycle_author( return authors def get_affiliation_and_save_to_lifecycle( - self, - affiliation_dict: dict, - lom_base_item_loader: LomBaseItemloader, - organization_fallback: set[str], - lifecycle_role: str, + self, + affiliation_dict: dict, + lom_base_item_loader: LomBaseItemloader, + organization_fallback: set[str], + lifecycle_role: str, ): """ Retrieves metadata from OERSI's "affiliation"-field (which is typically found within a "creator"- or @@ -648,11 +652,11 @@ def validate_academic_title_string(self, honorific_prefix: str) -> str: return honorific_prefix.strip() def get_lifecycle_contributor( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - author_list: Optional[list[str]] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + author_list: Optional[list[str]] = None, ): """ Collects metadata from the OERSI "contributor"-field and stores it within a LomLifecycleItemLoader. @@ -754,11 +758,11 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) def get_lifecycle_publisher( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organizations_from_publisher_fields: set[str], - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organizations_from_publisher_fields: set[str], + date_published: Optional[str] = None, ): """ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. 
Successfully @@ -794,7 +798,7 @@ def get_lifecycle_publisher( lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) def get_lifecycle_organization_from_source_organization_fallback( - self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] + self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] ): # ATTENTION: the "sourceOrganization"-field is not part of the AMB draft, therefore this method is currently # used a fallback, so we don't lose any useful metadata (even if that metadata is not part of the AMB spec). @@ -836,8 +840,7 @@ def get_lifecycle_organization_from_source_organization_fallback( lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) def get_lifecycle_publisher_from_source_organization( - self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, - previously_collected_publishers: set[str] + self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, previously_collected_publishers: set[str] ): source_organizations: list[dict] = elastic_item_source.get("sourceOrganization") for so in source_organizations: @@ -857,10 +860,9 @@ def get_lifecycle_publisher_from_source_organization( lifecycle_org.add_value("url", org_url) lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) - def lifecycle_determine_type_of_identifier_and_save_uri(self, - item_dictionary: dict, - lifecycle_item_loader: LomLifecycleItemloader - ): + def lifecycle_determine_type_of_identifier_and_save_uri( + self, item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader + ): """ OERSI's "creator"/"contributor"/"affiliation" items might contain an 'id'-field which (optionally) provides URI-identifiers that reference GND / ORCID / Wikidata / ROR. @@ -872,10 +874,10 @@ def lifecycle_determine_type_of_identifier_and_save_uri(self, # "creator.id" can be 'null', therefore we need to explicitly check its type before trying to parse it uri_string: str = item_dictionary.get("id") if ( - "orcid.org" in uri_string - or "/gnd/" in uri_string - or "wikidata.org" in uri_string - or "ror.org" in uri_string + "orcid.org" in uri_string + or "/gnd/" in uri_string + or "wikidata.org" in uri_string + or "ror.org" in uri_string ): if "/gnd/" in uri_string: lifecycle_item_loader.add_value("id_gnd", uri_string) @@ -911,6 +913,290 @@ def split_names_if_possible_and_add_to_lifecycle(name_string: str, lifecycle_ite elif name_string: lifecycle_item_loader.add_value("firstName", name_string) + def enrich_imoox_metadata(self, base_itemloader: BaseItemLoader, elastic_item: dict): + """ + Combines the retrieved metadata from OERSI's elastic_item with iMoox (MOOCHub v3) metadata + if the identifiers match. 
+ """ + if self.imoox_json: + if "data" in self.imoox_json: + imoox_item_matched: dict | None = None + try: + imoox_items: list[dict] = self.imoox_json["data"] + for imoox_item in imoox_items: + imoox_course_url: str = imoox_item["attributes"]["url"] + if imoox_course_url and imoox_course_url == self.get_item_url(elastic_item): + self.logger.debug( + f"BIRD: Matched 'iMoox'-item {imoox_course_url} with OERSI " + f"ElasticSearch item {elastic_item['_id']} " + f"({elastic_item['_source']['id']})" + ) + imoox_item_matched = imoox_item + except KeyError as ke: + raise ke + if imoox_item_matched: + course_itemloader: CourseItemLoader = CourseItemLoader() + if "attributes" in imoox_item_matched: + imoox_attributes: dict = imoox_item_matched["attributes"] + # ToDo: MOOCHUb Spec v3 allows a list of (multiple, unique) date strings for + # - "startDate" + # - "endDate" + # -> "CourseItem" needs to be expanded to support multiple values for this field + # + # (this problem is theoretical in nature at the moment, + # since "iMoox" currently provides only 1 value per property, + # but this might change in the future!) + if "startDate" in imoox_attributes: + start_dates: list[str] = imoox_attributes["startDate"] + if start_dates and isinstance(start_dates, list): + for start_date_raw in start_dates: + if start_date_raw and isinstance(start_date_raw, str): + sdt_parsed: datetime = dateparser.parse(start_date_raw) + if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): + sd_parsed_iso: str = sdt_parsed.isoformat() + course_itemloader.add_value("course_availability_from", sd_parsed_iso) + if "endDate" in imoox_attributes: + end_dates: list[str] = imoox_attributes["endDate"] + if end_dates and isinstance(end_dates, list): + for end_date_raw in end_dates: + if end_date_raw and isinstance(end_date_raw, str): + edt_parsed: datetime = dateparser.parse(end_date_raw) + if edt_parsed and isinstance(edt_parsed, datetime.datetime): + ed_parsed_iso: str = edt_parsed.isoformat() + course_itemloader.add_value("course_availability_until", ed_parsed_iso) + if "trailer" in imoox_attributes: + # example data (as of 2024-05-27) + # "trailer": { + # "contentUrl": "https://www.youtube.com/watch?v=DljC8FPpE1s", + # "type": "VideoObject", + # "license": [ + # { + # "identifier": "CC-BY-SA-4.0", + # "url": "https://creativecommons.org/licenses/by-sa/4.0/" + # } + # ] + # }, + if "contentUrl" in imoox_attributes["trailer"]: + imoox_course_trailer_url: str = imoox_attributes["trailer"]["contentUrl"] + if imoox_course_trailer_url and isinstance(imoox_course_trailer_url, str): + course_itemloader.add_value("course_url_video", imoox_course_trailer_url) + if "duration" in imoox_attributes and "workload" in imoox_attributes: + # ToDo: "duration" and "workload" can currently only be saved as a (coupled) + # value since the destination is "cclom:typicallearningtime" for both fields + # which expects a (total) duration in milliseconds + + # iMoox provides "duration" as ISO-8601 formatted duration (period) strings. + # Typical "duration" values (as of 2024-05-27): "P7W", "P12W" etc. 
+ # see MOOCHub v3 Schema: + # https://github.com/MOOChub/schema/blob/main/moochub-schema.json#L907-L912 + amount_of_weeks: int | None = None + duration_in_weeks_raw: str = imoox_attributes["duration"] + if duration_in_weeks_raw and isinstance(duration_in_weeks_raw, str): + duration_pattern: re.Pattern = re.compile(r"""^P(?P\d+)W$""") + duration_result: re.Match | None = duration_pattern.search(duration_in_weeks_raw) + if duration_result: + dura_dict: dict = duration_result.groupdict() + if "amount_of_weeks" in dura_dict: + amount_of_weeks = dura_dict["amount_of_weeks"] + # convert to Integer for further calculations + amount_of_weeks = int(amount_of_weeks) + # ATTENTION: iMoox uses a different structure for "workload"-objects than vhb! + # (due to different MOOCHub versions) + # example data (as of 2024-05-27): + # "workload": { + # "timeValue": 2, + # "timeUnit": "h/week" + # }, + # see MOOCHub v3 Schema - workload: + # (https://github.com/MOOChub/schema/blob/main/moochub-schema.json#L1634-L1662), + time_value: int | None = None + time_unit: str | None = None + if "timeUnit" in imoox_attributes["workload"]: + # "timeUnit" can be one of several values: + # "h/month", "h/week", "h/day" + time_unit: str = imoox_attributes["workload"]["timeUnit"] + if "timeValue" in imoox_attributes["workload"]: + time_value: int = imoox_attributes["workload"]["timeValue"] + if time_unit and time_value and amount_of_weeks: + # "iMoox" provides all their durations / workloads in a week-related way, + # while "cclom:typicallearningtime" expects ms. Therefore: + # 1) we extract the amount of weeks from "duration" + # 2) calculate: * = total duration in h + # 3) convert total duration from h to ms + if time_unit == "h/week": + total_duration_in_hours: int = amount_of_weeks * time_value + duration_delta = datetime.timedelta(hours=total_duration_in_hours) + if duration_delta: + total_duration_in_ms: int = int(duration_delta.total_seconds() * 1000) + course_itemloader.add_value("course_duration", total_duration_in_ms) + self.logger.debug( + f"BIRD: combined iMoox 'duration' " + f"( {duration_in_weeks_raw} ) and 'workload' " + f"( {time_value} {time_unit} ) to {total_duration_in_hours} h " + f"(-> {total_duration_in_ms} ms)." + ) + else: + # ToDo: convert "h/day" and "h/month" in a similar fashion + self.logger.warning( + f"BIRD: iMoox provided a time unit {time_unit} for 'workload' " + f"which couldn't be handled. " + f"(Please update the crawler!)" + ) + pass + base_itemloader.add_value("course", course_itemloader.load_item()) + + def enrich_vhb_metadata( + self, + base_itemloader: BaseItemLoader, + elastic_item: dict, + lom_general_itemloader: LomGeneralItemloader, + in_languages: list[str] | None, + ): + """ + Combines metadata from OERSI's elastic_item with MOOCHub v2.x metadata from the source (vhb) + if the identifiers match. + """ + # Reminder: "VHB" (= "Virtuelle Hochschule Bayern") uses MOOCHub for their JSON export! + # The following implementation is therefore MOOCHub-specific + # and NEEDS to be refactored into a separate class hook ASAP! 
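+        # Note on the "workload" handling further below (illustrative numbers): a vhb string such as
+        # "60 Stunden" is parsed into a timedelta of 60 hours and stored as milliseconds for
+        # "cclom:typicallearningtime", i.e. 60 h = 216_000 s = 216_000_000 ms.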
+ if self.vhb_oersi_json: + if "data" in self.vhb_oersi_json: + try: + vhb_items: list[dict] = self.vhb_oersi_json["data"] + vhb_item_matched: dict | None = None + for vhb_item in vhb_items: + # since the vhb_item has a different "id", the only way to match the OERSI item + # against the vhb item is by comparing their URLs: + vhb_course_url: str = vhb_item["attributes"]["url"] + if vhb_course_url and vhb_course_url == self.get_item_url(elastic_item): + self.logger.debug( + f"BIRD: Matched 'vhb'-item {vhb_course_url} with OERSI " + f"ElasticSearch item {elastic_item['_id']}" + ) + vhb_item_matched = vhb_item + except KeyError as ke: + raise ke + if vhb_item_matched: + # if we found a match, we're now trying to enrich the item with metadata from both + # sources + course_itemloader: CourseItemLoader = CourseItemLoader() + if "attributes" in vhb_item_matched: + if not in_languages and "languages" in vhb_item_matched["attributes"]: + # beware: the vhb 'languages'-property is a string value! + vhb_language: str | None = vhb_item_matched["attributes"]["languages"] + if vhb_language and isinstance(vhb_language, str): + lom_general_itemloader.add_value("language", vhb_language) + elif vhb_language: + self.logger.warning( + f"Received unexpected vhb 'languages'-type! " f"(Type: {type(vhb_language)}" + ) + if "abstract" in vhb_item_matched["attributes"]: + vhb_abstract: str = vhb_item_matched["attributes"]["abstract"] + if vhb_abstract and isinstance(vhb_abstract, str): + course_itemloader.add_value("course_description_short", vhb_abstract) + if "learningObjectives" in vhb_item_matched["attributes"]: + vhb_learning_objectives: str = vhb_item_matched["attributes"]["learningObjectives"] + if vhb_learning_objectives and isinstance(vhb_learning_objectives, str): + course_itemloader.add_value("course_learningoutcome", vhb_learning_objectives) + if "outline" in vhb_item_matched["attributes"]: + outline_raw: str = vhb_item_matched["attributes"]["outline"] + if outline_raw and isinstance(outline_raw, str): + # ToDo: vhb "outline" -> course_schedule -> "ccm:oeh_course_schedule" + # the vhb attribute "outline" describes a course's schedule (Kursablauf) + # IMPORTANT: "outline" is not part of MOOCHub v2.x nor 3.x! + course_itemloader.add_value("course_schedule", outline_raw) + else: + self.logger.warning( + f"Received vhb 'outline'-property of unexpected type: " f"{outline_raw}" + ) + if "startDate" in vhb_item_matched["attributes"]: + start_date_raw: str = vhb_item_matched["attributes"]["startDate"] + if start_date_raw and isinstance(start_date_raw, str): + # parsing the date string first to check its validity + sdt_parsed: datetime = dateparser.parse(start_date_raw) + if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): + # just to make sure that we don't parse bogus data, we run the string + # through the dateparser module first and convert it to iso 8601 + sd_parsed_iso: str = sdt_parsed.isoformat() + course_itemloader.add_value("course_availability_from", sd_parsed_iso) + else: + self.logger.warning( + f"Could not parse vhb 'start_date' value {start_date_raw} " + f"to datetime. 
(Please check for new edge-cases " + f"and update the crawler!)" + ) + if "video" in vhb_item_matched["attributes"]: + video_item: dict = vhb_item_matched["attributes"]["video"] + if video_item: + if "url" in video_item: + vhb_course_video_url: str = video_item["url"] + if vhb_course_video_url: + course_itemloader.add_value("course_url_video", vhb_course_video_url) + # ToDo: "video.licenses" is of type list[dict] + # each "license"-dict can have an "id"- and "url"-property + if "workload" in vhb_item_matched["attributes"]: + vhb_workload_raw: str = vhb_item_matched["attributes"]["workload"] + if vhb_workload_raw and isinstance(vhb_workload_raw, str): + # vhb "workload"-values are described as a natural lange (German) + # " "-string, e.g.: "5 Stunden" or "60 Stunden". + # Since edu-sharing expects seconds in "cclom:typicallearningtime", + # we need to parse the string and convert it to seconds. + vhb_workload: str = vhb_workload_raw.strip() + duration_pattern = re.compile(r"""(?P\d+)\s*(?P\w*)""") + # ToDo: refactor into + # "MOOCHub (v2?) workload to BIRD course_duration" method + duration_match: re.Match | None = duration_pattern.search(vhb_workload) + duration_delta: datetime.timedelta = datetime.timedelta() + if duration_match: + duration_result: dict = duration_match.groupdict() + if "duration_number" in duration_result: + duration_number_raw: str = duration_result["duration_number"] + duration_number: int = int(duration_number_raw) + if "duration_unit" in duration_result: + duration_unit: str = duration_result["duration_unit"] + duration_unit = duration_unit.lower() + match duration_unit: + case "sekunden": + duration_delta = duration_delta + datetime.timedelta( + seconds=duration_number + ) + case "minuten": + duration_delta = duration_delta + datetime.timedelta( + minutes=duration_number + ) + case "stunden": + duration_delta = duration_delta + datetime.timedelta( + hours=duration_number + ) + case "tage": + duration_delta = duration_delta + datetime.timedelta( + days=duration_number + ) + case "wochen": + duration_delta = duration_delta + datetime.timedelta( + weeks=duration_number + ) + case "monate": + # timedelta has no parameter for months + # -> X months = X * (4 weeks) + duration_delta = duration_delta + ( + duration_number * datetime.timedelta(weeks=4) + ) + case _: + self.logger.warning( + f"Failed to parse 'workload' time unit" + f"from vhb course: " + f"{vhb_item_matched}" + ) + if duration_delta: + workload_in_seconds: int = int(duration_delta.total_seconds()) + if workload_in_seconds: + # the edu-sharing property 'cclom:typicallearningtime' + # expects values in ms: + workload_in_ms: int = workload_in_seconds * 1000 + course_itemloader.add_value("course_duration", workload_in_ms) + base_itemloader.add_value("course", course_itemloader.load_item()) + def parse(self, response=None, **kwargs): elastic_item: dict = kwargs.get("elastic_item") elastic_item_source: dict = elastic_item.get("_source") @@ -1237,289 +1523,11 @@ def parse(self, response=None, **kwargs): if elastic_item["OERSI_QUERY_PROVIDER_NAME"]: # checking if the "metadata provider name" that was used for the ElasticSearch query needs to be handled query_parameter_provider_name: str = elastic_item["OERSI_QUERY_PROVIDER_NAME"] - # --- "iMoox" metadata hook starts here: - if query_parameter_provider_name and query_parameter_provider_name == "iMoox": - if self.imoox_json: - if "data" in self.imoox_json: - imoox_item_matched: dict | None = None - try: - imoox_items: list[dict] = self.imoox_json["data"] - for 
imoox_item in imoox_items: - imoox_course_url: str = imoox_item["attributes"]["url"] - if imoox_course_url and imoox_course_url == identifier_url: - self.logger.debug(f"BIRD: Matched 'iMoox'-item {imoox_course_url} with OERSI " - f"ElasticSearch item {elastic_item['_id']} " - f"({elastic_item_source['id']})") - imoox_item_matched = imoox_item - except KeyError as ke: - raise ke - if imoox_item_matched: - course_itemloader: CourseItemLoader = CourseItemLoader() - if "attributes" in imoox_item_matched: - imoox_attributes: dict = imoox_item_matched["attributes"] - # ToDo: MOOCHUb Spec v3 allows a list of (multiple, unique) date strings for - # - "startDate" - # - "endDate" - # -> "CourseItem" needs to be expanded to support multiple values for this field - # (this problem is theoretical in nature at the moment, - # since "iMoox" currently provides only 1 value per property, - # but this might change in the future!) - if "startDate" in imoox_attributes: - start_dates: list[str] = imoox_attributes["startDate"] - if start_dates and isinstance(start_dates, list): - for start_date_raw in start_dates: - if start_date_raw and isinstance(start_date_raw, str): - sdt_parsed: datetime = dateparser.parse(start_date_raw) - if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): - sd_parsed_iso: str = sdt_parsed.isoformat() - course_itemloader.add_value("course_availability_from", - sd_parsed_iso) - if "endDate" in imoox_attributes: - end_dates: list[str] = imoox_attributes["endDate"] - if end_dates and isinstance(end_dates, list): - for end_date_raw in end_dates: - if end_date_raw and isinstance(end_date_raw, str): - edt_parsed: datetime = dateparser.parse(end_date_raw) - if edt_parsed and isinstance(edt_parsed, datetime.datetime): - ed_parsed_iso: str = edt_parsed.isoformat() - course_itemloader.add_value("course_availability_until", - ed_parsed_iso) - if "trailer" in imoox_attributes: - # example data (as of 2024-05-27) - # "trailer": { - # "contentUrl": "https://www.youtube.com/watch?v=DljC8FPpE1s", - # "type": "VideoObject", - # "license": [ - # { - # "identifier": "CC-BY-SA-4.0", - # "url": "https://creativecommons.org/licenses/by-sa/4.0/" - # } - # ] - # }, - if "contentUrl" in imoox_attributes["trailer"]: - imoox_course_trailer_url: str = imoox_attributes["trailer"]["contentUrl"] - if imoox_course_trailer_url and isinstance(imoox_course_trailer_url, str): - course_itemloader.add_value("course_url_video", - imoox_course_trailer_url) - if "duration" in imoox_attributes and "workload" in imoox_attributes: - # ToDo: "duration" and "workload" can currently only be saved as a (coupled) - # value since the destination is "cclom:typicallearningtime" for both fields - # which expects a (total) duration in milliseconds - - # iMoox provides "duration" as ISO-8601 formatted duration (period) strings. - # Typical "duration" values (as of 2024-05-27): "P7W", "P12W" etc. 
- # see MOOCHub v3 Schema: - # https://github.com/MOOChub/schema/blob/main/moochub-schema.json#L907-L912 - amount_of_weeks: int | None = None - duration_in_weeks_raw: str = imoox_attributes["duration"] - if duration_in_weeks_raw and isinstance(duration_in_weeks_raw, str): - duration_pattern: re.Pattern = re.compile( - r"""^P(?P\d+)W$""") - duration_result: re.Match | None = duration_pattern.search( - duration_in_weeks_raw) - if duration_result: - dura_dict: dict = duration_result.groupdict() - if "amount_of_weeks" in dura_dict: - amount_of_weeks = dura_dict["amount_of_weeks"] - # convert to Integer for further calculations - amount_of_weeks = int(amount_of_weeks) - # ATTENTION: iMoox uses a different structure for "workload"-objects than vhb! - # (due to different MOOCHub versions) - # example data (as of 2024-05-27): - # "workload": { - # "timeValue": 2, - # "timeUnit": "h/week" - # }, - # see MOOCHub v3 Schema - workload: - # (https://github.com/MOOChub/schema/blob/main/moochub-schema.json#L1634-L1662), - time_value: int | None = None - time_unit: str | None = None - if "timeUnit" in imoox_attributes["workload"]: - # "timeUnit" can be one of several values: - # "h/month", "h/week", "h/day" - time_unit: str = imoox_attributes["workload"]["timeUnit"] - if "timeValue" in imoox_attributes["workload"]: - time_value: int = imoox_attributes["workload"]["timeValue"] - if time_unit and time_value and amount_of_weeks: - # "iMoox" provides all their durations / workloads in a week-related way, - # while "cclom:typicallearningtime" expects ms. Therefore: - # 1) we extract the amount of weeks from "duration" - # 2) calculate: * = total duration in h - # 3) convert total duration from h to ms - if time_unit == "h/week": - total_duration_in_hours: int = amount_of_weeks * time_value - duration_delta = datetime.timedelta(hours=total_duration_in_hours) - if duration_delta: - total_duration_in_ms: int = int( - duration_delta.total_seconds() * 1000) - course_itemloader.add_value("course_duration", total_duration_in_ms) - self.logger.debug(f"BIRD: combined iMoox 'duration' " - f"( {duration_in_weeks_raw} ) and 'workload' " - f"( {time_value} {time_unit} ) to " - f"{total_duration_in_ms} ms.") - else: - # ToDo: convert "h/day" and "h/month" in a similar fashion - self.logger.warning(f"BIRD: iMoox provided a time unit {time_unit} " - f"which couldn't be handled. " - f"(Please update the crawler!)") - pass - base.add_value("course", course_itemloader.load_item()) - # --- iMoox hook ends here --- - # --- "vhb" metadata hook starts here: - if query_parameter_provider_name and query_parameter_provider_name == "vhb": - # Reminder: "VHB" (= "Virtuelle Hochschule Bayern") uses MOOCHub for their JSON export! - # The following implementation is therefore MOOCHub-specific - # and NEEDS to be refactored into a separate class hook ASAP! 
- if self.vhb_oersi_json: - if "data" in self.vhb_oersi_json: - try: - vhb_items: list[dict] = self.vhb_oersi_json["data"] - vhb_item_matched: dict | None = None - for vhb_item in vhb_items: - # since the vhb_item has a different "id", the only way to match the OERSI item - # against the vhb item is by comparing their URLs: - vhb_course_url: str = vhb_item["attributes"]["url"] - if vhb_course_url and vhb_course_url == identifier_url: - self.logger.debug( - f"BIRD: Matched 'vhb'-item {vhb_course_url} with OERSI " - f"ElasticSearch item {elastic_item['_id']}" - ) - vhb_item_matched = vhb_item - except KeyError as ke: - raise ke - if vhb_item_matched: - # if we found a match, we're now trying to enrich the item with metadata from both - # sources - course_itemloader: CourseItemLoader = CourseItemLoader() - if "attributes" in vhb_item_matched: - if not in_languages and "languages" in vhb_item_matched["attributes"]: - # beware: the vhb 'languages'-property is a string value! - vhb_language: str | None = vhb_item_matched["attributes"]["languages"] - if vhb_language and isinstance(vhb_language, str): - general.add_value("language", vhb_language) - elif vhb_language: - self.logger.warning( - f"Received unexpected vhb 'languages'-type! " - f"(Type: {type(vhb_language)}" - ) - if "abstract" in vhb_item_matched["attributes"]: - vhb_abstract: str = vhb_item_matched["attributes"]["abstract"] - if vhb_abstract and isinstance(vhb_abstract, str): - course_itemloader.add_value("course_description_short", vhb_abstract) - if "learningObjectives" in vhb_item_matched["attributes"]: - vhb_learning_objectives: str = vhb_item_matched["attributes"][ - "learningObjectives" - ] - if vhb_learning_objectives and isinstance(vhb_learning_objectives, str): - course_itemloader.add_value( - "course_learningoutcome", vhb_learning_objectives - ) - if "outline" in vhb_item_matched["attributes"]: - outline_raw: str = vhb_item_matched["attributes"]["outline"] - if outline_raw and isinstance(outline_raw, str): - # ToDo: vhb "outline" -> course_schedule -> "ccm:oeh_course_schedule" - # the vhb attribute "outline" describes a course's schedule (Kursablauf) - # IMPORTANT: "outline" is not part of MOOCHub v2.x nor 3.x! - course_itemloader.add_value("course_schedule", outline_raw) - else: - self.logger.warning(f"Received vhb 'outline'-property of unexpected type: " - f"{outline_raw}") - if "startDate" in vhb_item_matched["attributes"]: - start_date_raw: str = vhb_item_matched["attributes"]["startDate"] - if start_date_raw and isinstance(start_date_raw, str): - # parsing the date string first to check its validity - sdt_parsed: datetime = dateparser.parse(start_date_raw) - if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): - # just to make sure that we don't parse bogus data, we run the string - # through the dateparser module first and convert it to iso 8601 - sd_parsed_iso: str = sdt_parsed.isoformat() - course_itemloader.add_value("course_availability_from", sd_parsed_iso) - else: - self.logger.warning( - f"Could not parse vhb 'start_date' value {start_date_raw} " - f"to datetime. 
(Please check for new edge-cases " - f"and update the crawler!)") - if "video" in vhb_item_matched["attributes"]: - video_item: dict = vhb_item_matched["attributes"]["video"] - if video_item: - if "url" in video_item: - vhb_course_video_url: str = video_item["url"] - if vhb_course_video_url: - course_itemloader.add_value( - "course_url_video", vhb_course_video_url - ) - # ToDo: "video.licenses" is of type list[dict] - # each "license"-dict can have an "id"- and "url"-property - if "workload" in vhb_item_matched["attributes"]: - vhb_workload_raw: str = vhb_item_matched["attributes"]["workload"] - if vhb_workload_raw and isinstance(vhb_workload_raw, str): - # vhb "workload"-values are described as a natural lange (German) - # " "-string, e.g.: "5 Stunden" or "60 Stunden". - # Since edu-sharing expects seconds in "cclom:typicallearningtime", - # we need to parse the string and convert it to seconds. - vhb_workload: str = vhb_workload_raw.strip() - duration_pattern = re.compile( - r"""(?P\d+)\s*(?P\w*)""" - ) - # ToDo: refactor into - # "MOOCHub (v2?) workload to BIRD course_duration" method - duration_match: re.Match | None = duration_pattern.search(vhb_workload) - duration_delta: datetime.timedelta = datetime.timedelta() - if duration_match: - duration_result: dict = duration_match.groupdict() - if "duration_number" in duration_result: - duration_number_raw: str = duration_result["duration_number"] - duration_number: int = int(duration_number_raw) - if "duration_unit" in duration_result: - duration_unit: str = duration_result["duration_unit"] - duration_unit = duration_unit.lower() - match duration_unit: - case "sekunden": - duration_delta = duration_delta + datetime.timedelta( - seconds=duration_number - ) - case "minuten": - duration_delta = duration_delta + datetime.timedelta( - minutes=duration_number - ) - case "stunden": - duration_delta = duration_delta + datetime.timedelta( - hours=duration_number - ) - case "tage": - duration_delta = duration_delta + datetime.timedelta( - days=duration_number - ) - case "wochen": - duration_delta = duration_delta + datetime.timedelta( - weeks=duration_number - ) - case "monate": - # timedelta has no parameter for months - # -> X months = X * (4 weeks) - duration_delta = duration_delta + ( - duration_number * datetime.timedelta(weeks=4) - ) - case _: - self.logger.warning( - f"Failed to parse 'workload' time unit" - f"from vhb course: " - f"{vhb_item_matched}" - ) - if duration_delta: - workload_in_seconds: int = int( - duration_delta.total_seconds() - ) - if workload_in_seconds: - # the edu-sharing property 'cclom:typicallearningtime' - # expects values in ms: - workload_in_ms: int = ( - workload_in_seconds * 1000 - ) - course_itemloader.add_value( - "course_duration", workload_in_ms - ) - base.add_value("course", course_itemloader.load_item()) + if query_parameter_provider_name: + if query_parameter_provider_name == "iMoox": + self.enrich_imoox_metadata(base, elastic_item) + if query_parameter_provider_name == "vhb": + self.enrich_vhb_metadata(base, elastic_item, general, in_languages) # --- BIRD HOOKS END HERE! # noinspection DuplicatedCode From 3bc450c087f2821bb63f5b6a15b1a3aac2052a45 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 28 Jun 2024 12:02:34 +0200 Subject: [PATCH 490/590] chore: update "scrapy"-related dependencies (security bugfixes) Attention: Scrapy's "allowed_domains" custom setting is stricter than previous versions! 
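For reference, the Scrapy documentation describes "allowed_domains" as an optional list of
plain domain names (no scheme, no path). Below is a minimal, illustrative sketch of that list
form ("ExampleSpider" and "example_spider" are made-up names; whether the bare-string
assignment allowed_domains = "oersi.org" used by oersi_spider.py still passes the stricter
checks is an assumption that should be verified against the changelog entry linked below):

    import scrapy

    class ExampleSpider(scrapy.Spider):
        name = "example_spider"
        # a list of domain names, not URLs, per the Scrapy docs
        allowed_domains = ["oersi.org"]
        start_urls = ["https://oersi.org/"]

        def parse(self, response, **kwargs):
            pass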
- see: https://docs.scrapy.org/en/latest/news.html#scrapy-2-11-2-2024-05-14 --- poetry.lock | 992 ++++++++++++++++++++++++++--------------------- pyproject.toml | 8 +- requirements.txt | 84 ++-- 3 files changed, 597 insertions(+), 487 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0996328b..67144a11 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "anyio" -version = "4.2.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -24,13 +24,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "asgiref" -version = "3.7.2" +version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"}, - {file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"}, + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, ] [package.dependencies] @@ -383,17 +383,17 @@ files = [ [[package]] name = "courlan" -version = "0.9.5" +version = "1.2.0" description = "Clean, filter and sample URLs to optimize data collection – includes spam, content type and language filters." 
optional = false python-versions = ">=3.6" files = [ - {file = "courlan-0.9.5-py3-none-any.whl", hash = "sha256:3c10fb06a26422b5c5e6f5f6d2c16e5d4308026f9dcea783ca6a88dae5922ee5"}, - {file = "courlan-0.9.5.tar.gz", hash = "sha256:38dc35b2e3bf1f5d516d00d51ac12ebde543e3417c6be6f6a2273c0fc5b5b353"}, + {file = "courlan-1.2.0-py3-none-any.whl", hash = "sha256:df9d3735b611e717c52a813a49d17a8b4d3a9d8b87bbace9065171fc5d084397"}, + {file = "courlan-1.2.0.tar.gz", hash = "sha256:0cbc9cac83970c651b937a7823a5d92cbebb6b601454ea0fb6cb4d0ee5d1845d"}, ] [package.dependencies] -langcodes = ">=3.3.0" +babel = ">=2.11.0" tld = {version = ">=0.13", markers = "python_version >= \"3.7\""} urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} @@ -475,15 +475,26 @@ calendars = ["convertdate", "hijri-converter"] fasttext = ["fasttext"] langdetect = ["langdetect"] +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + [[package]] name = "django" -version = "5.0.1" +version = "5.0.6" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.10" files = [ - {file = "Django-5.0.1-py3-none-any.whl", hash = "sha256:f47a37a90b9bbe2c8ec360235192c7fddfdc832206fcf618bb849b39256affc1"}, - {file = "Django-5.0.1.tar.gz", hash = "sha256:8c8659665bc6e3a44fefe1ab0a291e5a3fb3979f9a8230be29de975e57e8f854"}, + {file = "Django-5.0.6-py3-none-any.whl", hash = "sha256:8363ac062bb4ef7c3f12d078f6fa5d154031d129a15170a1066412af49d30905"}, + {file = "Django-5.0.6.tar.gz", hash = "sha256:ff1b61005004e476e0aeea47c7f79b85864c70124030e95146315396f1e7951f"}, ] [package.dependencies] @@ -497,13 +508,13 @@ bcrypt = ["bcrypt"] [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -511,23 +522,23 @@ test = ["pytest (>=6)"] [[package]] name = "extruct" -version = "0.16.0" +version = "0.17.0" description = "Extract embedded metadata from HTML markup" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "extruct-0.16.0-py2.py3-none-any.whl", hash = "sha256:2499ea9e7d22744745ca708acee9542a4aa231871620c4f65f869a1286e64aa8"}, - {file = "extruct-0.16.0.tar.gz", hash = "sha256:d09cb3d86d149a276b277b3bd45b2b867ef3ec78bed9cd58ee0f2ae01ae670c4"}, + {file = "extruct-0.17.0-py2.py3-none-any.whl", hash = "sha256:5f1d8e307fbb0c41f64ce486ddfaf16dc67e4b8f6e9570c57b123409ee37a307"}, + {file = "extruct-0.17.0.tar.gz", hash = 
"sha256:a94c0be5b5fd95a8370204ecc02687bd27845d536055d8d1c69a0a30da0420c7"}, ] [package.dependencies] html-text = ">=0.5.1" jstyleson = "*" lxml = "*" +lxml-html-clean = "*" mf2py = "*" pyrdfa3 = "*" -rdflib = {version = ">=6.0.0", markers = "python_version >= \"3.7\""} -six = "*" +rdflib = ">=6.0.0" w3lib = "*" [package.extras] @@ -535,18 +546,18 @@ cli = ["requests"] [[package]] name = "filelock" -version = "3.13.1" +version = "3.15.4" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -649,17 +660,18 @@ files = [ [[package]] name = "html-text" -version = "0.5.2" +version = "0.6.2" description = "Extract text from HTML" optional = false python-versions = "*" files = [ - {file = "html_text-0.5.2-py2.py3-none-any.whl", hash = "sha256:3f1e063f05eddf3e099a88f0440219c55fdc01c44f1291fe59c66e5228d7fc56"}, - {file = "html_text-0.5.2.tar.gz", hash = "sha256:afd61bbb70651d494a8c32670a29b9140492eccc9690109857beae41c3093ded"}, + {file = "html_text-0.6.2-py2.py3-none-any.whl", hash = "sha256:d83d619ccd4b4d6172e21084d8a46e29e49ce87a08cc02161e7ca8c2918e7bca"}, + {file = "html_text-0.6.2.tar.gz", hash = "sha256:81455b4de5430cf63ce7c45a870fb8629e79ca8518e240f172d62409c2f2ff72"}, ] [package.dependencies] lxml = "*" +lxml-html-clean = "*" [[package]] name = "html2text" @@ -695,13 +707,13 @@ lxml = ["lxml"] [[package]] name = "htmldate" -version = "1.7.0" +version = "1.8.1" description = "Fast and robust extraction of original and updated publication dates from URLs and web pages." optional = false python-versions = ">=3.6" files = [ - {file = "htmldate-1.7.0-py3-none-any.whl", hash = "sha256:d82265ac19571b78985d53585b63917d2d2f2c6b96fc9b5cd1928f2777636832"}, - {file = "htmldate-1.7.0.tar.gz", hash = "sha256:02a800dd224cbf74bf483b042f64e14f57ba0e40c6b4404b284e98bc6c30b68d"}, + {file = "htmldate-1.8.1-py3-none-any.whl", hash = "sha256:b1209dedfa7bc9bb4d0b812a3f0983ea5d39f1bdfe21745659ad26af4f8b7f32"}, + {file = "htmldate-1.8.1.tar.gz", hash = "sha256:caf1686cf75c61dd1f061ede5d7a46e759b15d5f9987cd8e13c8c4237511263d"}, ] [package.dependencies] @@ -717,13 +729,13 @@ speed = ["backports-datetime-fromisoformat", "faust-cchardet (>=2.1.19)", "urlli [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.5" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -734,7 +746,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.23.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" @@ -776,13 +788,13 @@ idna = ">=2.5" [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -842,24 +854,24 @@ six = "*" [[package]] name = "itemadapter" -version = "0.8.0" +version = "0.9.0" description = "Common interface for data container classes" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "itemadapter-0.8.0-py3-none-any.whl", hash = "sha256:2ac1fbcc363b789a18639935ca322e50a65a0a7dfdd8d973c34e2c468e6c0f94"}, - {file = "itemadapter-0.8.0.tar.gz", hash = "sha256:77758485fb0ac10730d4b131363e37d65cb8db2450bfec7a57c3f3271f4a48a9"}, + {file = "itemadapter-0.9.0-py3-none-any.whl", hash = "sha256:cfd108c9d5205d056fcac402ec8f8e9d799ce9066911eec1cd521ea442f87af1"}, + {file = "itemadapter-0.9.0.tar.gz", hash = "sha256:e4f958a6b6b6f5831fa207373010031a0bd7ed0429ddd09b51979c011475cafd"}, ] [[package]] name = "itemloaders" -version = "1.1.0" +version = "1.3.1" description = "Base library for scrapy's ItemLoader" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "itemloaders-1.1.0-py3-none-any.whl", hash = "sha256:c8c82fe0c11fc4cdd08ec04df0b3c43f3cb7190002edb517e02d55de8efc2aeb"}, - {file = "itemloaders-1.1.0.tar.gz", hash = "sha256:21d81c61da6a08b48e5996288cdf3031c0f92e5d0075920a0242527523e14a48"}, + {file = "itemloaders-1.3.1-py3-none-any.whl", hash = "sha256:70be155cd050b8c532e1054f0241dcc4711bd15e62c0a0174963d1c110d9f0fa"}, + {file = "itemloaders-1.3.1.tar.gz", hash = "sha256:81571c941cc189bb55e211f0cd3476fde7511239d3bf7ff91eb6ed68a1b0ec10"}, ] [package.dependencies] @@ -891,212 +903,324 @@ files = [ [[package]] name = "justext" -version = "3.0.0" +version = "3.0.1" description = "Heuristic based boilerplate removal tool" optional = false python-versions = "*" files = [ - {file = "jusText-3.0.0-py2.py3-none-any.whl", hash = "sha256:86b48f5b1d99505acd072f5831def6cd3f1306043651c524a1c609e62e3544e4"}, - {file = "jusText-3.0.0.tar.gz", hash = "sha256:7640e248218795f6be65f6c35fe697325a3280fcb4675d1525bcdff2b86faadf"}, + {file = "jusText-3.0.1-py2.py3-none-any.whl", hash = 
"sha256:e0fb882dd7285415709f4b7466aed23d6b98b7b89404c36e8a2e730facfed02b"}, + {file = "justext-3.0.1.tar.gz", hash = "sha256:b6ed2fb6c5d21618e2e34b2295c4edfc0bcece3bd549ed5c8ef5a8d20f0b3451"}, ] [package.dependencies] -lxml = ">=4.4.2" +lxml = {version = ">=4.4.2", extras = ["html-clean"]} [[package]] name = "langcodes" -version = "3.3.0" +version = "3.4.0" description = "Tools for labeling human languages with IETF language tags" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "langcodes-3.3.0-py3-none-any.whl", hash = "sha256:4d89fc9acb6e9c8fdef70bcdf376113a3db09b67285d9e1d534de6d8818e7e69"}, - {file = "langcodes-3.3.0.tar.gz", hash = "sha256:794d07d5a28781231ac335a1561b8442f8648ca07cd518310aeb45d6f0807ef6"}, + {file = "langcodes-3.4.0-py3-none-any.whl", hash = "sha256:10a4cc078b8e8937d8485d3352312a0a89a3125190db9f2bb2074250eef654e9"}, + {file = "langcodes-3.4.0.tar.gz", hash = "sha256:ae5a77d1a01d0d1e91854a671890892b7ce9abb601ab7327fc5c874f899e1979"}, ] [package.dependencies] -language-data = {version = ">=1.1,<2.0", optional = true, markers = "extra == \"data\""} +language-data = ">=1.2" [package.extras] -data = ["language-data (>=1.1,<2.0)"] +build = ["build", "twine"] +test = ["pytest", "pytest-cov"] [[package]] name = "language-data" -version = "1.1" +version = "1.2.0" description = "Supplementary data about languages used by the langcodes module" optional = false -python-versions = ">=3.6" +python-versions = "*" files = [ - {file = "language_data-1.1-py3-none-any.whl", hash = "sha256:f7ba86fafe099ef213ef597eda483d5227b12446604a61f617122d6c925847d5"}, - {file = "language_data-1.1.tar.gz", hash = "sha256:c1f5283c46bba68befa37505857a3f672497aba0c522b37d99367e911232455b"}, + {file = "language_data-1.2.0-py3-none-any.whl", hash = "sha256:77d5cab917f91ee0b2f1aa7018443e911cf8985ef734ca2ba3940770f6a3816b"}, + {file = "language_data-1.2.0.tar.gz", hash = "sha256:82a86050bbd677bfde87d97885b17566cfe75dad3ac4f5ce44b52c28f752e773"}, ] [package.dependencies] -marisa-trie = ">=0.7.7,<0.8.0" +marisa-trie = ">=0.7.7" + +[package.extras] +build = ["build", "twine"] +test = ["pytest", "pytest-cov"] [[package]] name = "lxml" -version = "5.1.0" +version = "5.2.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:704f5572ff473a5f897745abebc6df40f22d4133c1e0a1f124e4f2bd3330ff7e"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9d3c0f8567ffe7502d969c2c1b809892dc793b5d0665f602aad19895f8d508da"}, - {file = "lxml-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5fcfbebdb0c5d8d18b84118842f31965d59ee3e66996ac842e21f957eb76138c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f37c6d7106a9d6f0708d4e164b707037b7380fcd0b04c5bd9cae1fb46a856fb"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2befa20a13f1a75c751f47e00929fb3433d67eb9923c2c0b364de449121f447c"}, - {file = "lxml-5.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22b7ee4c35f374e2c20337a95502057964d7e35b996b1c667b5c65c567d2252a"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf8443781533b8d37b295016a4b53c1494fa9a03573c09ca5104550c138d5c05"}, - {file = "lxml-5.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82bddf0e72cb2af3cbba7cec1d2fd11fda0de6be8f4492223d4a268713ef2147"}, - {file = "lxml-5.1.0-cp310-cp310-win32.whl", hash = "sha256:b66aa6357b265670bb574f050ffceefb98549c721cf28351b748be1ef9577d93"}, - {file = "lxml-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4946e7f59b7b6a9e27bef34422f645e9a368cb2be11bf1ef3cafc39a1f6ba68d"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:14deca1460b4b0f6b01f1ddc9557704e8b365f55c63070463f6c18619ebf964f"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed8c3d2cd329bf779b7ed38db176738f3f8be637bb395ce9629fc76f78afe3d4"}, - {file = "lxml-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:436a943c2900bb98123b06437cdd30580a61340fbdb7b28aaf345a459c19046a"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acb6b2f96f60f70e7f34efe0c3ea34ca63f19ca63ce90019c6cbca6b676e81fa"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af8920ce4a55ff41167ddbc20077f5698c2e710ad3353d32a07d3264f3a2021e"}, - {file = "lxml-5.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cfced4a069003d8913408e10ca8ed092c49a7f6cefee9bb74b6b3e860683b45"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9e5ac3437746189a9b4121db2a7b86056ac8786b12e88838696899328fc44bb2"}, - {file = "lxml-5.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4c9bda132ad108b387c33fabfea47866af87f4ea6ffb79418004f0521e63204"}, - {file = "lxml-5.1.0-cp311-cp311-win32.whl", hash = "sha256:bc64d1b1dab08f679fb89c368f4c05693f58a9faf744c4d390d7ed1d8223869b"}, - {file = "lxml-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5ab722ae5a873d8dcee1f5f45ddd93c34210aed44ff2dc643b5025981908cda"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9aa543980ab1fbf1720969af1d99095a548ea42e00361e727c58a40832439114"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6f11b77ec0979f7e4dc5ae081325a2946f1fe424148d3945f943ceaede98adb8"}, - {file = "lxml-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a36c506e5f8aeb40680491d39ed94670487ce6614b9d27cabe45d94cd5d63e1e"}, - {file = 
"lxml-5.1.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f643ffd2669ffd4b5a3e9b41c909b72b2a1d5e4915da90a77e119b8d48ce867a"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16dd953fb719f0ffc5bc067428fc9e88f599e15723a85618c45847c96f11f431"}, - {file = "lxml-5.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16018f7099245157564d7148165132c70adb272fb5a17c048ba70d9cc542a1a1"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82cd34f1081ae4ea2ede3d52f71b7be313756e99b4b5f829f89b12da552d3aa3"}, - {file = "lxml-5.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19a1bc898ae9f06bccb7c3e1dfd73897ecbbd2c96afe9095a6026016e5ca97b8"}, - {file = "lxml-5.1.0-cp312-cp312-win32.whl", hash = "sha256:13521a321a25c641b9ea127ef478b580b5ec82aa2e9fc076c86169d161798b01"}, - {file = "lxml-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ad17c20e3666c035db502c78b86e58ff6b5991906e55bdbef94977700c72623"}, - {file = "lxml-5.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:24ef5a4631c0b6cceaf2dbca21687e29725b7c4e171f33a8f8ce23c12558ded1"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d2900b7f5318bc7ad8631d3d40190b95ef2aa8cc59473b73b294e4a55e9f30f"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:601f4a75797d7a770daed8b42b97cd1bb1ba18bd51a9382077a6a247a12aa38d"}, - {file = "lxml-5.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4b68c961b5cc402cbd99cca5eb2547e46ce77260eb705f4d117fd9c3f932b95"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:afd825e30f8d1f521713a5669b63657bcfe5980a916c95855060048b88e1adb7"}, - {file = "lxml-5.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:262bc5f512a66b527d026518507e78c2f9c2bd9eb5c8aeeb9f0eb43fcb69dc67"}, - {file = "lxml-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:e856c1c7255c739434489ec9c8aa9cdf5179785d10ff20add308b5d673bed5cd"}, - {file = "lxml-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c7257171bb8d4432fe9d6fdde4d55fdbe663a63636a17f7f9aaba9bcb3153ad7"}, - {file = "lxml-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9e240ae0ba96477682aa87899d94ddec1cc7926f9df29b1dd57b39e797d5ab5"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96f02ba1bcd330807fc060ed91d1f7a20853da6dd449e5da4b09bfcc08fdcf5"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3898ae2b58eeafedfe99e542a17859017d72d7f6a63de0f04f99c2cb125936"}, - {file = "lxml-5.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61c5a7edbd7c695e54fca029ceb351fc45cd8860119a0f83e48be44e1c464862"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3aeca824b38ca78d9ee2ab82bd9883083d0492d9d17df065ba3b94e88e4d7ee6"}, - {file = "lxml-5.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8f52fe6859b9db71ee609b0c0a70fea5f1e71c3462ecf144ca800d3f434f0764"}, - {file = "lxml-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:d42e3a3fc18acc88b838efded0e6ec3edf3e328a58c68fbd36a7263a874906c8"}, - {file = "lxml-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:eac68f96539b32fce2c9b47eb7c25bb2582bdaf1bbb360d25f564ee9e04c542b"}, - {file = 
"lxml-5.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ae15347a88cf8af0949a9872b57a320d2605ae069bcdf047677318bc0bba45b1"}, - {file = "lxml-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c26aab6ea9c54d3bed716b8851c8bfc40cb249b8e9880e250d1eddde9f709bf5"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342e95bddec3a698ac24378d61996b3ee5ba9acfeb253986002ac53c9a5f6f84"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:725e171e0b99a66ec8605ac77fa12239dbe061482ac854d25720e2294652eeaa"}, - {file = "lxml-5.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d184e0d5c918cff04cdde9dbdf9600e960161d773666958c9d7b565ccc60c45"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:98f3f020a2b736566c707c8e034945c02aa94e124c24f77ca097c446f81b01f1"}, - {file = "lxml-5.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d48fc57e7c1e3df57be5ae8614bab6d4e7b60f65c5457915c26892c41afc59e"}, - {file = "lxml-5.1.0-cp38-cp38-win32.whl", hash = "sha256:7ec465e6549ed97e9f1e5ed51c657c9ede767bc1c11552f7f4d022c4df4a977a"}, - {file = "lxml-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b21b4031b53d25b0858d4e124f2f9131ffc1530431c6d1321805c90da78388d1"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52427a7eadc98f9e62cb1368a5079ae826f94f05755d2d567d93ee1bc3ceb354"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a2a2c724d97c1eb8cf966b16ca2915566a4904b9aad2ed9a09c748ffe14f969"}, - {file = "lxml-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843b9c835580d52828d8f69ea4302537337a21e6b4f1ec711a52241ba4a824f3"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b99f564659cfa704a2dd82d0684207b1aadf7d02d33e54845f9fc78e06b7581"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f8b0c78e7aac24979ef09b7f50da871c2de2def043d468c4b41f512d831e912"}, - {file = "lxml-5.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcf86dfc8ff3e992fed847c077bd875d9e0ba2fa25d859c3a0f0f76f07f0c8d"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:49a9b4af45e8b925e1cd6f3b15bbba2c81e7dba6dce170c677c9cda547411e14"}, - {file = "lxml-5.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:280f3edf15c2a967d923bcfb1f8f15337ad36f93525828b40a0f9d6c2ad24890"}, - {file = "lxml-5.1.0-cp39-cp39-win32.whl", hash = "sha256:ed7326563024b6e91fef6b6c7a1a2ff0a71b97793ac33dbbcf38f6005e51ff6e"}, - {file = "lxml-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8d7b4beebb178e9183138f552238f7e6613162a42164233e2bda00cb3afac58f"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9bd0ae7cc2b85320abd5e0abad5ccee5564ed5f0cc90245d2f9a8ef330a8deae"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c1d679df4361408b628f42b26a5d62bd3e9ba7f0c0e7969f925021554755aa"}, - {file = "lxml-5.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2ad3a8ce9e8a767131061a22cd28fdffa3cd2dc193f399ff7b81777f3520e372"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:304128394c9c22b6569eba2a6d98392b56fbdfbad58f83ea702530be80d0f9df"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d74fcaf87132ffc0447b3c685a9f862ffb5b43e70ea6beec2fb8057d5d2a1fea"}, - {file = "lxml-5.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8cf5877f7ed384dabfdcc37922c3191bf27e55b498fecece9fd5c2c7aaa34c33"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:877efb968c3d7eb2dad540b6cabf2f1d3c0fbf4b2d309a3c141f79c7e0061324"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f14a4fb1c1c402a22e6a341a24c1341b4a3def81b41cd354386dcb795f83897"}, - {file = "lxml-5.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:25663d6e99659544ee8fe1b89b1a8c0aaa5e34b103fab124b17fa958c4a324a6"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b9f19df998761babaa7f09e6bc169294eefafd6149aaa272081cbddc7ba4ca3"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e53d7e6a98b64fe54775d23a7c669763451340c3d44ad5e3a3b48a1efbdc96f"}, - {file = "lxml-5.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c3cd1fc1dc7c376c54440aeaaa0dcc803d2126732ff5c6b68ccd619f2e64be4f"}, - {file = "lxml-5.1.0.tar.gz", hash = "sha256:3eea6ed6e6c918e468e693c41ef07f3c3acc310b70ddd9cc72d9ef84bc9564ca"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, + 
{file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, + {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, + {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, + {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, + {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, + {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, + {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", 
hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, + {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, + {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, + {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, + {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, + {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, + {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, + {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, + {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, + {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, + {file = 
"lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, + {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, + {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, + {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, ] +[package.dependencies] +lxml-html-clean = {version = "*", optional = true, markers = "extra == \"html-clean\""} + [package.extras] cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.7)"] +source = ["Cython (>=3.0.10)"] + +[[package]] +name = "lxml-html-clean" +version = "0.1.1" +description = "HTML cleaner from lxml project" +optional = false +python-versions = "*" +files = [ + {file = "lxml_html_clean-0.1.1-py3-none-any.whl", hash = "sha256:58c04176593c9caf72ec92e033d2f38859e918b3eff0cc0f8051ad27dc2ab8ef"}, + {file = "lxml_html_clean-0.1.1.tar.gz", hash = "sha256:8a644ed01dbbe132fabddb9467f077f6dad12a1d4f3a6a553e280f3815fa46df"}, +] + +[package.dependencies] +lxml = "*" [[package]] name = "marisa-trie" -version = "0.7.8" +version = "1.2.0" description = "Static memory-efficient and fast Trie-like structures for Python." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "marisa-trie-0.7.8.tar.gz", hash = "sha256:aee3de5f2836074cfd803f1caf16f68390f262ef09cd7dc7d0e8aee9b6878643"}, - {file = "marisa_trie-0.7.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f1cf9d5ead4471b149fdb93a1c84eddaa941d23e67b0782091adc222d198a87"}, - {file = "marisa_trie-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:73296b4d6d8ce2f6bc3898fe84348756beddb10cb56442391d050bff135e9c4c"}, - {file = "marisa_trie-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:782c1515caa603656e15779bc61d5db3b079fa4270ad77f464908796e0d940aa"}, - {file = "marisa_trie-0.7.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49131e51aad530e4d47c716cef1bbef15a4e5b8f75bddfcdd7903f5043ef2331"}, - {file = "marisa_trie-0.7.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45b0a38e015d0149141f028b8892ab518946b828c7931685199549294f5893ca"}, - {file = "marisa_trie-0.7.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a537e0efff1ec880bc212390e97f1d35832a44bd78c96807ddb685d538875096"}, - {file = "marisa_trie-0.7.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c2a33ede2655f1a6fb840729128cb4bc48829108711f79b7a645b6c0c54b5c2"}, - {file = "marisa_trie-0.7.8-cp310-cp310-win32.whl", hash = "sha256:7200cde8e2040811e98661a60463b296b76a6b224411f8899aa0850085e6af40"}, - {file = "marisa_trie-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:a432607bae139183c7251da7eb22f761440bc07d92eacc9e9f7dc0d87f70c495"}, - {file = "marisa_trie-0.7.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a891d2841da153b98c6c7fbe0a89ea8edbc164bdc96a001f360bdcdd54e2070d"}, - {file = "marisa_trie-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c9ab632c5caef23a59cd43c76ab59e325f9eadd1e9c8b1c34005b9756ae716ee"}, - {file = "marisa_trie-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68087942e95acb5801f2a5e9a874aa57af27a4afb52aca81fe1cbe22b2a2fd38"}, - {file = "marisa_trie-0.7.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef2c4a5023bb6ddbaf1803187b7fb3108e9955aa9c60564504e5f622517c9e7"}, - {file = "marisa_trie-0.7.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24e873619f61bef6a87c669ae459b79d98822270e8a10b21fc52dddf2acc9a46"}, - {file = "marisa_trie-0.7.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:34189c321f30cefb76a6b20c7f055b3f6cd0bc8378c16ba8b7283fd898bf4ac2"}, - {file = "marisa_trie-0.7.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:396555d5f52dc86c65717052573fa2875e10f9e5dd014f825677beadcaec8248"}, - {file = "marisa_trie-0.7.8-cp311-cp311-win32.whl", hash = "sha256:bfe649b02b6318bac572b86d9ddd8276c594411311f8e5ef2edc4bcd7285a06f"}, - {file = "marisa_trie-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:84991b52a187d09b269c4caefc8b857a81156c44997eec7eac0e2862d108cc20"}, - {file = "marisa_trie-0.7.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0555104fe9f414abb12e967322a13df778b21958d1727470f4c8dedfde76a8f2"}, - {file = "marisa_trie-0.7.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f96531013252bca14f7665f67aa642be113b6c348ada5e167ebf8db27b1551b5"}, - {file = "marisa_trie-0.7.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed76391b132c6261cfb402c1a08679e635d09a0a142dae2c1744d816f103c7f"}, - {file = 
"marisa_trie-0.7.8-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6232506b4d66da932f70cf359a4c5ba9e086228ccd97b602159e90c6ea53dab"}, - {file = "marisa_trie-0.7.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34f927f2738d0b402b76821895254e6a164d5020042559f7d910f6632829cdfa"}, - {file = "marisa_trie-0.7.8-cp36-cp36m-win32.whl", hash = "sha256:645908879ae8fcadfb51650fc176902b9e68eee9a8c4d4d8c682cf99ce3ff029"}, - {file = "marisa_trie-0.7.8-cp36-cp36m-win_amd64.whl", hash = "sha256:a5bf2912810e135ce1e60a9b56a179ed62258306103bf5dd3186307f5c51b28f"}, - {file = "marisa_trie-0.7.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bd86212d5037973deda057fc29d60e83dca05e68fa1e7ceaf014c513975c7a0d"}, - {file = "marisa_trie-0.7.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f280f059be417cff81ac030db6a002f8a93093c7ca4555e570d43a24ed45514"}, - {file = "marisa_trie-0.7.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ae35c696f3c5b57c5fe4f73725102f3fe884bc658b854d484dfe6d7e72c86f5"}, - {file = "marisa_trie-0.7.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:524c02f398d361aaf85d8f7709b5ac6de68d020c588fb6c087fb171137643c13"}, - {file = "marisa_trie-0.7.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:353113e811ccfa176fbb611b83671f0b3b40f46b3896b096c10e43f65d35916d"}, - {file = "marisa_trie-0.7.8-cp37-cp37m-win32.whl", hash = "sha256:93172a7314d4d5993970dbafb746f23140d3abfa0d93cc174e766a302d125f7d"}, - {file = "marisa_trie-0.7.8-cp37-cp37m-win_amd64.whl", hash = "sha256:579d69981b18f427bd8e540199c4de400a2bd4ae98e96c814a12cbf766e7029b"}, - {file = "marisa_trie-0.7.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:08858920d0e09ca07d239252884fd72db2abb56c35ff463145ffc9c1277a4f34"}, - {file = "marisa_trie-0.7.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a1b4d07158a3f9b4e84ee709a1fa86b9e11f3dd3b1e6fc45493195105a029545"}, - {file = "marisa_trie-0.7.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0359f392679774d1ff014f12efdf48da5d661e6241531ff55a3ae5a72a1137e"}, - {file = "marisa_trie-0.7.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c1daaa8c38423fbd119db6654f92740d5ee40d1185a2bbc47afae6712b9ebfc"}, - {file = "marisa_trie-0.7.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:266bf4b6e00b4cff2b8618533919d38b883127f4e5c0af0e0bd78a042093dd99"}, - {file = "marisa_trie-0.7.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fd7e71d8d85d04d2a5d23611663b2d322b60c98c2edab7e9ef9a2019f7435c5b"}, - {file = "marisa_trie-0.7.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:66b13382be3c277f32143e6c814344118721c7954b2bfb57f5cfe93d17e63c9e"}, - {file = "marisa_trie-0.7.8-cp38-cp38-win32.whl", hash = "sha256:d75b5d642b3d1e47a0ab649fb5eb6bf3681a5e1d3793c8ea7546586ab72731fd"}, - {file = "marisa_trie-0.7.8-cp38-cp38-win_amd64.whl", hash = "sha256:07c14c88fde8a0ac55139f9fe763dc0deabc4b7950047719ae986ca62135e1fb"}, - {file = "marisa_trie-0.7.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c8df5238c7b29498f4ee24fd3ee25e0129b3c56beaed1dd1628bce0ebac8ec8c"}, - {file = "marisa_trie-0.7.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db2bdc480d83a1a566b3a64027f9fb34eae98bfe45788c41a45e99d430cbf48a"}, - {file = "marisa_trie-0.7.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:80b22bdbebc3e6677e83db1352e4f6d478364107874c031a34a961437ead4e93"}, - {file = 
"marisa_trie-0.7.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6412c816be723a0f11dd41225a30a08182cf2b3b7b3c882c44335003bde47003"}, - {file = "marisa_trie-0.7.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fcdb7f802db43857df3825c4c11acd14bb380deb961ff91e260950886531400"}, - {file = "marisa_trie-0.7.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5cf04156f38dc46f0f14423f98559c5def7d83f3a30f8a580c27ad3b0311ce76"}, - {file = "marisa_trie-0.7.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c53b1d02f4974ecb52c6e8c6f4f1dbf3a15e79bc3861f4ad48b14e4e77c82342"}, - {file = "marisa_trie-0.7.8-cp39-cp39-win32.whl", hash = "sha256:75317347f20bf05ab2ce5537a90989b1439b5e1752f558aad7b5d6b43194429b"}, - {file = "marisa_trie-0.7.8-cp39-cp39-win_amd64.whl", hash = "sha256:82ba3caed5acfdff6a23d6881cc1927776b7320415261b6b24f48d0a190ab890"}, - {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:43abd082a21295b04859705b088d15acac8956587557680850e3149a79e36789"}, - {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d891f0138e5aecc9c5afb7b0a57c758e22c5b5c7c0edb0a1f21ae933259815"}, - {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9031184fe2215b591a6cdefe5d6d4901806fd7359e813c485a7ff25ea69d603c"}, - {file = "marisa_trie-0.7.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8ccb3ba8a2a589b8a7aed693d564f20a6d3bbbb552975f904ba311cea6b85706"}, - {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f49a2cba047e643e5cd295d75de59f1df710c5e919cd376ac06ead513439881b"}, - {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d37ea556bb99d9b0dfbe8fd6bdb17e91b91d04531be9e3b8b1b7b7f76ea55637"}, - {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55a5aea422a4c0c9ef143d3703323f2a43b4a5315fc90bbb6e9ff18544b8d931"}, - {file = "marisa_trie-0.7.8-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d19f363b981fe9b4a302060a8088fd1f00906bc315db24f5d6726b5c309cc47e"}, - {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e0d51c31fb41b6bc76c1abb7cf2d63a6e0ba7feffc96ea3d92b4d5084d71721a"}, - {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71ed6286e9d593dac035b8516e7ec35a1b54a7d9c6451a9319e918a8ef722714"}, - {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc1c1dca06c0fdcca5bb261a09eca2b3bcf41eaeb467caf600ac68e77d3ed2c0"}, - {file = "marisa_trie-0.7.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:891be5569cd6e3a059c2de53d63251aaaef513d68e8d2181f71378f9cb69e1ab"}, +python-versions = ">=3.7" +files = [ + {file = "marisa_trie-1.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:61fab91fef677f0af0e818e61595f2334f7e0b3e122b24ec65889aae69ba468d"}, + {file = "marisa_trie-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f5b3080316de735bd2b07265de5eea3ae176fa2fc60f9871aeaa9cdcddfc8f7"}, + {file = "marisa_trie-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:77bfde3287314e91e28d3a882c7b87519ef0ee104c921df72c7819987d5e4863"}, + {file = 
"marisa_trie-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4fbb1ec1d9e891060a0aee9f9c243acec63de1e197097a14850ba38ec8a4013"}, + {file = "marisa_trie-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e04e9c86fe8908b61c2aebb8444217cacaed15b93d2dccaac3849e36a6dc660"}, + {file = "marisa_trie-1.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a7c75a508f44e40f7af8448d466045a97534adcbb026e63989407cefb9ebfa6"}, + {file = "marisa_trie-1.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5321211647609869907e81b0230ad2dfdfa7e19fe1ee469b46304a622391e6a1"}, + {file = "marisa_trie-1.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:88660e6ee0f821872aaf63ba4b9a7513428b9cab20c69cc013c368bd72c3a4fe"}, + {file = "marisa_trie-1.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4e4535fc5458de2b59789e574cdd55923d63de5612dc159d33941af79cd62786"}, + {file = "marisa_trie-1.2.0-cp310-cp310-win32.whl", hash = "sha256:bdd1d4d430e33abbe558971d1bd57da2d44ca129fa8a86924c51437dba5cb345"}, + {file = "marisa_trie-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:c729e2b8f9699874b1372b5a01515b340eda1292f5e08a3fe4633b745f80ad7a"}, + {file = "marisa_trie-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d62985a0e6f2cfeb36cd6afa0460063bbe83ef4bfd9afe189a99103487547210"}, + {file = "marisa_trie-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1890cc993149db4aa8242973526589e8133c3f92949b0ac74c2c9a6596707ae3"}, + {file = "marisa_trie-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26177cd0dadb7b44f47c17c40e16ac157c4d22ac7ed83b5a47f44713239e10d1"}, + {file = "marisa_trie-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3425dc81d49a374be49e3a063cb6ccdf57973201b0a30127082acea50562a85e"}, + {file = "marisa_trie-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:525b8df41a1a7337ed7f982eb63b704d7d75f047e30970fcfbe9cf6fc22c5991"}, + {file = "marisa_trie-1.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c643c66bbde6a115e4ec8713c087a9fe9cb7b7c684e6af4cf448c120fa427ea4"}, + {file = "marisa_trie-1.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a83fe83e0eab9154a2dc7c556898c86584b7779ddf4214c606fce4ceff07c13"}, + {file = "marisa_trie-1.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:49701db6bb8f1ec0133abd95f0a4891cfd6f84f3bd019e343037e31a5a5b0210"}, + {file = "marisa_trie-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a3f0562863deaad58c5dc3a51f706da92582bc9084189148a45f7a12fe261a51"}, + {file = "marisa_trie-1.2.0-cp311-cp311-win32.whl", hash = "sha256:b08968ccad00f54f31e38516e4452fae59dd15a3fcee56aea3101ba2304680b3"}, + {file = "marisa_trie-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3ef375491e7dd71a0a7e7bf288c88750942bd1ee0c379dcd6ad43e31af67d00"}, + {file = "marisa_trie-1.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:39b88f126988ea83e8458259297d2b2f9391bfba8f4dc5d7a246813aae1c1def"}, + {file = "marisa_trie-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ec167b006884a90d130ee30518a9aa44cb40211f702bf07031b2d7d4d1db569b"}, + {file = "marisa_trie-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b855e6286faef5411386bf9d676dfb545c09f7d109f197f347c9366aeb12f07"}, + {file = "marisa_trie-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8cd287ff323224d87c2b739cba39614aac3737c95a254e0ff70e77d9b8df226d"}, + {file = "marisa_trie-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d8a1c0361165231f4fb915237470afc8cc4803c535f535f4fc42ca72855b124"}, + {file = "marisa_trie-1.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3267f438d258d7d85ee3dde363c4f96c3196ca9cd9e63fe429a59543cc544b15"}, + {file = "marisa_trie-1.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7c87a0c2cccce12b07bfcb70708637c0816970282d966a1531ecda1a24bd1cc8"}, + {file = "marisa_trie-1.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d3c0e38f0501951e2322f7274a39b8e2344bbd91ceaa9da439f46022570ddc9d"}, + {file = "marisa_trie-1.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cd88a338c87e6dc130b6cea7b697580c21f0c83a8a8b46671cfecbb713d3fe24"}, + {file = "marisa_trie-1.2.0-cp312-cp312-win32.whl", hash = "sha256:5cea60975184f03fbcff51339df0eb44d2abe106a1693983cc64415eb87b897b"}, + {file = "marisa_trie-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:b04a07b99b62b9bdf3eaf1d44571a3293ce249ce8971944e780c9c709593462f"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c11af35d9304de420b359741e12b885d04f11403697efcbbe8cb50f834261ebc"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2db8e74493c3bffb480c54afaa88890a39bf90063ff5b322acf64bf076e4b36e"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bcc6613bc873136dc62609b66aaa27363e2bd46c03fdab62d638f7cf69d5f82"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5cb731581effb3e05258f3ddc2a155475de74bb00f61eb280f991e13b48f783"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:eba1061bbeaeec4149282beab2ae163631606f119f549a10246b014e13f9047b"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:015594427360c6ad0fa94d51ee3d50fb83b0f7278996497fd2d69f877c3de9bd"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:36d65bcbf22a70cdd0202bd8608c2feecc58bdb9e5dd9a2f5a723b651fcab287"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-win32.whl", hash = "sha256:bc138625b383998f5cd0cbf6cd38d66d414f3786ae6d7b4e4a6fc970140ef4e9"}, + {file = "marisa_trie-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:27d270a64eb655754dfb4e352c60a084b16ab999b3a97a0cdc7dbecbca3c0e35"}, + {file = "marisa_trie-1.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fa1fa7f67d317a921315a65e266b9e156ce5a956076ec2b6dbf72d67c7df8216"}, + {file = "marisa_trie-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dccef41d4af11a03558c1d101de58bd723b3039a5bc4e064250008c118037ec"}, + {file = "marisa_trie-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:873efd212dfef2b736ff2ff43e10b348c428d5dbac7b8cb8aa777004bc8c7b0e"}, + {file = "marisa_trie-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8af7a21ac2ba6dc23e4257fc3a40b3070e776275d3d0b5b2ef44473ad92caf3a"}, + {file = "marisa_trie-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7202ba0ca1db5245feaebbeb3d0c776b2da1fffb0abc3500dd505f679686aa1"}, + {file = "marisa_trie-1.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:83d90be28c083323909d23ff8e9b4a2764b9e75520d1bae1a277e9fa7ca20d15"}, + {file = "marisa_trie-1.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40e2a374026492ac84232897f1f1d8f92a4a1f8bcf3f0ded1f2b8b708d1acfff"}, + {file = "marisa_trie-1.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7c6e6506bd24a5799b9b4b9cf1e8d6fa281f136396ba018a95d95d4d74715227"}, + {file = "marisa_trie-1.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:437bf6c0d7ba4cf17656a0e3bdd0b3c2c92c01fedfa670904177eef3116a4f45"}, + {file = "marisa_trie-1.2.0-cp38-cp38-win32.whl", hash = "sha256:6aeef7b364fb3b34dbba1cc57b79f1668fad0c3f039738d65a5b0d5ddce15f47"}, + {file = "marisa_trie-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:02f773e85cc566a24c0e0e28c744052db7691c4f13d02e4257bc657a49b9ab14"}, + {file = "marisa_trie-1.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ff705cb3b907bdeacb8c4b3bf0541691f52b101014d189a707ca41ebfacad59"}, + {file = "marisa_trie-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006419c59866979188906babc42ae0918081c18cabc2bdabca027f68c081c127"}, + {file = "marisa_trie-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7196691681ecb8a12629fb6277c33bafdb27cf2b6c18c28bc48fa42a15eab8f"}, + {file = "marisa_trie-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaf052c0a1f4531ee12fd4c637212e77ad2af8c3b38a0d3096622abd01a22212"}, + {file = "marisa_trie-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fb95f3ab95ba933f6a2fa2629185e9deb9da45ff2aa4ba8cc8f722528c038ef"}, + {file = "marisa_trie-1.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7459b1e1937e33daed65a6d55f8b95f9a8601f4f8749d01641cf548ecac03840"}, + {file = "marisa_trie-1.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:902ea948677421093651ca98df62d255383f865f7c353f956ef666e92500e79f"}, + {file = "marisa_trie-1.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fdf7a2d066907816726f3bf241b8cb05b698d6ffaa3c5ea2658d4ba69e87ec57"}, + {file = "marisa_trie-1.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3540bb85b38dfc17060263e061c95a0a435681b04543d1ae7e8d7441a9790593"}, + {file = "marisa_trie-1.2.0-cp39-cp39-win32.whl", hash = "sha256:fe1394e1f262e5b45d22d30bd1ef75174d1f2772e86716b5f93f9c29dfc1a779"}, + {file = "marisa_trie-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:84c44cb13803723f0f76aa2ba1a657f762a0bb9d8a9b80dfff249bb1c3218dd6"}, + {file = "marisa_trie-1.2.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:035c4c8f3b313b4d7b7451ddd539da811a11077a9e359c6a0345f816b1bdccb3"}, + {file = "marisa_trie-1.2.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d4f05c2ee218a5ab09d269b640d06f9708b0cf37c842344cbdffb0661c74c472"}, + {file = "marisa_trie-1.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92ac63e1519598de946c7d9346df3bb52ed96968eb3021b4e89b51d79bc72a86"}, + {file = "marisa_trie-1.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:045f32eaeb5dcdb5beadb571ba616d7a34141764b616eebb4decce71b366f5fa"}, + {file = "marisa_trie-1.2.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb60c2f9897ce2bfc31a69ac25a040de4f8643ab2a339bb0ff1185e1a9dedaf8"}, + {file = "marisa_trie-1.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f19c5fcf23c02f1303deb69c67603ee37ed8f01de2d8b19f1716a6cf5afd5455"}, + {file = 
"marisa_trie-1.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a06a77075240eb83a47b780902322e66c968a06a2b6318cab06757c65ea64190"}, + {file = "marisa_trie-1.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:125016400449e46ec0e5fabd14c8314959c4dfa02ffc2861195c99efa2b5b011"}, + {file = "marisa_trie-1.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c57647dd9f9ba16fc5bb4679c915d7d48d5c0b25134fb10f095ccd839686a027"}, + {file = "marisa_trie-1.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6601e74338fb31e1b20674257706150113463182a01d3a1310df6b8840720b17"}, + {file = "marisa_trie-1.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ce2f68e1000c4c72820c5b2c9d037f326fcf75f036453a5e629f225f99b92cfc"}, + {file = "marisa_trie-1.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:069ac10a133d96b3f3ed1cc071b973a3f28490345e7941c778a1d81cf176f04a"}, + {file = "marisa_trie-1.2.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:de9911480ce2a0513582cb84ee4484e5ee8791e692276c7f5cd7378e114d1988"}, + {file = "marisa_trie-1.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cfec001cf233e8853a29e1c2bb74031c217aa61e7bd19389007e04861855731"}, + {file = "marisa_trie-1.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd1f3ef8de89684fbdd6aaead09d53b82e718bad4375d2beb938cbd24b48c51a"}, + {file = "marisa_trie-1.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f5d8c1ecc85283b5b03a1475a5da723b94b3beda752c895b2f748477d8f1b1"}, + {file = "marisa_trie-1.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2e7540f844c1de493a90ad7d0f5bffc6a2cba19fe312d6db7b97aceff11d97f8"}, + {file = "marisa_trie-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2fb9243f66563285677079c9dccc697d35985287bacb36c8e685305687b0e025"}, + {file = "marisa_trie-1.2.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:58e2b84cbb6394f9c567f1f4351fc2995a094e1b684da9b577d4139b145401d6"}, + {file = "marisa_trie-1.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b4a8d3ed1f1b8f551b52e11a1265eaf0718f06bb206654b2c529cecda0913dd"}, + {file = "marisa_trie-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97652c5fbc92f52100afe1c4583625015611000fa81606ad17f1b3bbb9f3bfa"}, + {file = "marisa_trie-1.2.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7183d84da20c89b2a366bf581f0d79d1e248909678f164e8536f291120432e8"}, + {file = "marisa_trie-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c7f4df4163202b0aa5dad3eeddf088ecb61e9101986c8b31f1e052ebd6df9292"}, + {file = "marisa_trie-1.2.0.tar.gz", hash = "sha256:fedfc67497f8aa2757756b5cf493759f245d321fb78914ce125b6d75daa89b5f"}, ] [package.dependencies] @@ -1155,28 +1279,28 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = 
"packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "parsel" -version = "1.8.1" +version = "1.9.1" description = "Parsel is a library to extract data from HTML and XML using XPath and CSS selectors" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "parsel-1.8.1-py2.py3-none-any.whl", hash = "sha256:2708fc74daeeb4ce471e2c2e9089b650ec940c7a218053e57421e69b5b00f82c"}, - {file = "parsel-1.8.1.tar.gz", hash = "sha256:aff28e68c9b3f1a901db2a4e3f158d8480a38724d7328ee751c1a4e1c1801e39"}, + {file = "parsel-1.9.1-py2.py3-none-any.whl", hash = "sha256:c4a777ee6c3ff5e39652b58e351c5cf02c12ff420d05b07a7966aebb68ab1700"}, + {file = "parsel-1.9.1.tar.gz", hash = "sha256:14e00dc07731c9030db620c195fcae884b5b4848e9f9c523c6119f708ccfa9ac"}, ] [package.dependencies] -cssselect = ">=0.9" +cssselect = ">=1.2.0" jmespath = "*" lxml = "*" packaging = "*" @@ -1262,18 +1386,19 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "platformdirs" -version = "4.1.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "playwright" @@ -1297,13 +1422,13 @@ pyee = "11.0.1" [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1312,39 +1437,39 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protego" -version = "0.3.0" +version = "0.3.1" description = "Pure-Python robots.txt parser with support for modern conventions" optional = false 
-python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Protego-0.3.0-py2.py3-none-any.whl", hash = "sha256:db38f6a945839d8162a4034031a21490469566a2726afb51d668497c457fb0aa"}, - {file = "Protego-0.3.0.tar.gz", hash = "sha256:04228bffde4c6bcba31cf6529ba2cfd6e1b70808fdc1d2cb4301be6b28d6c568"}, + {file = "Protego-0.3.1-py2.py3-none-any.whl", hash = "sha256:2fbe8e9b7a7dbc5016a932b14c98d236aad4c29290bbe457b8d2779666ef7a41"}, + {file = "Protego-0.3.1.tar.gz", hash = "sha256:e94430d0d25cbbf239bc849d86c5e544fbde531fcccfa059953c7da344a1712c"}, ] [[package]] name = "pyasn1" -version = "0.5.1" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, - {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" @@ -1359,13 +1484,13 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] @@ -1430,13 +1555,13 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = 
"pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -1520,24 +1645,24 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] name = "queuelib" -version = "1.6.2" +version = "1.7.0" description = "Collection of persistent (disk-based) and non-persistent (memory-based) queues" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "queuelib-1.6.2-py2.py3-none-any.whl", hash = "sha256:4b96d48f650a814c6fb2fd11b968f9c46178b683aad96d68f930fe13a8574d19"}, - {file = "queuelib-1.6.2.tar.gz", hash = "sha256:4b207267f2642a8699a1f806045c56eb7ad1a85a10c0e249884580d139c2fcd2"}, + {file = "queuelib-1.7.0-py2.py3-none-any.whl", hash = "sha256:b07aaa2410caac3a0021ee4f4026acdac992b0fb9a2cbeb34a918617df3c12a7"}, + {file = "queuelib-1.7.0.tar.gz", hash = "sha256:2855162096cf0230510890b354379ea1c0ff19d105d3147d349d2433bb222b08"}, ] [[package]] @@ -1563,104 +1688,90 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "regex" -version = "2023.12.25" +version = "2024.5.15" description = "Alternative regular expression module, to replace re." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, - {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, - {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, - {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, - {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, - {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, - {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, - {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, - {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, - {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, - {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, - {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, - {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, - {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, - {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, - {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, - {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, - {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, - {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, - {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = "sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, - {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, - {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, - {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, - {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, - {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, - {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, - {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, - {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, - {file = 
"regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, - {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, - {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, - {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, - {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, + {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, + {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, + {file = 
"regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, + {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, + {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, + {file = 
"regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, + {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, + {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, + {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, + {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, + {file = 
"regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, + {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, + {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, + {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, ] [[package]] @@ -1686,33 +1797,33 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" -version = "1.5.1" +version = "2.1.0" description = "File transport adapter for Requests" optional = false python-versions = "*" files = [ - {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, - {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, + {file = "requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c"}, + {file = "requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658"}, ] [package.dependencies] requests = ">=1.0.0" -six = "*" [[package]] name = "scrapy" -version = "2.11.0" +version = "2.11.2" description = "A 
high-level Web Crawling and Web Scraping framework" optional = false python-versions = ">=3.8" files = [ - {file = "Scrapy-2.11.0-py2.py3-none-any.whl", hash = "sha256:a7f36544d1f5ceb13cff9b7bc904bd7c0fc43a3af0fbe5aa2034fd937cf092d1"}, - {file = "Scrapy-2.11.0.tar.gz", hash = "sha256:3cbdedce0c3f0e0482d61be2d7458683be7cd7cf14b0ee6adfbaddb80f5b36a5"}, + {file = "Scrapy-2.11.2-py2.py3-none-any.whl", hash = "sha256:4be353d6abbb942a9f7e7614ca8b5f3d9037381176ac8d8859c8cac676e74fa0"}, + {file = "scrapy-2.11.2.tar.gz", hash = "sha256:dfbd565384fc3fffeba121f5a3a2d0899ac1f756d41432ca0879933fbfb3401d"}, ] [package.dependencies] cryptography = ">=36.0.0" cssselect = ">=0.9.1" +defusedxml = ">=0.7.1" itemadapter = ">=0.1.0" itemloaders = ">=1.0.1" lxml = ">=4.4.1" @@ -1726,7 +1837,7 @@ queuelib = ">=1.4.2" service-identity = ">=18.1.0" setuptools = "*" tldextract = "*" -Twisted = ">=18.9.0,<23.8.0" +Twisted = ">=18.9.0" w3lib = ">=1.17.0" "zope.interface" = ">=5.1.0" @@ -1767,19 +1878,18 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "69.0.3" +version = "70.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"}, + {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", 
"pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1794,13 +1904,13 @@ files = [ [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -1816,19 +1926,18 @@ files = [ [[package]] name = "sqlparse" -version = "0.4.4" +version = "0.5.0" description = "A non-validating SQL parser." optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"}, - {file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"}, + {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"}, + {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"}, ] [package.extras] -dev = ["build", "flake8"] +dev = ["build", "hatch"] doc = ["sphinx"] -test = ["pytest", "pytest-cov"] [[package]] name = "tld" @@ -1843,13 +1952,13 @@ files = [ [[package]] name = "tldextract" -version = "5.1.1" +version = "5.1.2" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
optional = false python-versions = ">=3.8" files = [ - {file = "tldextract-5.1.1-py3-none-any.whl", hash = "sha256:b9c4510a8766d377033b6bace7e9f1f17a891383ced3c5d50c150f181e9e1cc2"}, - {file = "tldextract-5.1.1.tar.gz", hash = "sha256:9b6dbf803cb5636397f0203d48541c0da8ba53babaf0e8a6feda2d88746813d4"}, + {file = "tldextract-5.1.2-py3-none-any.whl", hash = "sha256:4dfc4c277b6b97fa053899fcdb892d2dc27295851ab5fac4e07797b6a21b2e46"}, + {file = "tldextract-5.1.2.tar.gz", hash = "sha256:c9e17f756f05afb5abac04fe8f766e7e70f9fe387adb1859f0f52408ee060200"}, ] [package.dependencies] @@ -1859,7 +1968,8 @@ requests = ">=2.1.0" requests-file = ">=1.4" [package.extras] -testing = ["black", "mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "tox", "types-filelock", "types-requests"] +release = ["build", "twine"] +testing = ["black", "mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "syrupy", "tox", "types-filelock", "types-requests"] [[package]] name = "tomli" @@ -1898,41 +2008,39 @@ gui = ["Gooey (>=1.0.1)"] [[package]] name = "twisted" -version = "22.10.0" +version = "24.3.0" description = "An asynchronous networking framework written in Python" optional = false -python-versions = ">=3.7.1" +python-versions = ">=3.8.0" files = [ - {file = "Twisted-22.10.0-py3-none-any.whl", hash = "sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"}, - {file = "Twisted-22.10.0.tar.gz", hash = "sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31"}, + {file = "twisted-24.3.0-py3-none-any.whl", hash = "sha256:039f2e6a49ab5108abd94de187fa92377abe5985c7a72d68d0ad266ba19eae63"}, + {file = "twisted-24.3.0.tar.gz", hash = "sha256:6b38b6ece7296b5e122c9eb17da2eeab3d98a198f50ca9efd00fb03e5b4fd4ae"}, ] [package.dependencies] -attrs = ">=19.2.0" -Automat = ">=0.8.0" +attrs = ">=21.3.0" +automat = ">=0.8.0" constantly = ">=15.1" hyperlink = ">=17.1.1" -incremental = ">=21.3.0" +incremental = ">=22.10.0" twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""} -typing-extensions = ">=3.6.5" -"zope.interface" = ">=4.4.2" +typing-extensions = ">=4.2.0" +zope-interface = ">=5" [package.extras] -all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] -conch-nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] -contextvars = ["contextvars (>=2.4,<3)"] -dev = ["coverage (>=6b1,<7)", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)"] -dev-release = ["pydoctor (>=22.9.0,<22.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)"] -gtk-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pygobject", "pyopenssl 
(>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +all-non-platform = ["twisted[conch,http2,serial,test,tls]", "twisted[conch,http2,serial,test,tls]"] +conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] +dev = ["coverage (>=6b1,<7)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "twisted[dev-release]", "twistedchecker (>=0.7,<1.0)"] +dev-release = ["pydoctor (>=23.9.0,<23.10.0)", "pydoctor (>=23.9.0,<23.10.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] +gtk-platform = ["pygobject", "pygobject", "twisted[all-non-platform]", "twisted[all-non-platform]"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] -osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +macos-platform = ["pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "twisted[all-non-platform]", "twisted[all-non-platform]"] +mypy = ["mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "twisted[all-non-platform,dev]", "types-pyopenssl", "types-setuptools"] +osx-platform = ["twisted[macos-platform]", "twisted[macos-platform]"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] -test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.0,<7.0)"] +test = ["cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] 
+windows-platform = ["pywin32 (!=226)", "pywin32 (!=226)", "twisted[all-non-platform]", "twisted[all-non-platform]"] [[package]] name = "twisted-iocpsupport" @@ -1964,24 +2072,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -2080,47 +2188,47 @@ files = [ [[package]] name = "zope-interface" -version = "6.1" +version = "6.4.post2" description = "Interfaces for Python" optional = false python-versions = ">=3.7" files = [ - {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"}, - {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"}, - {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"}, - {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"}, - {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"}, - {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"}, - {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"}, - {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"}, - {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"}, - {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"}, - {file = 
"zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"}, - {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"}, - {file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"}, - {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"}, - {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"}, - {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"}, - {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"}, - {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"}, - {file = "zope.interface-6.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:2f8d89721834524a813f37fa174bac074ec3d179858e4ad1b7efd4401f8ac45d"}, - {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13b7d0f2a67eb83c385880489dbb80145e9d344427b4262c49fbf2581677c11c"}, - {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef43ee91c193f827e49599e824385ec7c7f3cd152d74cb1dfe02cb135f264d83"}, - {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e441e8b7d587af0414d25e8d05e27040d78581388eed4c54c30c0c91aad3a379"}, - {file = "zope.interface-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89b28772fc2562ed9ad871c865f5320ef761a7fcc188a935e21fe8b31a38ca9"}, - {file = "zope.interface-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70d2cef1bf529bff41559be2de9d44d47b002f65e17f43c73ddefc92f32bf00f"}, - {file = "zope.interface-6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad54ed57bdfa3254d23ae04a4b1ce405954969c1b0550cc2d1d2990e8b439de1"}, - {file = "zope.interface-6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef467d86d3cfde8b39ea1b35090208b0447caaabd38405420830f7fd85fbdd56"}, - {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af47f10cfc54c2ba2d825220f180cc1e2d4914d783d6fc0cd93d43d7bc1c78b"}, - {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9559138690e1bd4ea6cd0954d22d1e9251e8025ce9ede5d0af0ceae4a401e43"}, - {file = "zope.interface-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:964a7af27379ff4357dad1256d9f215047e70e93009e532d36dcb8909036033d"}, - {file = "zope.interface-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:387545206c56b0315fbadb0431d5129c797f92dc59e276b3ce82db07ac1c6179"}, - {file = "zope.interface-6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:57d0a8ce40ce440f96a2c77824ee94bf0d0925e6089df7366c2272ccefcb7941"}, - {file = "zope.interface-6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ebc4d34e7620c4f0da7bf162c81978fce0ea820e4fa1e8fc40ee763839805f3"}, - {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a804abc126b33824a44a7aa94f06cd211a18bbf31898ba04bd0924fbe9d282d"}, - {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f294a15f7723fc0d3b40701ca9b446133ec713eafc1cc6afa7b3d98666ee1ac"}, - {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"}, - {file = "zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"}, + {file = "zope.interface-6.4.post2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c"}, + {file = "zope.interface-6.4.post2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9"}, + {file = "zope.interface-6.4.post2-cp310-cp310-win_amd64.whl", hash = "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7"}, + {file = "zope.interface-6.4.post2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede"}, + {file = "zope.interface-6.4.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8"}, + {file = "zope.interface-6.4.post2-cp311-cp311-win_amd64.whl", hash = "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250"}, + {file = "zope.interface-6.4.post2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde"}, + {file = "zope.interface-6.4.post2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854"}, + {file = "zope.interface-6.4.post2-cp312-cp312-win_amd64.whl", hash = "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc"}, + {file = "zope.interface-6.4.post2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1"}, + {file = "zope.interface-6.4.post2-cp37-cp37m-win_amd64.whl", hash = "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82"}, + {file = "zope.interface-6.4.post2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b"}, + {file = "zope.interface-6.4.post2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43"}, + {file = "zope.interface-6.4.post2-cp38-cp38-win_amd64.whl", hash = "sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5"}, + {file = "zope.interface-6.4.post2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2"}, + {file = "zope.interface-6.4.post2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671"}, + {file = 
"zope.interface-6.4.post2-cp39-cp39-win_amd64.whl", hash = "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15"}, + {file = "zope.interface-6.4.post2.tar.gz", hash = "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e"}, ] [package.dependencies] @@ -2134,4 +2242,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "a00940952ef5f1880983cf58aa8529703cf887145d9f76d53123a9ddca5ac77f" +content-hash = "245dcc043d384c7a9667a1d6d1100d84c41304739918fc12e224955727a67ce0" diff --git a/pyproject.toml b/pyproject.toml index 5a4be901..cd733b11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,13 +63,13 @@ wheel = "^0.42.0" black = "24.1.1" certifi="2024.2.2" dateparser="1.2" -extruct="0.16.0" +extruct="0.17.0" flake8 = "7.0.0" html2text="2020.1.16" jmespath="1.0.1" image = "1.5.33" -itemadapter="0.8.0" -itemloaders="1.1.0" +itemadapter="0.9.0" +itemloaders="1.3.1" isodate="0.6.1" overrides="3.1.0" Pillow="10.1.0" @@ -80,7 +80,7 @@ python-dateutil="2.8.2" python-dotenv="1.0.1" requests="2.31.0" six="1.16.0" -Scrapy="2.11" +Scrapy="2.11.2" scrapy-splash="0.9.0" urllib3="2.2.0" vobject="0.9.6.1" diff --git a/requirements.txt b/requirements.txt index 0dd1e44f..e4ed4624 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -anyio==4.2.0 ; python_version >= "3.10" and python_version < "4.0" -asgiref==3.7.2 ; python_version >= "3.10" and python_version < "4.0" +anyio==4.4.0 ; python_version >= "3.10" and python_version < "4.0" +asgiref==3.8.1 ; python_version >= "3.10" and python_version < "4.0" async-lru==2.0.4 ; python_version >= "3.10" and python_version < "4.0" attrs==23.2.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" @@ -12,87 +12,89 @@ charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") constantly==23.10.4 ; python_version >= "3.10" and python_version < "4.0" -courlan==0.9.5 ; python_version >= "3.10" and python_version < "4.0" +courlan==1.2.0 ; python_version >= "3.10" and python_version < "4.0" cryptography==41.0.7 ; python_version >= "3.10" and python_version < "4.0" cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" dateparser==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -django==5.0.1 ; python_version >= "3.10" and python_version < "4.0" -exceptiongroup==1.2.0 ; python_version >= "3.10" and python_version < "3.11" -extruct==0.16.0 ; python_version >= "3.10" and python_version < "4.0" -filelock==3.13.1 ; python_version >= "3.10" and python_version < "4.0" +defusedxml==0.7.1 ; python_version >= "3.10" and python_version < "4.0" +django==5.0.6 ; python_version >= "3.10" and python_version < "4.0" +exceptiongroup==1.2.1 ; python_version >= "3.10" and python_version < "3.11" +extruct==0.17.0 ; python_version >= "3.10" and python_version < "4.0" +filelock==3.15.4 ; python_version >= "3.10" and python_version < "4.0" flake8==7.0.0 ; python_version >= "3.10" and python_version < "4.0" greenlet==3.0.3 ; python_version >= "3.10" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" -html-text==0.5.2 ; python_version >= "3.10" and python_version < "4.0" +html-text==0.6.2 ; 
python_version >= "3.10" and python_version < "4.0" html2text==2020.1.16 ; python_version >= "3.10" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" -htmldate==1.7.0 ; python_version >= "3.10" and python_version < "4.0" -httpcore==1.0.2 ; python_version >= "3.10" and python_version < "4.0" +htmldate==1.8.1 ; python_version >= "3.10" and python_version < "4.0" +httpcore==1.0.5 ; python_version >= "3.10" and python_version < "4.0" httpx==0.26.0 ; python_version >= "3.10" and python_version < "4.0" hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" -idna==3.6 ; python_version >= "3.10" and python_version < "4.0" +idna==3.7 ; python_version >= "3.10" and python_version < "4.0" image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" incremental==22.10.0 ; python_version >= "3.10" and python_version < "4.0" iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" isodate==0.6.1 ; python_version >= "3.10" and python_version < "4.0" -itemadapter==0.8.0 ; python_version >= "3.10" and python_version < "4.0" -itemloaders==1.1.0 ; python_version >= "3.10" and python_version < "4.0" +itemadapter==0.9.0 ; python_version >= "3.10" and python_version < "4.0" +itemloaders==1.3.1 ; python_version >= "3.10" and python_version < "4.0" jmespath==1.0.1 ; python_version >= "3.10" and python_version < "4.0" jstyleson==0.0.2 ; python_version >= "3.10" and python_version < "4.0" -justext==3.0.0 ; python_version >= "3.10" and python_version < "4.0" -langcodes==3.3.0 ; python_version >= "3.10" and python_version < "4.0" -langcodes[data]==3.3.0 ; python_version >= "3.10" and python_version < "4.0" -language-data==1.1 ; python_version >= "3.10" and python_version < "4.0" -lxml==5.1.0 ; python_version >= "3.10" and python_version < "4.0" -marisa-trie==0.7.8 ; python_version >= "3.10" and python_version < "4.0" +justext==3.0.1 ; python_version >= "3.10" and python_version < "4.0" +langcodes[data]==3.4.0 ; python_version >= "3.10" and python_version < "4.0" +language-data==1.2.0 ; python_version >= "3.10" and python_version < "4.0" +lxml-html-clean==0.1.1 ; python_version >= "3.10" and python_version < "4.0" +lxml==5.2.2 ; python_version >= "3.10" and python_version < "4.0" +lxml[html-clean]==5.2.2 ; python_version >= "3.10" and python_version < "4.0" +marisa-trie==1.2.0 ; python_version >= "3.10" and python_version < "4.0" mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" mf2py==2.0.1 ; python_version >= "3.10" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" -packaging==23.2 ; python_version >= "3.10" and python_version < "4.0" -parsel==1.8.1 ; python_version >= "3.10" and python_version < "4.0" +packaging==24.1 ; python_version >= "3.10" and python_version < "4.0" +parsel==1.9.1 ; python_version >= "3.10" and python_version < "4.0" pathspec==0.12.1 ; python_version >= "3.10" and python_version < "4.0" pillow==10.1.0 ; python_version >= "3.10" and python_version < "4.0" -platformdirs==4.1.0 ; python_version >= "3.10" and python_version < "4.0" +platformdirs==4.2.2 ; python_version >= "3.10" and python_version < "4.0" playwright==1.41.2 ; python_version >= "3.10" and python_version < "4.0" -pluggy==1.4.0 ; python_version >= "3.10" and python_version < "4.0" -protego==0.3.0 ; python_version >= "3.10" and python_version < "4.0" -pyasn1-modules==0.3.0 ; python_version >= "3.10" and 
python_version < "4.0" -pyasn1==0.5.1 ; python_version >= "3.10" and python_version < "4.0" +pluggy==1.5.0 ; python_version >= "3.10" and python_version < "4.0" +protego==0.3.1 ; python_version >= "3.10" and python_version < "4.0" +pyasn1-modules==0.4.0 ; python_version >= "3.10" and python_version < "4.0" +pyasn1==0.6.0 ; python_version >= "3.10" and python_version < "4.0" pycodestyle==2.11.1 ; python_version >= "3.10" and python_version < "4.0" -pycparser==2.21 ; python_version >= "3.10" and python_version < "4.0" +pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" pyee==11.0.1 ; python_version >= "3.10" and python_version < "4.0" pyflakes==3.2.0 ; python_version >= "3.10" and python_version < "4.0" pyopenssl==23.3.0 ; python_version >= "3.10" and python_version < "4.0" -pyparsing==3.1.1 ; python_version >= "3.10" and python_version < "4.0" +pyparsing==3.1.2 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" pyrdfa3==3.6.2 ; python_version >= "3.10" and python_version < "4.0" pytest==8.0.0 ; python_version >= "3.10" and python_version < "4.0" python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" -pytz==2023.3.post1 ; python_version >= "3.10" and python_version < "4.0" -queuelib==1.6.2 ; python_version >= "3.10" and python_version < "4.0" +pytz==2024.1 ; python_version >= "3.10" and python_version < "4.0" +queuelib==1.7.0 ; python_version >= "3.10" and python_version < "4.0" rdflib==7.0.0 ; python_version >= "3.10" and python_version < "4.0" -regex==2023.12.25 ; python_version >= "3.10" and python_version < "4.0" -requests-file==1.5.1 ; python_version >= "3.10" and python_version < "4.0" +regex==2024.5.15 ; python_version >= "3.10" and python_version < "4.0" +requests-file==2.1.0 ; python_version >= "3.10" and python_version < "4.0" requests==2.31.0 ; python_version >= "3.10" and python_version < "4.0" scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" -scrapy==2.11.0 ; python_version >= "3.10" and python_version < "4.0" +scrapy==2.11.2 ; python_version >= "3.10" and python_version < "4.0" service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==69.0.3 ; python_version >= "3.10" and python_version < "4.0" +setuptools==70.1.1 ; python_version >= "3.10" and python_version < "4.0" six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" -sniffio==1.3.0 ; python_version >= "3.10" and python_version < "4.0" +sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" soupsieve==2.5 ; python_version >= "3.10" and python_version < "4.0" -sqlparse==0.4.4 ; python_version >= "3.10" and python_version < "4.0" +sqlparse==0.5.0 ; python_version >= "3.10" and python_version < "4.0" tld==0.13 ; python_version >= "3.10" and python_version < "4" -tldextract==5.1.1 ; python_version >= "3.10" and python_version < "4.0" +tldextract==5.1.2 ; python_version >= "3.10" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" trafilatura==1.7.0 ; python_version >= "3.10" and python_version < "4.0" twisted-iocpsupport==1.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" -twisted==22.10.0 ; python_version 
>= "3.10" and python_version < "4.0" -typing-extensions==4.9.0 ; python_version >= "3.10" and python_version < "4.0" -tzdata==2023.4 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") +twisted==24.3.0 ; python_version >= "3.10" and python_version < "4.0" +typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" +tzdata==2024.1 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") tzlocal==5.2 ; python_version >= "3.10" and python_version < "4.0" urllib3==2.2.0 ; python_version >= "3.10" and python_version < "4.0" vobject==0.9.6.1 ; python_version >= "3.10" and python_version < "4.0" @@ -100,4 +102,4 @@ w3lib==2.1.2 ; python_version >= "3.10" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" wheel==0.42.0 ; python_version >= "3.10" and python_version < "4.0" xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" -zope-interface==6.1 ; python_version >= "3.10" and python_version < "4.0" +zope-interface==6.4.post2 ; python_version >= "3.10" and python_version < "4.0" From 0ab56f0d0fd58ed5b939b4f7d5f2b619e3ad6d93 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 28 Jun 2024 14:13:46 +0200 Subject: [PATCH 491/590] chore: update dependencies and drop "pyOpenSSL"-dependency - updated all packages but "overrides" to their latest version - (due to breaking changes in "overrides" between v3.1.0 <-> v7.7.0, updating this package requires additional work within crawlers that used "overrides" in the past) - drop "pyOpenSSL" dependency - according to the PyPi description of this package, "cryptography" should be used instead (cryptography is used by scrapy --- poetry.lock | 392 +++++++++++++++++++++++++---------------------- pyproject.toml | 35 ++--- requirements.txt | 44 +++--- 3 files changed, 248 insertions(+), 223 deletions(-) diff --git a/poetry.lock b/poetry.lock index 67144a11..4d2cd376 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "anyio" @@ -92,13 +92,13 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] [[package]] name = "babel" -version = "2.14.0" +version = "2.15.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, ] [package.extras] @@ -127,33 +127,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.1.1" +version = "24.4.2" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"}, - {file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"}, - {file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"}, - {file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"}, - {file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"}, - {file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"}, - {file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"}, - {file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"}, - {file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"}, - {file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"}, - {file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"}, - {file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"}, - {file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"}, - {file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"}, - {file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"}, - {file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"}, - {file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"}, - {file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"}, - {file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"}, - {file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"}, - {file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"}, - {file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = 
"black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -173,13 +173,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -399,47 +399,56 @@ urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} [[package]] name = "cryptography" -version = "41.0.7" +version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - 
{file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = 
"cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -562,18 +571,18 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.0.0" +version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = 
"flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" +pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] @@ -675,13 +684,12 @@ lxml-html-clean = "*" [[package]] name = "html2text" -version = "2020.1.16" +version = "2024.2.26" description = "Turn HTML into equivalent Markdown-structured text." optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "html2text-2020.1.16-py3-none-any.whl", hash = "sha256:c7c629882da0cf377d66f073329ccf34a12ed2adf0169b9285ae4e63ef54c82b"}, - {file = "html2text-2020.1.16.tar.gz", hash = "sha256:e296318e16b059ddb97f7a8a1d6a5c1d7af4544049a01e261731d2d5cc277bbb"}, + {file = "html2text-2024.2.26.tar.gz", hash = "sha256:05f8e367d15aaabc96415376776cdd11afd5127a77fce6e36afc60c563ca2c32"}, ] [[package]] @@ -750,13 +758,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.27.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, ] [package.dependencies] @@ -1319,70 +1327,89 @@ files = [ [[package]] name = "pillow" -version = "10.1.0" +version = "10.3.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, - {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, - {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, - {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, - {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, - {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, - {file = 
"Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, - {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, - {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, - {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, - {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, - {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, - {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, - {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, - {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, - {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, - {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, - {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, - {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, - {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, - {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, - {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, - {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, - {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, - {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, - {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = 
"pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = 
"sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] [[package]] name = "platformdirs" @@ -1402,23 +1429,23 @@ type = ["mypy (>=1.8)"] [[package]] name = "playwright" -version = "1.41.2" +version = "1.44.0" description = "A high-level API to automate web browsers" optional = false python-versions = ">=3.8" files = [ - {file = "playwright-1.41.2-py3-none-macosx_10_13_x86_64.whl", hash = 
"sha256:cf68335a5dfa4038fa797a4ba0105faee0094ebbb372547d7a27feec5b23c672"}, - {file = "playwright-1.41.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:431e3a05f8c99147995e2b3e8475d07818745294fd99f1510b61756e73bdcf68"}, - {file = "playwright-1.41.2-py3-none-macosx_11_0_universal2.whl", hash = "sha256:0608717cbf291a625ba6f751061af0fc0cc9bdace217e69d87b1eb1383b03406"}, - {file = "playwright-1.41.2-py3-none-manylinux1_x86_64.whl", hash = "sha256:4bf214d812092cf5b9b9648ba84611aa35e28685519911342a7da3a3031f9ed6"}, - {file = "playwright-1.41.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa17ab44622c447de26ed8f7d99912719568d8dbc3a9db0e07f0ae1487709d9"}, - {file = "playwright-1.41.2-py3-none-win32.whl", hash = "sha256:edb210a015e70bb0d328bf1c9b65fa3a08361f33e4d7c4ddd1ad2adb6d9b4479"}, - {file = "playwright-1.41.2-py3-none-win_amd64.whl", hash = "sha256:71ead0f33e00f5a8533c037c647938b99f219436a1b27d4ba4de4e6bf0567278"}, + {file = "playwright-1.44.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:c2317a80896796fdeb03d60f06cc229e775ff2e19b80c64b1bb9b29c8a59d992"}, + {file = "playwright-1.44.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:54d44fb634d870839301c2326e1e12a178a1be0de76d0caaec230ab075c2e077"}, + {file = "playwright-1.44.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:64b67194e73b47ae72acf25f1a9cfacfef38ca2b52e4bb8b0abd385c5deeaadf"}, + {file = "playwright-1.44.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:29161b1fae71f7c402df5b15f0bd3deaeecd8b3d1ecd9ff01271700c66210e7b"}, + {file = "playwright-1.44.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8c8a3bfea17576d3f94a2363eee195cbda8dbba86975588c7eaac7792b25eee"}, + {file = "playwright-1.44.0-py3-none-win32.whl", hash = "sha256:235e37832deaa9af8a629d09955396259ab757533cc1922f9b0308b4ee0d9cdf"}, + {file = "playwright-1.44.0-py3-none-win_amd64.whl", hash = "sha256:5b8a4a1d4d50f4ff99b47965576322a8c4e34631854b862a25c1feb824be22a8"}, ] [package.dependencies] greenlet = "3.0.3" -pyee = "11.0.1" +pyee = "11.1.0" [[package]] name = "pluggy" @@ -1473,13 +1500,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] @@ -1509,20 +1536,20 @@ dev = ["tox"] [[package]] name = "pyee" -version = "11.0.1" +version = "11.1.0" description = "A rough port of Node.js's EventEmitter to Python with a few tricks of its own" optional = false python-versions = ">=3.8" files = [ - {file = "pyee-11.0.1-py3-none-any.whl", hash = "sha256:9bcc9647822234f42c228d88de63d0f9ffa881e87a87f9d36ddf5211f6ac977d"}, - {file = "pyee-11.0.1.tar.gz", hash = "sha256:a642c51e3885a33ead087286e35212783a4e9b8d6514a10a5db4e57ac57b2b29"}, + {file = "pyee-11.1.0-py3-none-any.whl", hash = "sha256:5d346a7d0f861a4b2e6c47960295bd895f816725b27d656181947346be98d7c1"}, + {file = "pyee-11.1.0.tar.gz", hash = 
"sha256:b53af98f6990c810edd9b56b87791021a8f54fd13db4edd1142438d44ba2263f"}, ] [package.dependencies] typing-extensions = "*" [package.extras] -dev = ["black", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio", "pytest-trio", "toml", "tox", "trio", "trio", "trio-typing", "twine", "twisted", "validate-pyproject[all]"] +dev = ["black", "build", "flake8", "flake8-black", "isort", "jupyter-console", "mkdocs", "mkdocs-include-markdown-plugin", "mkdocstrings[python]", "pytest", "pytest-asyncio", "pytest-trio", "sphinx", "toml", "tox", "trio", "trio", "trio-typing", "twine", "twisted", "validate-pyproject[all]"] [[package]] name = "pyflakes" @@ -1537,21 +1564,21 @@ files = [ [[package]] name = "pyopenssl" -version = "23.3.0" +version = "24.1.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, - {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, + {file = "pyOpenSSL-24.1.0-py3-none-any.whl", hash = "sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad"}, + {file = "pyOpenSSL-24.1.0.tar.gz", hash = "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"}, ] [package.dependencies] -cryptography = ">=41.0.5,<42" +cryptography = ">=41.0.5,<43" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] -test = ["flaky", "pretend", "pytest (>=3.0.1)"] +test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pyparsing" @@ -1595,13 +1622,13 @@ requests = ">=2.25.1" [[package]] name = "pytest" -version = "8.0.0" +version = "8.2.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, - {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, ] [package.dependencies] @@ -1609,21 +1636,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - 
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -1776,13 +1803,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1984,26 +2011,26 @@ files = [ [[package]] name = "trafilatura" -version = "1.7.0" -description = "Python package and command-line tool designed to gather text on the Web. It includes discovery, extraction and text processing components. Its main applications are web crawling, downloads, scraping, and extraction of main texts, metadata and comments." +version = "1.11.0" +description = "Python package and command-line tool designed to gather text on the Web, includes all necessary discovery and text processing components to perform web crawling, downloads, scraping, and extraction of main texts, metadata and comments." optional = false python-versions = ">=3.6" files = [ - {file = "trafilatura-1.7.0-py3-none-any.whl", hash = "sha256:dd272c51f55c99b44e7c5d76a67dcb17d1cbcadd3f53768f6f7d7bc5ff3280a7"}, - {file = "trafilatura-1.7.0.tar.gz", hash = "sha256:a166e67f005a6a12ef194f48c7c9fa4e1b0e36756fdd2b64e02473c356962f04"}, + {file = "trafilatura-1.11.0-py3-none-any.whl", hash = "sha256:20f016be873a2cf3e02b9798f9537d09808559fcc667d42e1c019560ca45dce7"}, + {file = "trafilatura-1.11.0.tar.gz", hash = "sha256:9334ca101c40b2904af5afcee790f0374fabca3ac388811720be65cc768787a2"}, ] [package.dependencies] certifi = "*" charset-normalizer = {version = ">=3.2.0", markers = "python_version >= \"3.7\""} -courlan = ">=0.9.5" -htmldate = ">=1.7.0" -justext = ">=3.0.0" -lxml = {version = ">=4.9.4,<6", markers = "platform_system != \"Darwin\" or python_version > \"3.8\""} +courlan = ">=1.1.0" +htmldate = ">=1.8.1" +justext = ">=3.0.1" +lxml = {version = ">=5.2.2", markers = "platform_system != \"Darwin\" or python_version > \"3.8\""} urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} [package.extras] -all = ["brotli", "cchardet (>=2.1.7)", "faust-cchardet (>=2.1.19)", "htmldate[speed] (>=1.7.0)", "py3langid (>=0.2.2)", "pycurl (>=7.45.2)"] +all = ["brotli", "cchardet (>=2.1.7)", "faust-cchardet (>=2.1.19)", "htmldate[speed] (>=1.8.1)", "py3langid (>=0.2.2)", "pycurl (>=7.45.3)", "zstandard (>=0.20.0)"] gui = ["Gooey (>=1.0.1)"] [[package]] @@ -2111,13 +2138,13 @@ devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3) [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -2128,12 +2155,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "vobject" -version = "0.9.6.1" +version = "0.9.7" description = "A full-featured Python package for parsing and creating iCalendar and vCard files" optional = false python-versions = "*" files = [ - {file = "vobject-0.9.6.1.tar.gz", hash = "sha256:96512aec74b90abb71f6b53898dd7fe47300cc940104c4f79148f0671f790101"}, + {file = "vobject-0.9.7-py2.py3-none-any.whl", hash = "sha256:67ebec81ee39fc60b7355ce077f850d5f13d99d08b110fa1abcfdbb516205e20"}, + {file = "vobject-0.9.7.tar.gz", hash = "sha256:ab727bf81de88984ada5c11f066f1e1649903d3e3d7ec91f1ce968172afd5256"}, ] [package.dependencies] @@ -2141,13 +2169,13 @@ python-dateutil = ">=2.4.0" [[package]] name = "w3lib" -version = "2.1.2" +version = "2.2.1" description = "Library of web-related functions" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "w3lib-2.1.2-py3-none-any.whl", hash = "sha256:c4432926e739caa8e3f49f5de783f336df563d9490416aebd5d39fb896d264e7"}, - {file = "w3lib-2.1.2.tar.gz", hash = "sha256:ed5b74e997eea2abe3c1321f916e344144ee8e9072a6f33463ee8e57f858a4b1"}, + {file = "w3lib-2.2.1-py3-none-any.whl", hash = "sha256:e56d81c6a6bf507d7039e0c95745ab80abd24b465eb0f248af81e3eaa46eb510"}, + {file = "w3lib-2.2.1.tar.gz", hash = "sha256:756ff2d94c64e41c8d7c0c59fea12a5d0bc55e33a531c7988b4a163deb9b07dd"}, ] [[package]] @@ -2163,13 +2191,13 @@ files = [ [[package]] name = "wheel" -version = "0.42.0" +version = "0.43.0" description = "A built-package format for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, - {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, + {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, + {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, ] [package.extras] @@ -2242,4 +2270,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "245dcc043d384c7a9667a1d6d1100d84c41304739918fc12e224955727a67ce0" +content-hash = "67f2e3e5502b8d0e64e2bfe8a5bb3707b0fa86f859b26fb5c8f5355fda2ddda8" diff --git a/pyproject.toml b/pyproject.toml index cd733b11..416f2fd6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,41 +59,38 @@ packages = [{include = "converter"}] [tool.poetry.dependencies] python = "^3.10" -wheel = "^0.42.0" -black = "24.1.1" -certifi="2024.2.2" +wheel = "0.43.0" +black = "24.4.2" +certifi="2024.6.2" dateparser="1.2" extruct="0.17.0" -flake8 = "7.0.0" -html2text="2020.1.16" +flake8 = "7.1.0" +html2text="2024.2.26" jmespath="1.0.1" image = "1.5.33" itemadapter="0.9.0" itemloaders="1.3.1" isodate="0.6.1" 
overrides="3.1.0" -Pillow="10.1.0" -playwright="1.41.2" -pyOpenSSL="23.3.0" -pytest="8.0.0" -python-dateutil="2.8.2" +Pillow="10.3.0" +playwright="1.44.0" +pytest="8.2.2" +python-dateutil="2.9.0.post0" python-dotenv="1.0.1" -requests="2.31.0" +requests="2.32.3" six="1.16.0" Scrapy="2.11.2" scrapy-splash="0.9.0" -urllib3="2.2.0" -vobject="0.9.6.1" -w3lib="2.1.2" +urllib3="2.2.2" +vobject="0.9.7" +w3lib="2.2.1" xmltodict="0.13.0" -trafilatura = "1.7" -babel = "2.14.0" +trafilatura = "1.11" +babel = "2.15.0" langcodes = {extras = ["data"], version = "^3.3.0"} -httpx = "0.26" +httpx = "0.27" async-lru = "2.0.4" -[tool.poetry.group.dev.dependencies] - [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/requirements.txt b/requirements.txt index e4ed4624..0719bfa7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,17 +3,17 @@ asgiref==3.8.1 ; python_version >= "3.10" and python_version < "4.0" async-lru==2.0.4 ; python_version >= "3.10" and python_version < "4.0" attrs==23.2.0 ; python_version >= "3.10" and python_version < "4.0" automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" -babel==2.14.0 ; python_version >= "3.10" and python_version < "4.0" +babel==2.15.0 ; python_version >= "3.10" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" -black==24.1.1 ; python_version >= "3.10" and python_version < "4.0" -certifi==2024.2.2 ; python_version >= "3.10" and python_version < "4.0" -cffi==1.16.0 ; python_version >= "3.10" and python_version < "4.0" +black==24.4.2 ; python_version >= "3.10" and python_version < "4.0" +certifi==2024.6.2 ; python_version >= "3.10" and python_version < "4.0" +cffi==1.16.0 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") constantly==23.10.4 ; python_version >= "3.10" and python_version < "4.0" courlan==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -cryptography==41.0.7 ; python_version >= "3.10" and python_version < "4.0" +cryptography==42.0.8 ; python_version >= "3.10" and python_version < "4.0" cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" dateparser==1.2.0 ; python_version >= "3.10" and python_version < "4.0" defusedxml==0.7.1 ; python_version >= "3.10" and python_version < "4.0" @@ -21,15 +21,15 @@ django==5.0.6 ; python_version >= "3.10" and python_version < "4.0" exceptiongroup==1.2.1 ; python_version >= "3.10" and python_version < "3.11" extruct==0.17.0 ; python_version >= "3.10" and python_version < "4.0" filelock==3.15.4 ; python_version >= "3.10" and python_version < "4.0" -flake8==7.0.0 ; python_version >= "3.10" and python_version < "4.0" +flake8==7.1.0 ; python_version >= "3.10" and python_version < "4.0" greenlet==3.0.3 ; python_version >= "3.10" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" html-text==0.6.2 ; python_version >= "3.10" and python_version < "4.0" -html2text==2020.1.16 ; python_version >= "3.10" and python_version < "4.0" +html2text==2024.2.26 ; python_version >= "3.10" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" htmldate==1.8.1 ; python_version >= "3.10" and 
python_version < "4.0" httpcore==1.0.5 ; python_version >= "3.10" and python_version < "4.0" -httpx==0.26.0 ; python_version >= "3.10" and python_version < "4.0" +httpx==0.27.0 ; python_version >= "3.10" and python_version < "4.0" hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" idna==3.7 ; python_version >= "3.10" and python_version < "4.0" image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" @@ -54,31 +54,31 @@ overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" packaging==24.1 ; python_version >= "3.10" and python_version < "4.0" parsel==1.9.1 ; python_version >= "3.10" and python_version < "4.0" pathspec==0.12.1 ; python_version >= "3.10" and python_version < "4.0" -pillow==10.1.0 ; python_version >= "3.10" and python_version < "4.0" +pillow==10.3.0 ; python_version >= "3.10" and python_version < "4.0" platformdirs==4.2.2 ; python_version >= "3.10" and python_version < "4.0" -playwright==1.41.2 ; python_version >= "3.10" and python_version < "4.0" +playwright==1.44.0 ; python_version >= "3.10" and python_version < "4.0" pluggy==1.5.0 ; python_version >= "3.10" and python_version < "4.0" protego==0.3.1 ; python_version >= "3.10" and python_version < "4.0" pyasn1-modules==0.4.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1==0.6.0 ; python_version >= "3.10" and python_version < "4.0" -pycodestyle==2.11.1 ; python_version >= "3.10" and python_version < "4.0" -pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" +pycodestyle==2.12.0 ; python_version >= "3.10" and python_version < "4.0" +pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" -pyee==11.0.1 ; python_version >= "3.10" and python_version < "4.0" +pyee==11.1.0 ; python_version >= "3.10" and python_version < "4.0" pyflakes==3.2.0 ; python_version >= "3.10" and python_version < "4.0" -pyopenssl==23.3.0 ; python_version >= "3.10" and python_version < "4.0" +pyopenssl==24.1.0 ; python_version >= "3.10" and python_version < "4.0" pyparsing==3.1.2 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" pyrdfa3==3.6.2 ; python_version >= "3.10" and python_version < "4.0" -pytest==8.0.0 ; python_version >= "3.10" and python_version < "4.0" -python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" +pytest==8.2.2 ; python_version >= "3.10" and python_version < "4.0" +python-dateutil==2.9.0.post0 ; python_version >= "3.10" and python_version < "4.0" python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" pytz==2024.1 ; python_version >= "3.10" and python_version < "4.0" queuelib==1.7.0 ; python_version >= "3.10" and python_version < "4.0" rdflib==7.0.0 ; python_version >= "3.10" and python_version < "4.0" regex==2024.5.15 ; python_version >= "3.10" and python_version < "4.0" requests-file==2.1.0 ; python_version >= "3.10" and python_version < "4.0" -requests==2.31.0 ; python_version >= "3.10" and python_version < "4.0" +requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" scrapy==2.11.2 ; python_version >= "3.10" and python_version < "4.0" service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" @@ -90,16 
+90,16 @@ sqlparse==0.5.0 ; python_version >= "3.10" and python_version < "4.0" tld==0.13 ; python_version >= "3.10" and python_version < "4" tldextract==5.1.2 ; python_version >= "3.10" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" -trafilatura==1.7.0 ; python_version >= "3.10" and python_version < "4.0" +trafilatura==1.11.0 ; python_version >= "3.10" and python_version < "4.0" twisted-iocpsupport==1.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" twisted==24.3.0 ; python_version >= "3.10" and python_version < "4.0" typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" tzdata==2024.1 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") tzlocal==5.2 ; python_version >= "3.10" and python_version < "4.0" -urllib3==2.2.0 ; python_version >= "3.10" and python_version < "4.0" -vobject==0.9.6.1 ; python_version >= "3.10" and python_version < "4.0" -w3lib==2.1.2 ; python_version >= "3.10" and python_version < "4.0" +urllib3==2.2.2 ; python_version >= "3.10" and python_version < "4.0" +vobject==0.9.7 ; python_version >= "3.10" and python_version < "4.0" +w3lib==2.2.1 ; python_version >= "3.10" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" -wheel==0.42.0 ; python_version >= "3.10" and python_version < "4.0" +wheel==0.43.0 ; python_version >= "3.10" and python_version < "4.0" xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" zope-interface==6.4.post2 ; python_version >= "3.10" and python_version < "4.0" From dbc3e9c3a0e2ed48e2fa1b1954da0b8c6f3b99b7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 28 Jun 2024 14:41:30 +0200 Subject: [PATCH 492/590] change: drop "allowed_domains" custom setting - (this is necessary since Scrapy's newest versions are stricter with this setting enabled -> Thumbnail Downloads done by the Downloader Middleware would be dropped if the image URLs aren't hosted on oersi.org (which they aren't)) --- converter/spiders/oersi_spider.py | 1 - 1 file changed, 1 deletion(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 3af1e35d..0f3dd6f0 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -43,7 +43,6 @@ class OersiSpider(scrapy.Spider, LomBase): # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" version = "0.2.6" # last update: 2024-05-28 - allowed_domains = "oersi.org" custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, From 5b2bdb31afbb99e086bb02ef93fdfdffb1cad461 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 28 Jun 2024 16:18:03 +0200 Subject: [PATCH 493/590] change: drop "overrides"-package from dependencies and crawlers - the "overrides"-package was outdated (v3.1.0 was released in June 2020!) 
and made upgrading to newer Python versions harder than necessary - trying to crawl ANY spider at all would throw "overrides"-related errors when scrapy's SpiderLoader tries to fetch a list of available crawlers, even if the specific crawler wasn't using "overrides" at all Background: - the "overrides"-package served a purpose in previous versions of Python, but is not needed anymore in Python 3.12: - Python 3.12 implements a new built-in "@override"-decorator, which basically serves the same purpose - see: https://docs.python.org/3/whatsnew/3.12.html#what-s-new-in-python-3-12 (PEP 698) - details: https://docs.python.org/3/library/typing.html#typing.override - since only 3 classes used the "overrides" decorator anyway, this dependency removal should have no side effects --- .../base_classes/lernprogramme_spider_base.py | 11 +---------- converter/spiders/learning_apps_spider.py | 4 ++-- converter/spiders/youtube_spider.py | 12 ------------ poetry.lock | 12 +----------- pyproject.toml | 1 - requirements.txt | 1 - 6 files changed, 4 insertions(+), 37 deletions(-) diff --git a/converter/spiders/base_classes/lernprogramme_spider_base.py b/converter/spiders/base_classes/lernprogramme_spider_base.py index f59e3848..1c4f27a6 100644 --- a/converter/spiders/base_classes/lernprogramme_spider_base.py +++ b/converter/spiders/base_classes/lernprogramme_spider_base.py @@ -6,7 +6,6 @@ import converter.items as items from .lom_base import LomBase -from overrides import overrides from scrapy.http import Request, Response from scrapy.http.response.text import TextResponse @@ -118,23 +117,19 @@ def __init__(self, name, version, url, static_values, **kwargs): self.static_values = static_values super().__init__(**kwargs) - @overrides # LomBase def getId(self, response: Response) -> str: return response.meta["row"]["url"] - @overrides # LomBase def getHash(self, response: Response) -> str: hash_string = self.version + str(response.meta["row"].items()) return hashlib.sha1(hash_string.encode("utf8")).hexdigest() - @overrides # LomBase def getBase(self, response: Response) -> items.BaseItemLoader: base = LomBase.getBase(self, response) if response.meta["row"]["thumbnail"] is not None: base.add_value("thumbnail", response.meta["row"]["thumbnail"]) return base - @overrides # LomBase def getLOMGeneral(self, response: Response) -> items.LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["row"]["title"]) @@ -145,14 +140,12 @@ def getLOMGeneral(self, response: Response) -> items.LomGeneralItemloader: general.add_value("language", self.static_values["language"]) return general - @overrides # LomBase def getLOMTechnical(self, response: Response) -> items.LomTechnicalItemLoader: technical = LomBase.getLOMTechnical(self, response) technical.add_value("format", self.static_values["format"]) technical.add_value("location", response.meta["row"]["url"]) return technical - @overrides # LomBase def getLOMLifecycle(self, response: Response) -> items.LomLifecycleItemloader: lifecycle = LomBase.getLOMLifecycle(self, response) lifecycle.add_value("role", "author") @@ -161,13 +154,11 @@ def getLOMLifecycle(self, response: Response) -> items.LomLifecycleItemloader: lifecycle.add_value("url", self.url) return lifecycle - @overrides # LomBase def getLicense(self, response: Response) -> items.LicenseItemLoader: license = LomBase.getLicense(self, response) license.add_value("url", self.static_values["licence_url"]) return license - @overrides # LomBase def getValuespaces(self, 
response: Response) -> items.ValuespaceItemLoader: valuespaces = LomBase.getValuespaces(self, response) skos = self.static_values["skos"] @@ -197,4 +188,4 @@ def merge(source: dict, overrides: dict) -> dict: result[key] = overrides[key] else: result[key] = value - return result \ No newline at end of file + return result diff --git a/converter/spiders/learning_apps_spider.py b/converter/spiders/learning_apps_spider.py index 83cd4b67..68ca6df3 100644 --- a/converter/spiders/learning_apps_spider.py +++ b/converter/spiders/learning_apps_spider.py @@ -1,4 +1,3 @@ -from overrides import overrides from scrapy.http import Response from converter import items @@ -7,6 +6,7 @@ import html import scrapy + class LearningAppsSpider(scrapy.Spider, LomBase): name = "learning_apps_spider" friendlyName = "LearningApps.org" @@ -16,6 +16,7 @@ class LearningAppsSpider(scrapy.Spider, LomBase): categories = {} subcategories = {} + def __init__(self, **kwargs): LomBase.__init__(self, **kwargs) @@ -83,7 +84,6 @@ def getBase(self, response): base.replace_value("thumbnail", response.meta["item"].xpath("@image").get()) return base - @overrides def getLOMLifecycle(self, response: Response) -> items.LomLifecycleItemloader: lifecycle = LomBase.getLOMLifecycle(self, response) name = response.meta["item"].xpath("@author").get().split(' ') diff --git a/converter/spiders/youtube_spider.py b/converter/spiders/youtube_spider.py index 2f3ff275..d1fa3acd 100644 --- a/converter/spiders/youtube_spider.py +++ b/converter/spiders/youtube_spider.py @@ -6,7 +6,6 @@ from typing import Generator, List from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse -from overrides import overrides from scrapy.http import Request, Response from scrapy.spiders import Spider @@ -85,7 +84,6 @@ def __init__(self, **kwargs): super().__init__(**kwargs) self.lomLoader = YoutubeLomLoader(self.name, self.version, **kwargs) - @overrides # Spider def start_requests(self): if env.get("YOUTUBE_API_KEY", False) == "": logging.error("YOUTUBE_API_KEY is required for youtube_spider. 
Please check your '.env'-settings!") @@ -312,19 +310,15 @@ def __init__(self, name, version, **kwargs): self.version = version super().__init__(**kwargs) - @overrides # LomBase def getId(self, response: Response) -> str: return YoutubeSpider.get_video_url(response.meta["item"]) - @overrides # LomBase def getHash(self, response: Response) -> str: return self.version + response.meta["item"]["snippet"]["publishedAt"] - @overrides # LomBase async def mapResponse(self, response) -> items.ResponseItemLoader: return await LomBase.mapResponse(self, response, False) - @overrides # LomBase def getBase(self, response: Response) -> items.BaseItemLoader: base = LomBase.getBase(self, response) base.add_value("origin", response.meta["row"]["sourceTitle"].strip()) @@ -362,7 +356,6 @@ def get_fulltext(self, response: Response) -> str: ) return fulltext - @overrides # LomBase def getLOMGeneral(self, response: Response) -> items.LomGeneralItemloader: general = LomBase.getLOMGeneral(self, response) general.add_value("title", response.meta["item"]["snippet"]["title"]) @@ -380,7 +373,6 @@ def get_description(self, response: Response) -> str: or response.meta["playlist"]["snippet"]["title"] ) - @overrides # LomBase def getLOMTechnical(self, response: Response) -> items.LomTechnicalItemLoader: technical = LomBase.getLOMTechnical(self, response) technical.add_value("format", "text/html") @@ -388,7 +380,6 @@ def getLOMTechnical(self, response: Response) -> items.LomTechnicalItemLoader: technical.add_value("duration", response.meta["item"]["contentDetails"]["duration"]) return technical - @overrides # LomBase def getLOMEducational(self, response): educational = LomBase.getLOMEducational(self, response) tar = items.LomAgeRangeItemLoader() @@ -397,7 +388,6 @@ def getLOMEducational(self, response): educational.add_value("typicalAgeRange", tar.load_item()) return educational - @overrides # LomBase def getLOMLifecycle(self, response: Response) -> items.LomLifecycleItemloader: lifecycle = LomBase.getLOMLifecycle(self, response) lifecycle.add_value("role", "author") @@ -413,7 +403,6 @@ def get_channel_url(self, response: Response) -> str: channel_id = response.meta["item"]["snippet"]["channelId"] return "https://www.youtube.com/channel/{}".format(channel_id) - @overrides # LomBase def getLicense(self, response: Response) -> items.LicenseItemLoader: license_loader = LomBase.getLicense(self, response) # there are only two possible values according to https://developers.google.com/youtube/v3/docs/videos: @@ -427,7 +416,6 @@ def getLicense(self, response: Response) -> items.LicenseItemLoader: logging.warning("Youtube element {} has no license".format(self.getId())) return license_loader - @overrides # LomBase def getValuespaces(self, response: Response) -> items.ValuespaceItemLoader: valuespaces = LomBase.getValuespaces(self, response) row = response.meta["row"] diff --git a/poetry.lock b/poetry.lock index 4d2cd376..ed84c748 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1275,16 +1275,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "overrides" -version = "3.1.0" -description = "A decorator to automatically detect mismatch when overriding a method." 
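# For reference, the role of the removed "overrides" decorator is covered by Python 3.12's built-in
# `typing.override` (PEP 698). A minimal, illustrative sketch (the class and method follow the
# youtube_spider code touched above; the `@override` usage itself is an assumption, not part of this patch):
#
#   from typing import override
#
#   class YoutubeLomLoader(LomBase):
#       @override  # a static type checker now reports an error if LomBase defines no getId()
#       def getId(self, response) -> str:
#           return YoutubeSpider.get_video_url(response.meta["item"])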
-optional = false -python-versions = "*" -files = [ - {file = "overrides-3.1.0.tar.gz", hash = "sha256:30f761124579e59884b018758c4d7794914ef02a6c038621123fec49ea7599c6"}, -] - [[package]] name = "packaging" version = "24.1" @@ -2270,4 +2260,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "67f2e3e5502b8d0e64e2bfe8a5bb3707b0fa86f859b26fb5c8f5355fda2ddda8" +content-hash = "9bca50ae86d8d0c367af2f591dc4d01c882e29076e2708ce03e8355e5bb49737" diff --git a/pyproject.toml b/pyproject.toml index 416f2fd6..47e52fb1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,7 +71,6 @@ image = "1.5.33" itemadapter="0.9.0" itemloaders="1.3.1" isodate="0.6.1" -overrides="3.1.0" Pillow="10.3.0" playwright="1.44.0" pytest="8.2.2" diff --git a/requirements.txt b/requirements.txt index 0719bfa7..d29056ba 100644 --- a/requirements.txt +++ b/requirements.txt @@ -50,7 +50,6 @@ marisa-trie==1.2.0 ; python_version >= "3.10" and python_version < "4.0" mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" mf2py==2.0.1 ; python_version >= "3.10" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" -overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" packaging==24.1 ; python_version >= "3.10" and python_version < "4.0" parsel==1.9.1 ; python_version >= "3.10" and python_version < "4.0" pathspec==0.12.1 ; python_version >= "3.10" and python_version < "4.0" From 58bd2ec93de493ad0853a03e7771fc17f58bd6e1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 28 Jun 2024 17:53:04 +0200 Subject: [PATCH 494/590] change: upgrade docker files to Python 3.12.4 (+ browserless v2.14) - upgrade docker container to use Python 3.12.4 - update docker image for "browserless" headless browser from v2.0.x to chromium build v2.14.0 - Changelog: https://github.com/browserless/browserless/blob/main/CHANGELOG.md#v2140 - docker image details: https://github.com/browserless/browserless/pkgs/container/chromium/232403319?tag=v2.14.0 --- Dockerfile | 2 +- docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index e7b60cb9..11e06088 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11.6-slim-bookworm +FROM python:3.12.4-slim-bookworm # ENV CRAWLER wirlernenonline_spider diff --git a/docker-compose.yml b/docker-compose.yml index 966e6c04..778bf5cf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -19,7 +19,7 @@ services: retries: 3 start_period: 40s headless_chrome: - image: ghcr.io/browserless/chrome@sha256:f27f9fa0d9c2344180c0fc5af7c6ea4a1df6f2a7a3efc555de876dbea6ded7a1 + image: ghcr.io/browserless/chromium:v2.14.0 restart: always environment: - TIMEOUT=120000 From 27a317c8105036bce1ebc164063ba8a1a2085668 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 Jul 2024 16:54:04 +0200 Subject: [PATCH 495/590] refactor: "course_availability_..."-properties - feat: CourseItemPipeline (work-in-progress) - handles BIRD-related CourseItem properties - change: MOOCHub "startDate" and "endDate" properties are handled by the CourseItemPipeline from now on - this change shifts responsibility for date string handling from oersi_spider to the pipelines - "course_availability_from" and "_...until"-fields expect a string value - strings are type-checked and converted to ISO-datetime strings by the pipeline - docs: update 
CourseItem DocStrings for "course_availability_..." to reflect recent changes (expects a "datetime"-string instead of the previous "date"-string) --- converter/items.py | 4 +- converter/pipelines.py | 78 +++++++++++++++++++++ converter/settings.py | 1 + converter/spiders/oersi_spider.py | 111 ++++++++++++++---------------- 4 files changed, 134 insertions(+), 60 deletions(-) diff --git a/converter/items.py b/converter/items.py index 132a8507..27937eae 100644 --- a/converter/items.py +++ b/converter/items.py @@ -325,9 +325,9 @@ class CourseItem(Item): BIRD-specific metadata properties intended only for courses. """ course_availability_from = Field() - """Corresponding edu-sharing property: 'ccm:oeh_event_begin' (date)""" + """Corresponding edu-sharing property: 'ccm:oeh_event_begin' (expects ISO datetime string)""" course_availability_until = Field() - """Corresponding edu-sharing property: 'ccm:oeh_event_end' (date)""" + """Corresponding edu-sharing property: 'ccm:oeh_event_end' (expects ISO datetime string)""" course_description_short = Field() """Corresponding edu-sharing property: 'ccm:oeh_course_description_short'""" course_duration = Field() diff --git a/converter/pipelines.py b/converter/pipelines.py index a0d75da3..2a49d703 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -308,6 +308,84 @@ def process_item(self, raw_item, spider): return raw_item +class CourseItemPipeline(BasicPipeline): + """Pipeline for BIRD-related metadata properties.""" + # ToDo: Expand docs! + # + # ToDo: course description normalization -> 'ccm:oeh_course_description_short' + # - expects a string (with or without HTML formatting) + # + # ToDo: course_duration -> 'cclom:typicallearningtime' (ms) + # + # ToDo: course_learningoutcome -> 'ccm:learninggoal' + # - expects a string (with or without HTML formatting) + # + # ToDo (optional): course_schedule + # - expects a string (either with or without HTML formatting) + # + # ToDo: course_url_video + # - expects a (singular) URL + # + # ToDo: course_workload -> ? + def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: + adapter = ItemAdapter(item) + if "course" in adapter: + course_adapter = adapter["course"] + + # Prepare BIRD "course_availability_from" for "ccm:oeh_event_begin" (ISO-formatted "datetime"-string) + if "course_availability_from" in course_adapter: + course_availability_from: str = course_adapter["course_availability_from"] + # BIRD spec: "verfügbar ab" expects a single-value 'datetime' string + if course_availability_from and isinstance(course_availability_from, str): + caf_parsed: datetime = dateparser.parse(course_availability_from) + # try to parse the string and convert it to a datetime object + if caf_parsed and isinstance(caf_parsed, datetime.datetime): + # convert the parsed string from a 'datetime' object to an ISO-formatted 'datetime'-string + caf_iso: str = caf_parsed.isoformat() + course_adapter["course_availability_from"] = caf_iso + else: + log.warning(f"""Failed to parse "course_availability_from"-property + "{course_availability_from}" to a valid "datetime"-object. + (Please check the object {adapter['sourceId']} or extend the CourseItemPipeline!) 
+ """) + else: + log.warning(f"""Cannot process BIRD 'course_availability_from'-property {course_availability_from} + f"(Expected a string, but received {type(course_availability_from)} instead.""") + + # Prepare BIRD "course_availability_until" for "ccm:oeh_event_end" (-> ISO-formatted "datetime"-string) + if "course_availability_until" in course_adapter: + course_availability_until = course_adapter["course_availability_until"] + # BIRD Spec "verfügbar bis" expects a single-value 'datetime' string + if course_availability_until and isinstance(course_availability_until, str): + cau_parsed: datetime = dateparser.parse(course_availability_until) + if cau_parsed and isinstance(cau_parsed, datetime.datetime): + cau_iso: str = cau_parsed.isoformat() + course_adapter["course_availability_until"] = cau_iso + else: + log.warning(f"""Failed to parse "{course_availability_until}" to a valid 'datetime'-object. + (Please check the object {adapter['sourceId']} for unhandled edge-cases or extend the + CourseItemPipeline!)""") + else: + log.warning(f"""Cannot process BIRD "course_availability_until"-property {course_availability_until} + (Expected a string, but received {type(course_availability_until)} instead.)""") + + if "course_description_short" in course_adapter: + pass + if "course_duration" in course_adapter: + pass + if "course_learningoutcome" in course_adapter: + pass + if "course_schedule" in course_adapter: + pass + if "course_url_video" in course_adapter: + pass + if "course_workload" in course_adapter: + pass + return item + + pass + + class ProcessValuespacePipeline(BasicPipeline): """ generate de_DE / i18n strings for valuespace fields diff --git a/converter/settings.py b/converter/settings.py index a839a3ea..71b05baa 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -131,6 +131,7 @@ "converter.pipelines.NormLanguagePipeline": 150, "converter.pipelines.ConvertTimePipeline": 200, "converter.pipelines.ProcessValuespacePipeline": 250, + "converter.pipelines.CourseItemPipeline": 275, "converter.pipelines.ProcessThumbnailPipeline": 300, ( "converter.pipelines.DummyPipeline" diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 0f3dd6f0..bb14e0f3 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -5,7 +5,6 @@ from collections import Counter from typing import Optional -import dateparser import requests import scrapy @@ -200,8 +199,8 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req ) return None if ( - self.getId(response=None, elastic_item=elastic_item) is not None - and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None + self.getId(response=None, elastic_item=elastic_item) is not None + and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None ): if not self.hasChanged(None, elastic_item=elastic_item): return None @@ -510,12 +509,12 @@ def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: return changed def get_lifecycle_author( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - date_created: Optional[str] = None, - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + date_created: Optional[str] = None, + date_published: Optional[str] = None, ): """ If a "creator"-field is available in the OERSI API for a specific 
'_source'-item, creates an 'author'-specific @@ -583,11 +582,11 @@ def get_lifecycle_author( return authors def get_affiliation_and_save_to_lifecycle( - self, - affiliation_dict: dict, - lom_base_item_loader: LomBaseItemloader, - organization_fallback: set[str], - lifecycle_role: str, + self, + affiliation_dict: dict, + lom_base_item_loader: LomBaseItemloader, + organization_fallback: set[str], + lifecycle_role: str, ): """ Retrieves metadata from OERSI's "affiliation"-field (which is typically found within a "creator"- or @@ -651,11 +650,11 @@ def validate_academic_title_string(self, honorific_prefix: str) -> str: return honorific_prefix.strip() def get_lifecycle_contributor( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - author_list: Optional[list[str]] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + author_list: Optional[list[str]] = None, ): """ Collects metadata from the OERSI "contributor"-field and stores it within a LomLifecycleItemLoader. @@ -757,11 +756,11 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) def get_lifecycle_publisher( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organizations_from_publisher_fields: set[str], - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organizations_from_publisher_fields: set[str], + date_published: Optional[str] = None, ): """ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. Successfully @@ -797,7 +796,7 @@ def get_lifecycle_publisher( lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) def get_lifecycle_organization_from_source_organization_fallback( - self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] + self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] ): # ATTENTION: the "sourceOrganization"-field is not part of the AMB draft, therefore this method is currently # used a fallback, so we don't lose any useful metadata (even if that metadata is not part of the AMB spec). 
@@ -839,7 +838,8 @@ def get_lifecycle_organization_from_source_organization_fallback( lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) def get_lifecycle_publisher_from_source_organization( - self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, previously_collected_publishers: set[str] + self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, + previously_collected_publishers: set[str] ): source_organizations: list[dict] = elastic_item_source.get("sourceOrganization") for so in source_organizations: @@ -860,7 +860,7 @@ def get_lifecycle_publisher_from_source_organization( lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) def lifecycle_determine_type_of_identifier_and_save_uri( - self, item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader + self, item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader ): """ OERSI's "creator"/"contributor"/"affiliation" items might contain an 'id'-field which (optionally) provides @@ -873,10 +873,10 @@ def lifecycle_determine_type_of_identifier_and_save_uri( # "creator.id" can be 'null', therefore we need to explicitly check its type before trying to parse it uri_string: str = item_dictionary.get("id") if ( - "orcid.org" in uri_string - or "/gnd/" in uri_string - or "wikidata.org" in uri_string - or "ror.org" in uri_string + "orcid.org" in uri_string + or "/gnd/" in uri_string + or "wikidata.org" in uri_string + or "ror.org" in uri_string ): if "/gnd/" in uri_string: lifecycle_item_loader.add_value("id_gnd", uri_string) @@ -950,19 +950,22 @@ def enrich_imoox_metadata(self, base_itemloader: BaseItemLoader, elastic_item: d if start_dates and isinstance(start_dates, list): for start_date_raw in start_dates: if start_date_raw and isinstance(start_date_raw, str): - sdt_parsed: datetime = dateparser.parse(start_date_raw) - if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): - sd_parsed_iso: str = sdt_parsed.isoformat() - course_itemloader.add_value("course_availability_from", sd_parsed_iso) + course_itemloader.add_value("course_availability_from", start_date_raw) + else: + self.logger.warning( + f"Received unexpected type for \"startDate\" {start_date_raw} . " + f"Expected str, but received {type(start_date_raw)} instead.") if "endDate" in imoox_attributes: end_dates: list[str] = imoox_attributes["endDate"] if end_dates and isinstance(end_dates, list): for end_date_raw in end_dates: if end_date_raw and isinstance(end_date_raw, str): - edt_parsed: datetime = dateparser.parse(end_date_raw) - if edt_parsed and isinstance(edt_parsed, datetime.datetime): - ed_parsed_iso: str = edt_parsed.isoformat() - course_itemloader.add_value("course_availability_until", ed_parsed_iso) + course_itemloader.add_value("course_availability_until", end_date_raw) + else: + self.logger.warning( + f"Received unexpected type for \"endDate\" {end_date_raw}. " + f"Expected str, but received {type(end_date_raw)} instead." 
+ ) if "trailer" in imoox_attributes: # example data (as of 2024-05-27) # "trailer": { @@ -1045,11 +1048,11 @@ def enrich_imoox_metadata(self, base_itemloader: BaseItemLoader, elastic_item: d base_itemloader.add_value("course", course_itemloader.load_item()) def enrich_vhb_metadata( - self, - base_itemloader: BaseItemLoader, - elastic_item: dict, - lom_general_itemloader: LomGeneralItemloader, - in_languages: list[str] | None, + self, + base_itemloader: BaseItemLoader, + elastic_item: dict, + lom_general_itemloader: LomGeneralItemloader, + in_languages: list[str] | None, ): """ Combines metadata from OERSI's elastic_item with MOOCHub v2.x metadata from the source (vhb) @@ -1111,19 +1114,11 @@ def enrich_vhb_metadata( if "startDate" in vhb_item_matched["attributes"]: start_date_raw: str = vhb_item_matched["attributes"]["startDate"] if start_date_raw and isinstance(start_date_raw, str): - # parsing the date string first to check its validity - sdt_parsed: datetime = dateparser.parse(start_date_raw) - if sdt_parsed and isinstance(sdt_parsed, datetime.datetime): - # just to make sure that we don't parse bogus data, we run the string - # through the dateparser module first and convert it to iso 8601 - sd_parsed_iso: str = sdt_parsed.isoformat() - course_itemloader.add_value("course_availability_from", sd_parsed_iso) - else: - self.logger.warning( - f"Could not parse vhb 'start_date' value {start_date_raw} " - f"to datetime. (Please check for new edge-cases " - f"and update the crawler!)" - ) + course_itemloader.add_value("course_availability_from", start_date_raw) + else: + self.logger.warning(f"Received unexpected type for \"startDate\" {start_date_raw} . " + f"Expected a string, but received {type(start_date_raw)} instead." + ) if "video" in vhb_item_matched["attributes"]: video_item: dict = vhb_item_matched["attributes"]["video"] if video_item: @@ -1179,7 +1174,7 @@ def enrich_vhb_metadata( # timedelta has no parameter for months # -> X months = X * (4 weeks) duration_delta = duration_delta + ( - duration_number * datetime.timedelta(weeks=4) + duration_number * datetime.timedelta(weeks=4) ) case _: self.logger.warning( From 3f7d4b0ba0f2b83d8d18205b20ebbf8abc0f4163 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 Jul 2024 17:01:56 +0200 Subject: [PATCH 496/590] fix: 2 weak warnings (PEP8: E713 & E721) --- converter/pipelines.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 2a49d703..e6496c22 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -127,7 +127,7 @@ def process_item(self, raw_item, spider): except KeyError: raise DropItem(f'Item {item} has no lom.technical.location') try: - if "location" not in item["lom"]["technical"] and not "binary" in item: + if "location" not in item["lom"]["technical"] and "binary" not in item: raise DropItem( "Entry {} has no technical location or binary data".format(item["lom"]["general"]["title"]) ) @@ -726,7 +726,7 @@ def create_thumbnails_from_image_bytes(self, image: Image.Image, item, settings) def get_settings_for_crawler(spider) -> scrapy.settings.Settings: all_settings = get_project_settings() crawler_settings = settings.BaseSettings(getattr(spider, "custom_settings") or {}, 'spider') - if type(crawler_settings) == dict: + if isinstance(crawler_settings, dict): crawler_settings = settings.BaseSettings(crawler_settings, 'spider') for key in crawler_settings.keys(): if ( From 5807f6b8d22aca671b8397a96a1da94fe637ad28 
Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 Jul 2024 17:10:40 +0200 Subject: [PATCH 497/590] fix: if "course_availability_..."-property can't be handled, delete it from CourseItem so it won't reach the es_connector --- converter/pipelines.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index e6496c22..83314d90 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -330,7 +330,7 @@ class CourseItemPipeline(BasicPipeline): def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: adapter = ItemAdapter(item) if "course" in adapter: - course_adapter = adapter["course"] + course_adapter: ItemAdapter = adapter["course"] # Prepare BIRD "course_availability_from" for "ccm:oeh_event_begin" (ISO-formatted "datetime"-string) if "course_availability_from" in course_adapter: @@ -348,9 +348,11 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr "{course_availability_from}" to a valid "datetime"-object. (Please check the object {adapter['sourceId']} or extend the CourseItemPipeline!) """) + del course_adapter["course_availability_from"] else: log.warning(f"""Cannot process BIRD 'course_availability_from'-property {course_availability_from} f"(Expected a string, but received {type(course_availability_from)} instead.""") + del course_adapter["course_availability_from"] # Prepare BIRD "course_availability_until" for "ccm:oeh_event_end" (-> ISO-formatted "datetime"-string) if "course_availability_until" in course_adapter: @@ -365,9 +367,11 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr log.warning(f"""Failed to parse "{course_availability_until}" to a valid 'datetime'-object. (Please check the object {adapter['sourceId']} for unhandled edge-cases or extend the CourseItemPipeline!)""") + del course_adapter["course_availability_until"] else: log.warning(f"""Cannot process BIRD "course_availability_until"-property {course_availability_until} (Expected a string, but received {type(course_availability_until)} instead.)""") + del course_adapter["course_availability_until"] if "course_description_short" in course_adapter: pass From be30b2083b2175bbeb66a84285a80c3c69f794fd Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 2 Jul 2024 17:54:56 +0200 Subject: [PATCH 498/590] feat: implement CourseItemPipeline properties (course_description, course_learningoutcome, course_schedule, course_url_video) - implements (rudimentary) type-checks for properties that expect a string value and delete the property if a wrong type was detected - log: changed logging messages from triple-quotes to single-quote strings - style: code cleanup / formatting --- converter/pipelines.py | 88 +++++++++++++++++++++++++++--------------- 1 file changed, 57 insertions(+), 31 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 83314d90..f8bca972 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -311,22 +311,6 @@ def process_item(self, raw_item, spider): class CourseItemPipeline(BasicPipeline): """Pipeline for BIRD-related metadata properties.""" # ToDo: Expand docs! 
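# A minimal sketch of the date normalization applied to the BIRD "course_availability_..."-strings
# handled below (hypothetical helper for illustration only; `dateparser` is assumed to be importable
# here, as this module already relies on it):
import datetime

import dateparser


def _normalize_course_datetime(raw_value: str) -> str | None:
    # `_normalize_course_datetime` is an illustrative name, not part of pipelines.py
    parsed = dateparser.parse(raw_value)  # returns a datetime.datetime object or None
    if parsed and isinstance(parsed, datetime.datetime):
        return parsed.isoformat()  # e.g. "2024-10-01T00:00:00"
    return None  # callers drop the property so invalid values never reach the es_connector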
- # - # ToDo: course description normalization -> 'ccm:oeh_course_description_short' - # - expects a string (with or without HTML formatting) - # - # ToDo: course_duration -> 'cclom:typicallearningtime' (ms) - # - # ToDo: course_learningoutcome -> 'ccm:learninggoal' - # - expects a string (with or without HTML formatting) - # - # ToDo (optional): course_schedule - # - expects a string (either with or without HTML formatting) - # - # ToDo: course_url_video - # - expects a (singular) URL - # - # ToDo: course_workload -> ? def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: adapter = ItemAdapter(item) if "course" in adapter: @@ -344,14 +328,14 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr caf_iso: str = caf_parsed.isoformat() course_adapter["course_availability_from"] = caf_iso else: - log.warning(f"""Failed to parse "course_availability_from"-property - "{course_availability_from}" to a valid "datetime"-object. - (Please check the object {adapter['sourceId']} or extend the CourseItemPipeline!) - """) + log.warning(f"Failed to parse \"course_availability_from\"-property " + f"\"{course_availability_from}\" to a valid \"datetime\"-object. \n" + f"(Please check the object {adapter['sourceId']} " + f"or extend the CourseItemPipeline!)") del course_adapter["course_availability_from"] else: - log.warning(f"""Cannot process BIRD 'course_availability_from'-property {course_availability_from} - f"(Expected a string, but received {type(course_availability_from)} instead.""") + log.warning(f"Cannot process BIRD 'course_availability_from'-property {course_availability_from} " + f"(Expected a string, but received {type(course_availability_from)} instead.") del course_adapter["course_availability_from"] # Prepare BIRD "course_availability_until" for "ccm:oeh_event_end" (-> ISO-formatted "datetime"-string) @@ -364,27 +348,69 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr cau_iso: str = cau_parsed.isoformat() course_adapter["course_availability_until"] = cau_iso else: - log.warning(f"""Failed to parse "{course_availability_until}" to a valid 'datetime'-object. - (Please check the object {adapter['sourceId']} for unhandled edge-cases or extend the - CourseItemPipeline!)""") + log.warning(f"Failed to parse \"{course_availability_until}\" to a valid 'datetime'-object. " + f"(Please check the object {adapter['sourceId']} for unhandled edge-cases or " + f"extend the CourseItemPipeline!)") del course_adapter["course_availability_until"] else: - log.warning(f"""Cannot process BIRD "course_availability_until"-property {course_availability_until} - (Expected a string, but received {type(course_availability_until)} instead.)""") + log.warning( + f"Cannot process BIRD \"course_availability_until\"-property {course_availability_until} " + f"(Expected a string, but received {type(course_availability_until)} instead.)") del course_adapter["course_availability_until"] if "course_description_short" in course_adapter: - pass + # course_description_short expects a string (with or without HTML formatting) + course_description_short: str = course_adapter["course_description_short"] + if course_description_short and isinstance(course_description_short, str): + # happy-case: the description is a string + pass + else: + log.warning(f"Cannot process BIRD 'course_description_short'-property for item " + f"{adapter['sourceId']}. 
Expected a string, but received " + f"{type(course_description_short)} instead.") + del course_adapter["course_description_short"] + if "course_duration" in course_adapter: + # ToDo: course_duration -> 'cclom:typicallearningtime' (ms) pass + if "course_learningoutcome" in course_adapter: - pass + # course_learningoutcome expects a string (with or without HTML formatting) + course_learning_outcome = course_adapter["course_learningoutcome"] + if course_learning_outcome and isinstance(course_learning_outcome, str): + # happy-case + pass + else: + log.warning(f"Cannot process BIRD 'course_learningoutcome'-property for item {adapter['sourceId']} " + f". Expected a string, but received {type(course_learning_outcome)} instead.") + del course_adapter["course_learningoutcome"] + if "course_schedule" in course_adapter: - pass + # course_schedule expects a string (either with or without HTML formatting) + course_schedule: str = course_adapter["course_schedule"] + if course_schedule and isinstance(course_schedule, str): + # happy-case + pass + else: + log.warning(f"Cannot process BIRD 'course_schedule'-property for item {adapter['sourceId']} . " + f"Expected a string, but received {type(course_schedule)} instead.") + del course_adapter["course_schedule"] + if "course_url_video" in course_adapter: - pass + # expects a (singular) URL pointing towards a course-related video (e.g. a short teaser / intro) + course_url_video: str = course_adapter["course_url_video"] + if course_url_video and isinstance(course_url_video, str): + # happy-case + pass + else: + log.warning(f"Cannot process BIRD 'course_url_video'-property for item {adapter['sourceId']} . " + f"Expected a string, but received {type(course_url_video)} instead.") + del course_adapter["course_url_video"] + if "course_workload" in course_adapter: + # ToDo: course_workload -> ? pass + return item pass From 84031f394a7282e991d318521a079c84f10fd473 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 3 Jul 2024 15:17:10 +0200 Subject: [PATCH 499/590] feat: implement remaining "CourseItem"-properties in CourseItem pipeline - docs: clean up solved ToDos - ToDo: BIRD "course_workload" needs to be implemented at a later time when there's clarification which edu-sharing property shall be used in future use-cases --- converter/es_connector.py | 4 --- converter/pipelines.py | 63 ++++++++++++++++++++++++++------------- 2 files changed, 43 insertions(+), 24 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 6150a9c3..5d22422d 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -536,15 +536,11 @@ def transform_item(self, uuid, spider, item): spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] if "course" in item: - # ToDo: activate these fields AFTER confirming that the edu-sharing properties are correct - # ToDo: implement a CourseItemPipeline in pipelines.py BEFORE activating these fields! 
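# For orientation, a fully populated course item contributes edu-sharing properties roughly like this
# (values are purely illustrative, not taken from a real crawl):
#
#   spaces["ccm:oeh_event_begin"] = "2024-10-01T00:00:00"    # ISO datetime string
#   spaces["ccm:oeh_event_end"] = "2025-03-31T00:00:00"      # ISO datetime string
#   spaces["ccm:oeh_course_description_short"] = "<p>Short course description ...</p>"
#   spaces["cclom:typicallearningtime"] = 5400000            # milliseconds
#   spaces["ccm:learninggoal"] = "Participants will be able to ..."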
if "course_availability_from" in item["course"]: # as of 2024-05-14: "ccm:oeh_event_begin" expects a datetime value - # ToDo: implement datetime typecheck for this property in pipelines.py spaces["ccm:oeh_event_begin"] = item["course"]["course_availability_from"] if "course_availability_until" in item["course"]: # as of 2024-05-14: "ccm:oeh_event_end" expects a datetime value - # Todo: implement datetime typecheck for this property in pipelines.py spaces["ccm:oeh_event_end"] = item["course"]["course_availability_until"] if "course_description_short" in item["course"]: spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] diff --git a/converter/pipelines.py b/converter/pipelines.py index f8bca972..20df53e7 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -310,17 +310,16 @@ def process_item(self, raw_item, spider): class CourseItemPipeline(BasicPipeline): """Pipeline for BIRD-related metadata properties.""" - # ToDo: Expand docs! def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: - adapter = ItemAdapter(item) - if "course" in adapter: - course_adapter: ItemAdapter = adapter["course"] + item_adapter = ItemAdapter(item) + if "course" in item_adapter: + course_adapter: ItemAdapter = item_adapter["course"] - # Prepare BIRD "course_availability_from" for "ccm:oeh_event_begin" (ISO-formatted "datetime"-string) if "course_availability_from" in course_adapter: + # Preparing BIRD "course_availability_from" for "ccm:oeh_event_begin" (ISO-formatted "datetime"-string) course_availability_from: str = course_adapter["course_availability_from"] - # BIRD spec: "verfügbar ab" expects a single-value 'datetime' string if course_availability_from and isinstance(course_availability_from, str): + # BIRD spec: "verfügbar ab" expects a single-value 'datetime' string caf_parsed: datetime = dateparser.parse(course_availability_from) # try to parse the string and convert it to a datetime object if caf_parsed and isinstance(caf_parsed, datetime.datetime): @@ -330,7 +329,7 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr else: log.warning(f"Failed to parse \"course_availability_from\"-property " f"\"{course_availability_from}\" to a valid \"datetime\"-object. \n" - f"(Please check the object {adapter['sourceId']} " + f"(Please check the object {item_adapter['sourceId']} " f"or extend the CourseItemPipeline!)") del course_adapter["course_availability_from"] else: @@ -349,13 +348,14 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr course_adapter["course_availability_until"] = cau_iso else: log.warning(f"Failed to parse \"{course_availability_until}\" to a valid 'datetime'-object. " - f"(Please check the object {adapter['sourceId']} for unhandled edge-cases or " + f"(Please check the object {item_adapter['sourceId']} for unhandled edge-cases or " f"extend the CourseItemPipeline!)") del course_adapter["course_availability_until"] else: log.warning( f"Cannot process BIRD \"course_availability_until\"-property {course_availability_until} " - f"(Expected a string, but received {type(course_availability_until)} instead.)") + f"(Expected a string, but received {type(course_availability_until)} instead.) 
" + f"Deleting property...") del course_adapter["course_availability_until"] if "course_description_short" in course_adapter: @@ -366,13 +366,21 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr pass else: log.warning(f"Cannot process BIRD 'course_description_short'-property for item " - f"{adapter['sourceId']}. Expected a string, but received " - f"{type(course_description_short)} instead.") + f"{item_adapter['sourceId']} . Expected a string, but received " + f"{type(course_description_short)} instead. Deleting property...") del course_adapter["course_description_short"] if "course_duration" in course_adapter: - # ToDo: course_duration -> 'cclom:typicallearningtime' (ms) - pass + # course_duration -> 'cclom:typicallearningtime' (ms) + course_duration: int = course_adapter["course_duration"] + if course_duration and isinstance(course_duration, int): + # happy-case + pass + else: + log.warning(f"Cannot process BIRD 'course_duration'-property for item {item_adapter['sourceId']} . " + f"Expected a single integer value (in milliseconds), " + f"but received {type(course_duration)} instead. Deleting property...") + del course_adapter["course_duration"] if "course_learningoutcome" in course_adapter: # course_learningoutcome expects a string (with or without HTML formatting) @@ -381,8 +389,10 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr # happy-case pass else: - log.warning(f"Cannot process BIRD 'course_learningoutcome'-property for item {adapter['sourceId']} " - f". Expected a string, but received {type(course_learning_outcome)} instead.") + log.warning( + f"Cannot process BIRD 'course_learningoutcome'-property for item {item_adapter['sourceId']} " + f". Expected a string, but received {type(course_learning_outcome)} instead. " + f"Deleting property...") del course_adapter["course_learningoutcome"] if "course_schedule" in course_adapter: @@ -392,8 +402,9 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr # happy-case pass else: - log.warning(f"Cannot process BIRD 'course_schedule'-property for item {adapter['sourceId']} . " - f"Expected a string, but received {type(course_schedule)} instead.") + log.warning(f"Cannot process BIRD 'course_schedule'-property for item {item_adapter['sourceId']} . " + f"Expected a string, but received {type(course_schedule)} instead. " + f"Deleting property...") del course_adapter["course_schedule"] if "course_url_video" in course_adapter: @@ -403,12 +414,24 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr # happy-case pass else: - log.warning(f"Cannot process BIRD 'course_url_video'-property for item {adapter['sourceId']} . " - f"Expected a string, but received {type(course_url_video)} instead.") + log.warning( + f"Cannot process BIRD 'course_url_video'-property for item {item_adapter['sourceId']} . " + f"Expected a string, but received {type(course_url_video)} instead. " + f"Deleting property...") del course_adapter["course_url_video"] if "course_workload" in course_adapter: - # ToDo: course_workload -> ? + # ToDo: course_workload -> edu-sharing: ? -> BIRD: expects a single-value string + # ToDo: currently there's no dedicated edu-sharing property for course workloads yet, + # therefore pipeline handling of such values cannot be implemented yet. + if "course_workload" in course_adapter: + # ToDo: confirm which edu-sharing property shall be used for course_workload + # (and which type is expected) -> implement a type-check! 
+ course_workload: str = course_adapter["course_workload"] + if course_workload: + log.error(f"Cannot process BIRD 'course_workload'-property: this field is not implemented yet! " + f"(Please update the 'CourseItemPipeline' (pipelines.py) and es_connector.py!)") + pass pass return item From 98bfdc1e93f2abd863c7d1e24ee83087e62cad00 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 3 Jul 2024 16:12:14 +0200 Subject: [PATCH 500/590] chore: update pyproject.toml classifiers --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 47e52fb1..a76b6fc2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,12 +12,13 @@ homepage = "https://github.com/openeduhub/oeh-search-etl" repository = "https://github.com/openeduhub/oeh-search-etl" documentation = "https://github.com/openeduhub/oeh-search-etl" -keywords = ["metadata", "oer", "crawl", " wirlernenonline"] +keywords = ["metadata", "oer", "crawl", "wirlernenonline"] classifiers = [ "Framework :: Scrapy", "Development Status :: 4 - Beta", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Education :: Testing", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", ] From da56704425727bf910f8209d6c9813063f36db23 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 4 Jul 2024 14:32:02 +0200 Subject: [PATCH 501/590] change: move responsibility of course_duration transformation (to ms) away from oersi_spider - from now on the "CourseItem.course_duration"-field expects values to be in SECONDS - es_connector.py will use this (int) value and transform it for edu-sharing to MILLISECONDS - (this change was made in preparation for future refactoring of pipelines.py) --- converter/es_connector.py | 11 +++++++++-- converter/items.py | 6 ++++-- converter/pipelines.py | 2 +- converter/spiders/oersi_spider.py | 16 +++++++--------- 4 files changed, 21 insertions(+), 14 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 5d22422d..27f32ba8 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -545,8 +545,15 @@ def transform_item(self, uuid, spider, item): if "course_description_short" in item["course"]: spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] if "course_duration" in item["course"]: - # edu-sharing property 'cclom:typicallearningtime' expects values in ms! - spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] + course_duration: int = item["course"]["course_duration"] + if course_duration and isinstance(course_duration, int): + # edu-sharing property 'cclom:typicallearningtime' expects values in ms! + course_duration_in_ms = int(course_duration * 1000) + item["course"]["course_duration"] = course_duration_in_ms + spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] + else: + log.warning(f"Could not transform 'course_duration' {course_duration} to ms. 
" + f"Expected int (seconds), but received type {type(course_duration)} instead.") if "course_learningoutcome" in item["course"]: spaces["ccm:learninggoal"] = item["course"]["course_learningoutcome"] if "course_schedule" in item["course"]: diff --git a/converter/items.py b/converter/items.py index 27937eae..018e116e 100644 --- a/converter/items.py +++ b/converter/items.py @@ -331,8 +331,10 @@ class CourseItem(Item): course_description_short = Field() """Corresponding edu-sharing property: 'ccm:oeh_course_description_short'""" course_duration = Field() - """Corresponding edu-sharing property: 'cclom:typicallearningtime' - (edu-sharing expects 'cclom:typicallearningtime' values in milliseconds!)""" + """Expects a duration in seconds. + Corresponding edu-sharing property: 'cclom:typicallearningtime'. + (ATTENTION: edu-sharing expects 'cclom:typicallearningtime'-values (type: int) in milliseconds! + -> the es_connector will handle transformation from s to ms.)""" course_learningoutcome = Field() """Describes "Lernergebnisse" or "learning objectives". (Expects a string, with or without HTML-formatting!) Corresponding edu-sharing property: 'ccm:learninggoal'""" diff --git a/converter/pipelines.py b/converter/pipelines.py index 20df53e7..6e3ceb71 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -378,7 +378,7 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr pass else: log.warning(f"Cannot process BIRD 'course_duration'-property for item {item_adapter['sourceId']} . " - f"Expected a single integer value (in milliseconds), " + f"Expected a single integer value (in seconds), " f"but received {type(course_duration)} instead. Deleting property...") del course_adapter["course_duration"] diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index bb14e0f3..04a4340f 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -1024,18 +1024,19 @@ def enrich_imoox_metadata(self, base_itemloader: BaseItemLoader, elastic_item: d # while "cclom:typicallearningtime" expects ms. Therefore: # 1) we extract the amount of weeks from "duration" # 2) calculate: * = total duration in h - # 3) convert total duration from h to ms + # 3) convert total duration from hours to seconds + # (-> es_connector will handle conversion from s to ms) if time_unit == "h/week": total_duration_in_hours: int = amount_of_weeks * time_value duration_delta = datetime.timedelta(hours=total_duration_in_hours) if duration_delta: - total_duration_in_ms: int = int(duration_delta.total_seconds() * 1000) - course_itemloader.add_value("course_duration", total_duration_in_ms) + total_duration_in_seconds: int = int(duration_delta.total_seconds()) + course_itemloader.add_value("course_duration", total_duration_in_seconds) self.logger.debug( f"BIRD: combined iMoox 'duration' " f"( {duration_in_weeks_raw} ) and 'workload' " f"( {time_value} {time_unit} ) to {total_duration_in_hours} h " - f"(-> {total_duration_in_ms} ms)." + f"(-> {total_duration_in_seconds} s)." 
) else: # ToDo: convert "h/day" and "h/month" in a similar fashion @@ -1103,7 +1104,7 @@ def enrich_vhb_metadata( if "outline" in vhb_item_matched["attributes"]: outline_raw: str = vhb_item_matched["attributes"]["outline"] if outline_raw and isinstance(outline_raw, str): - # ToDo: vhb "outline" -> course_schedule -> "ccm:oeh_course_schedule" + # vhb "outline" -> course_schedule -> "ccm:oeh_course_schedule" # the vhb attribute "outline" describes a course's schedule (Kursablauf) # IMPORTANT: "outline" is not part of MOOCHub v2.x nor 3.x! course_itemloader.add_value("course_schedule", outline_raw) @@ -1185,10 +1186,7 @@ def enrich_vhb_metadata( if duration_delta: workload_in_seconds: int = int(duration_delta.total_seconds()) if workload_in_seconds: - # the edu-sharing property 'cclom:typicallearningtime' - # expects values in ms: - workload_in_ms: int = workload_in_seconds * 1000 - course_itemloader.add_value("course_duration", workload_in_ms) + course_itemloader.add_value("course_duration", workload_in_seconds) base_itemloader.add_value("course", course_itemloader.load_item()) def parse(self, response=None, **kwargs): From f7586f906938215cbc6d76b2b647b4cf084b6d3b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 4 Jul 2024 18:51:10 +0200 Subject: [PATCH 502/590] refactor "duration"-handling in pipelines ConvertTimePipeline: - refactored how "duration"-values are handled by the pipeline for "LomEducationalitem.typicalLearningTime" and "LomTechnicalItem.duration", so the same function can be re-used for unknown "duration" objects - fix: fixed rare edge-case where technical durations greater than 24h might have been skipped by the previous implementation of "isodate"-duration-parsing ("hh:mm:ss") - fix: fixed several weak warnings and made variable names easier to grasp CourseItemPipeline: - uses the same function from now on to increase maintainability --- converter/pipelines.py | 119 +++++++++++++++++++++++++++++------------ 1 file changed, 84 insertions(+), 35 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 6e3ceb71..323fb9ea 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -267,47 +267,92 @@ def process_item(self, raw_item, spider): del item["lastModified"] if "typicalLearningTime" in item["lom"]["educational"]: - t = item["lom"]["educational"]["typicalLearningTime"] - mapped = None - # ToDo: typecheck the provided value first and handle it accordingly! - # - strings: check commonly provided "duration" formats (e.g. "hh:mm:ss" or "12 Stunden") - # - convert to int: 'cclom:typicallearningtime' expects values to be in milliseconds! 
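# A small illustration of why "hh:mm:ss" strings are split manually in the new helper below
# instead of going through isodate.parse_time() as before: parse_time() builds a datetime.time,
# which cannot represent hour values above 23, so technical durations longer than a day
# (e.g. "26:15:00") could be skipped. The helper name here is illustrative only.
def hhmmss_to_seconds(raw: str) -> int:
    hours, minutes, seconds = (int(part) for part in raw.strip().split(":"))
    return hours * 3600 + minutes * 60 + seconds

assert hhmmss_to_seconds("12:30:55") == 45055  # 43200 s + 1800 s + 55 s
assert hhmmss_to_seconds("26:15:00") == 94500  # durations above 24 h stay intact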
- # - improve error-handling by reworking the bare "except"-clause - # - update es_connector.py and connect this property to the backend - splitted = t.split(":") - if len(splitted) == 3: - mapped = ( - int(splitted[0]) * 60 * 60 - + int(splitted[1]) * 60 - + int(splitted[2]) - ) - if mapped is None: - log.warning( - "Unable to map given typicalLearningTime " - + t - + " to numeric value" - ) - item["lom"]["educational"]["typicalLearningTime"] = mapped + tll_raw = item["lom"]["educational"]["typicalLearningTime"] + tll_duration_in_seconds = ( + determine_duration_and_convert_to_seconds(time_raw=tll_raw, + item_field_name="LomEducationalItem.typicalLearningTime") + ) + # ToDo: update es_connector and connect this property with the backend + item["lom"]["educational"]["typicalLearningTime"] = tll_duration_in_seconds + if "technical" in item["lom"]: if "duration" in item["lom"]["technical"]: raw_duration = item["lom"]["technical"]["duration"] - duration = raw_duration.strip() - if duration: - if len(duration.split(":")) == 3: - duration = isodate.parse_time(duration) - duration = duration.hour * 60 * 60 + duration.minute * 60 + duration.second - elif duration.startswith("PT"): - duration = int(isodate.parse_duration(duration).total_seconds()) - else: - try: - duration = int(duration) - except: - duration = None - log.warning("duration {} could not be normalized to seconds".format(raw_duration)) - item["lom"]["technical"]["duration"] = duration + duration_in_seconds = determine_duration_and_convert_to_seconds( + time_raw=raw_duration, + item_field_name="LomTechnicalItem.duration") + item["lom"]["technical"]["duration"] = duration_in_seconds return raw_item +def determine_duration_and_convert_to_seconds(time_raw: str | int | float, + item_field_name: str) -> int | None: + """ + Tries to convert "duration"-objects (of unknown type) to seconds. + Returns the converted duration as(as total seconds) int value if successful + or None if conversion wasn't possible. + + @param time_raw: the unknown duration object (string or numeric value) + @param item_field_name: scrapy item field-name (required for precise logging messages) + @return: total seconds (int) value of duration or None + """ + time_in_seconds = None + # why are we converting values to int? reason: 'cclom:typicallearningtime' expects values to be in milliseconds! + # (this method converts values to seconds and es_connector.py converts the values to ms) + if time_raw and isinstance(time_raw, str): + # strip whitespace first (just in case -> string values might have typos) + time_raw = time_raw.strip() + if ":" in time_raw: + # handling of "hh:mm:ss"-durations: + t_split: list[str] = time_raw.split(":") + if len(t_split) == 3: + time_in_seconds = ( + int(t_split[0]) * 60 * 60 + + int(t_split[1]) * 60 + + int(t_split[2]) + ) + else: + log.warning(f"Encountered unhandled edge-case in '{item_field_name}': " + f"Expected format 'hh:mm:ss', but received {time_raw} instead.") + if "PT" in time_raw: + # handling of iso-formatted duration strings + # (see: https://en.wikipedia.org/wiki/ISO_8601#Durations) + duration_parsed = isodate.parse_duration(time_raw) + if duration_parsed: + time_in_seconds = duration_parsed.total_seconds() + else: + log.warning(f"Encountered unhandled edge-case in '{item_field_name}': " + f"Expected ISO-8601 duration string, but received {time_raw} instead.") + if "." in time_raw and time_raw.count(".") == 1: + # duration strings might come with float precision (e.g. 
"600.0" for 10 Minutes) + try: + seconds_float: float = float(time_raw) + if seconds_float: + time_in_seconds = int(seconds_float) + except ValueError: + log.warning( + f"Unable to convert string {time_raw} (type: {type(time_raw)}) to 'int'-value (seconds).") + if time_raw.isnumeric(): + try: + time_in_seconds = int(time_raw) + except ValueError: + log.warning(f"Unable to convert 'duration'-value {time_raw} (type ({type(time_raw)}) " + f"to 'int'-value (seconds).") + # ToDo (optional): implement processing of natural language strings? (e.g. "12 Stunden") + # - this feature would need a rigorous testing suite for common expressions (English and German strings) + else: + try: + time_in_seconds = int(time_raw) + except ValueError: + log.warning(f"'duration' value {time_raw} could not be normalized to seconds. " + f"(Unhandled edge-case: Expected int or float value, " + f"but received {type(time_raw)} instead.") + if not time_in_seconds: + log.warning(f"Unable to convert '{item_field_name}'-value (type: {type(time_raw)}) from {time_raw} " + f"to numeric value (seconds).") + return time_in_seconds + + class CourseItemPipeline(BasicPipeline): """Pipeline for BIRD-related metadata properties.""" def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: @@ -373,6 +418,10 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr if "course_duration" in course_adapter: # course_duration -> 'cclom:typicallearningtime' (ms) course_duration: int = course_adapter["course_duration"] + course_duration = determine_duration_and_convert_to_seconds( + time_raw=course_duration, + item_field_name="CourseItem.course_duration" + ) if course_duration and isinstance(course_duration, int): # happy-case pass From 8763c18604d562550434313c99669aaaa08a7658 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 5 Jul 2024 13:40:38 +0200 Subject: [PATCH 503/590] style: code formatting via black - fixes 10 weak warnings (from 21 -> 11) - PEP8: E127 / E722 / E501 --- converter/pipelines.py | 108 +++++++++++++++++++++-------------------- 1 file changed, 55 insertions(+), 53 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 323fb9ea..4962132d 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -168,6 +168,7 @@ def process_item(self, raw_item, spider): class NormLanguagePipeline(BasicPipeline): """Normalize raw or ambiguous language strings to 2-letter-language-codes (ISO 639-1).""" + def process_item(self, item, spider): item_adapter = ItemAdapter(item) try: @@ -270,7 +271,7 @@ def process_item(self, raw_item, spider): tll_raw = item["lom"]["educational"]["typicalLearningTime"] tll_duration_in_seconds = ( determine_duration_and_convert_to_seconds(time_raw=tll_raw, - item_field_name="LomEducationalItem.typicalLearningTime") + item_field_name="LomEducationalItem.typicalLearningTime") ) # ToDo: update es_connector and connect this property with the backend item["lom"]["educational"]["typicalLearningTime"] = tll_duration_in_seconds @@ -355,6 +356,7 @@ def determine_duration_and_convert_to_seconds(time_raw: str | int | float, class CourseItemPipeline(BasicPipeline): """Pipeline for BIRD-related metadata properties.""" + def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: item_adapter = ItemAdapter(item) if "course" in item_adapter: @@ -626,9 +628,9 @@ async def process_item(self, raw_item, spider): elif _mimetype == "application/octet-stream": # ToDo: 
special handling for 'application/octet-stream' necessary? log.debug(f"Thumbnail URL of MIME-Type 'image/...' expected, " - f"but received '{_mimetype}' instead. " - f"(If thumbnail conversion throws unexpected errors further down the line, " - f"the Thumbnail-Pipeline needs to be re-visited! URL: {url} )") + f"but received '{_mimetype}' instead. " + f"(If thumbnail conversion throws unexpected errors further down the line, " + f"the Thumbnail-Pipeline needs to be re-visited! URL: {url} )") response = thumbnail_response else: log.warning(f"Thumbnail URL {url} does not seem to be an image! " @@ -1074,47 +1076,47 @@ def process_item(self, item, spider): class LisumPipeline(BasicPipeline): DISCIPLINE_TO_LISUM_SHORTHAND = { - "020": "C-WAT", # Arbeitslehre -> Wirtschaft, Arbeit, Technik - "060": "C-KU", # Bildende Kunst - "080": "C-BIO", # Biologie - "100": "C-CH", # Chemie - "120": "C-DE", # Deutsch - "160": "C-Eth", # Ethik - "200": "C-FS", # Fremdsprachen - "220": "C-GEO", # Geographie, - "240": "C-GE", # Geschichte - "260": "B-GES", # Gesundheit -> Gesundheitsförderung - "320": "C-Inf", # Informatik - "380": "C-MA", # Mathematik - "400": "B-BCM", # Medienerziehung / Medienpädagogik -> Basiscurriculum Medienbildung - "420": "C-MU", # Musik - "450": "C-Phil", # Philosophie - "460": "C-Ph", # Physik - "480": "C-PB", # Politische Bildung - "510": "C-Psy", # Psychologie - "520": "C-LER", # Religion -> Lebensgestaltung-Ethik-Religionskunde - "560": "B-SE", # Sexualerziehung + "020": "C-WAT", # Arbeitslehre -> Wirtschaft, Arbeit, Technik + "060": "C-KU", # Bildende Kunst + "080": "C-BIO", # Biologie + "100": "C-CH", # Chemie + "120": "C-DE", # Deutsch + "160": "C-Eth", # Ethik + "200": "C-FS", # Fremdsprachen + "220": "C-GEO", # Geographie, + "240": "C-GE", # Geschichte + "260": "B-GES", # Gesundheit -> Gesundheitsförderung + "320": "C-Inf", # Informatik + "380": "C-MA", # Mathematik + "400": "B-BCM", # Medienerziehung / Medienpädagogik -> Basiscurriculum Medienbildung + "420": "C-MU", # Musik + "450": "C-Phil", # Philosophie + "460": "C-Ph", # Physik + "480": "C-PB", # Politische Bildung + "510": "C-Psy", # Psychologie + "520": "C-LER", # Religion -> Lebensgestaltung-Ethik-Religionskunde + "560": "B-SE", # Sexualerziehung # "600": "", # ToDo: "Sport" is not available as a Lisum Rahmenlehrplan shorthand - "660": "B-MB", # Verkehrserziehung -> "Mobilitätsbildung und Verkehrserziehung" - "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" - "900": "B-BCM", # Medienbildung -> "Basiscurriculum Medienbildung" - "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater - "20001": "C-EN", # Englisch - "20002": "C-FR", # Französisch - "20003": "C-AGR", # Griechisch -> Altgriechisch - "20004": "C-IT", # Italienisch - "20005": "C-La", # Latein - "20006": "C-RU", # Russisch - "20007": "C-ES", # Spanisch - "20008": "C-TR", # Türkisch - "20011": "C-PL", # Polnisch - "20014": "C-PT", # Portugiesisch - "20041": "C-ZH", # Chinesisch - "28010": "C-SU", # Sachkunde -> Sachunterricht - "32002": "C-Inf", # Informatik - "46014": "C-AS", # Astronomie - "48005": "C-GEWIWI", # Gesellschaftspolitische Gegenwartsfragen -> Gesellschaftswissenschaften - "2800506": "C-PL", # Polnisch + "660": "B-MB", # Verkehrserziehung -> "Mobilitätsbildung und Verkehrserziehung" + "700": "C-SOWI", # Wirtschaftskunde -> "Sozialwissenschaft/Wirtschaftswissenschaft" + "900": "B-BCM", # Medienbildung -> "Basiscurriculum Medienbildung" + "12002": "C-Thea", # Darstellendes Spiel, Schultheater -> Theater + 
"20001": "C-EN", # Englisch + "20002": "C-FR", # Französisch + "20003": "C-AGR", # Griechisch -> Altgriechisch + "20004": "C-IT", # Italienisch + "20005": "C-La", # Latein + "20006": "C-RU", # Russisch + "20007": "C-ES", # Spanisch + "20008": "C-TR", # Türkisch + "20011": "C-PL", # Polnisch + "20014": "C-PT", # Portugiesisch + "20041": "C-ZH", # Chinesisch + "28010": "C-SU", # Sachkunde -> Sachunterricht + "32002": "C-Inf", # Informatik + "46014": "C-AS", # Astronomie + "48005": "C-GEWIWI", # Gesellschaftspolitische Gegenwartsfragen -> Gesellschaftswissenschaften + "2800506": "C-PL", # Polnisch } EAFCODE_EXCLUSIONS = [ @@ -1203,7 +1205,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # due to having the 'custom'-field as a (raw) list of all eafCodes, this mainly serves # the purpose of reminding us if a 'discipline'-value couldn't be mapped to Lisum log.debug(f"LisumPipeline failed to map from eafCode {discipline_eaf_code} " - f"to its corresponding 'ccm:taxonid' short-handle. Trying Fallback...") + f"to its corresponding 'ccm:taxonid' short-handle. Trying Fallback...") match discipline_eaf_code: # catching edge-cases where OEH 'discipline'-vocab-keys don't line up with eafsys.txt values case "320": @@ -1217,24 +1219,24 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy if eaf_code_digits_only_regex.search(discipline_eaf_code): # each numerical eafCode must have a length of (minimum) 3 digits to be considered valid log.debug(f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. (Wil be " - f"used later for 'ccm:taxonentry').") + f"used later for 'ccm:taxonentry').") if discipline_eaf_code not in self.EAFCODE_EXCLUSIONS: # making sure to only save eafCodes that are part of the standard eafsys.txt discipline_eafcodes.add(discipline_eaf_code) else: log.debug(f"LisumPipeline: eafCode {discipline_eaf_code} is not part of 'EAF " - f"Sachgebietssystematik' (see: eafsys.txt), therefore skipping this " - f"value.") + f"Sachgebietssystematik' (see: eafsys.txt), therefore skipping this " + f"value.") else: # our 'discipline.ttl'-vocab holds custom keys (e.g. 'niederdeutsch', 'oeh04010') which # shouldn't be saved into 'ccm:taxonentry' (since they are not part of the regular # "EAF Sachgebietssystematik" log.debug(f"LisumPipeline eafCode fallback for {discipline_eaf_code} to " - f"'ccm:taxonentry' was not possible. Only eafCodes with a minimum length " - f"of 3+ digits are valid. (Please confirm if the provided value is part of " - f"the 'EAF Sachgebietssystematik' (see: eafsys.txt))") + f"'ccm:taxonentry' was not possible. Only eafCodes with a minimum length " + f"of 3+ digits are valid. 
(Please confirm if the provided value is part of " + f"the 'EAF Sachgebietssystematik' (see: eafsys.txt))") log.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " - f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") + f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") valuespaces["discipline"] = list() # clearing 'discipline'-field, so we don't accidentally write the # remaining OEH w3id-URLs to Lisum's 'ccm:taxonid'-field @@ -1255,7 +1257,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy educational_context_lisum_keys.add(educational_context_w3id_key) case _: log.debug(f"LisumPipeline: educationalContext {educational_context_w3id_key} " - f"not found in mapping table.") + f"not found in mapping table.") educational_context_list = list(educational_context_lisum_keys) educational_context_list.sort() valuespaces["educationalContext"] = educational_context_list From de63dbd2647e05a916b1de020059507726577ff5 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 11 Jul 2024 18:09:41 +0200 Subject: [PATCH 504/590] fix/test: duration conversion - test: implements a few basic test-cases for the "duration"-conversion (see: pipelines.py) - fix: detection of ISO-8601 formatted duration strings (see: https://en.wikipedia.org/wiki/ISO_8601#Durations) - ToDo: due to limitations of the "isodate"-package (and the underlying "datetime.timedelta"-objects) edge-cases like "P6M" and "P1Y" can't be converted to total_seconds and another solution is necessary - in the meantime these edge-cases will throw a warning (at least until a decision how to handle such cases has been made) --- converter/pipelines.py | 17 ++++++++++++++-- tests/test_duration_conversion.py | 32 +++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 tests/test_duration_conversion.py diff --git a/converter/pipelines.py b/converter/pipelines.py index 4962132d..da7eaaa5 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -315,12 +315,25 @@ def determine_duration_and_convert_to_seconds(time_raw: str | int | float, else: log.warning(f"Encountered unhandled edge-case in '{item_field_name}': " f"Expected format 'hh:mm:ss', but received {time_raw} instead.") - if "PT" in time_raw: - # handling of iso-formatted duration strings + if time_raw.startswith("P"): + # handling of iso-formatted duration strings, e.g. "P14DT22H" or "P7W" # (see: https://en.wikipedia.org/wiki/ISO_8601#Durations) duration_parsed = isodate.parse_duration(time_raw) if duration_parsed: time_in_seconds = duration_parsed.total_seconds() + if time_in_seconds == 0.0: + # months and years are no standardized time duration units + # -> isodate.parse_duration() will return 0.0 seconds for these input values because the underlying + # timedelta object can't handle conversion from months to .total_seconds() + # see: https://github.com/gweis/isodate/issues/44 + # and https://docs.python.org/3/library/datetime.html#datetime.timedelta + log.warning(f"Unhandled value detected: Cannot transform {time_raw} to total seconds!" 
+ f"(months (M) or years (Y) aren't standardized duration units)") + time_in_seconds = None + # ToDo: choose an acceptable solution + # 1) either approximate the total seconds (inaccurate: "P6M" becomes 6 x 4W = 24W) + # -> this would require RegEx parsing and string replacement of the month/year parts + # 2) or keep the string representation AND find a better suited edu-sharing property for durations else: log.warning(f"Encountered unhandled edge-case in '{item_field_name}': " f"Expected ISO-8601 duration string, but received {time_raw} instead.") diff --git a/tests/test_duration_conversion.py b/tests/test_duration_conversion.py new file mode 100644 index 00000000..00b04276 --- /dev/null +++ b/tests/test_duration_conversion.py @@ -0,0 +1,32 @@ +import pytest + +from converter.pipelines import determine_duration_and_convert_to_seconds + + +@pytest.mark.parametrize("test_input, expected_result", + [ + ("", None), + ("12:30:55", 45055), # 43200s + 1800s + 55s = 45055s + ("08:25:24", 30324), # 28800s + 1500s + 24s = 30324s + ("8:8:8", 29288), # 28800s + 480s + 8s = 29288 + ("86", 86), # typically found in RSS feeds + (" 120 ", 120), # input contains unnecessary whitespace + ("120.0", 120), # float edge-case + ("P7W", 4233600), # MOOCHub (v3) 'duration'-attribute uses a ISO-8601 format + ("P12W", 7257600), + ("P0.5D", 43200), # one decimal fraction is allowed according to ISO 8601 durations + ("P0,5D", 43200), + ("P1Y", None), + ("P6M", None), + (30.5, 30), + (30.0, 30), + ] + ) +def test_determine_duration_and_convert_to_seconds(test_input, expected_result): + """ + Test the conversion from "duration"-values (of unknown type) to seconds (int). + """ + # ToDo: + # - ISO-8601 edge-cases: "P6M" or "P1Y" cannot be converted to total seconds! + # - NLP not yet implemented: "12 Stunden" / "6 Monate" etc. (German or English strings) + assert determine_duration_and_convert_to_seconds(time_raw=test_input, item_field_name="TestItem") == expected_result From 6f664edc500bdd482eef695b9ba67d7539f2c012 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 12 Jul 2024 17:57:55 +0200 Subject: [PATCH 505/590] feat: introduce (physical) "address"-properties to LomLifecycleItem - if learning objects are provided with metadata in regard to (physical / postal) addresses (e.g. 
the publisher's address), we can now store these values according to the vCard v3 "ADR"-attribute specification (see: https://datatracker.ietf.org/doc/html/rfc2426#section-3.2.1) - addresses will use the RFC2426 recommendation for "TYPE": ["intl", "postal", "parcel", "work"] by default ToDos: - "address"-pipeline (pipelines.py) to type-check input values - oeh_spider needs an update to be able to handle these new properties --- converter/es_connector.py | 38 ++++++++++++++++++++++++++++++++++++++ converter/items.py | 13 +++++++++++++ 2 files changed, 51 insertions(+) diff --git a/converter/es_connector.py b/converter/es_connector.py index 27f32ba8..950f42de 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -465,9 +465,47 @@ def transform_item(self, uuid, spider, item): id_orcid: str = person["id_orcid"] if "id_orcid" in person else "" id_ror: str = person["id_ror"] if "id_ror" in person else "" id_wikidata: str = person["id_wikidata"] if "id_wikidata" in person else "" + address_city: str = person["address_city"] if "address_city" in person else "" + address_country: str = person["address_country"] if "address_country" in person else "" + address_postal_code: str = person["address_postal_code"] if "address_postal_code" in person else "" + address_region: str = person["address_region"] if "address_region" in person else "" + address_street: str = person["address_street"] if "address_street" in person else "" + address_type: str = person["address_type"] if "address_type" in person else "" + # create the vCard object first, then add attributes on-demand / if available vcard = vobject.vCard() vcard.add("n").value = vobject.vcard.Name(family=lastName, given=firstName) vcard.add("fn").value = organization if organization else (firstName + " " + lastName).strip() + # only the "fn"-attribute is required to serialize the vCard. (all other properties are optional) + if address_city or address_country or address_postal_code or address_region or address_street: + # The vCard v3 "ADR" property is used for physical addresses + # (for reference: https://datatracker.ietf.org/doc/html/rfc2426#section-3.2.1) + # To set "ADR"-attributes and values, we need to create an "Address"-object first + # see: https://github.com/py-vobject/vobject/blob/master/vobject/vcard.py#L54-L66 + # ToDo: implement "address"-pipeline + # (the vobject package expects a str or list[str] for these proeprties!) 
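# A hypothetical, self-contained sketch of the vobject calls used around this spot
# (the example address values are invented; the function name is not part of the codebase):
def _sketch_address_vcard() -> str:
    import vobject  # repeated inside the sketch so it runs on its own

    sketch_vcard = vobject.vCard()
    sketch_vcard.add("n").value = vobject.vcard.Name(family="Mustermann", given="Erika")
    sketch_vcard.add("fn").value = "Erika Mustermann"
    sketch_vcard.add("ADR").value = vobject.vcard.Address(
        street="Musterstrasse 1",
        city="Hannover",
        region="Niedersachsen",
        code="30159",
        country="Germany",
    )
    # RFC 2426 recommended default types for a physical address:
    sketch_vcard.adr.type_param = ["intl", "postal", "parcel", "work"]
    return sketch_vcard.serialize()  # serialized vCard contains an "ADR;TYPE=..." line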
+ address_object: vobject.vcard.Address = vobject.vcard.Address(street=address_street, + city=address_city, + region=address_region, + code=address_postal_code, + country=address_country) + vcard.add("ADR").value = address_object + if address_type: + # under normal circumstances, we only have to manually check the address types + # if we transfer learning objects via "oeh_spdier" + # or if a crawler sets the type manually + rfc2426_valid_address_types = ["dom", "intl", "postal", "parcel", "home", "work", "pref"] + if address_type and isinstance(address_type, str): + if address_type in rfc2426_valid_address_types: + vcard.adr.type_param = address_type + if address_type and isinstance(address_type, list): + address_type_clean: list[str] | None = None + for at_item in address_type: + if at_item in rfc2426_valid_address_types: + address_type_clean.append(at_item) + if address_type_clean: + vcard.adr.type_param = address_type_clean + else: + vcard.adr.type_param = ["intl", "postal", "parcel", "work"] # RFC2426 recommended default value if id_gnd: vcard.add("X-GND-URI").value = id_gnd if id_orcid: diff --git a/converter/items.py b/converter/items.py index 018e116e..1ab0d778 100644 --- a/converter/items.py +++ b/converter/items.py @@ -101,6 +101,19 @@ class LomLifecycleItem(Item): id_wikidata = Field() """The Wikidata identifier (URI) of an ORGANIZATION, e.g. "https://www.wikidata.org/wiki/". Values will be written into the vCard namespace 'X-Wikidata'.""" + address_city = Field() + """vCard v3 "ADR"-attribute for city strings.""" + address_country = Field() + """vCard v3 "ADR"-attribute for country strings.""" + address_postal_code = Field() + """vCard v3 "ADR"-attribute for postal code strings.""" + address_region = Field() + """vCard v3 "ADR"-attribute for region strings.""" + address_street = Field() + """vCard v3 "ADR"-attribute for street strings.""" + address_type = Field(output_processor=JoinMultivalues()) + """vCard v3 "ADR"-attribute type. 
Expects a single string or a list[str] from the following values: + ["dom", "intl", "postal", "parcel", "home", "work", "pref"]""" class LomTechnicalItem(Item): From ec725641c482d88d207b0d3316d741ee378275d0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 12 Jul 2024 17:59:18 +0200 Subject: [PATCH 506/590] todo: add "address"-related ToDos --- converter/spiders/base_classes/edu_sharing_base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/converter/spiders/base_classes/edu_sharing_base.py b/converter/spiders/base_classes/edu_sharing_base.py index 80a595f6..d586bac7 100644 --- a/converter/spiders/base_classes/edu_sharing_base.py +++ b/converter/spiders/base_classes/edu_sharing_base.py @@ -217,6 +217,9 @@ def get_lifecycle_from_vcard_string(lifecycle: LomLifecycleItemloader, role, vca if hasattr(vcard, "org"): vcard_org: str = vcard.org.value lifecycle.add_value("organization", vcard_org) + if hasattr(vcard, "adr"): + # ToDo: implement "address"-metadata (city, country, region, postal_code, street, type) + pass if hasattr(vcard, "x-es-lom-contribute-date"): # copy the contribution date only if available vcard_es_date: list = vcard.contents.get("x-es-lom-contribute-date") # edu-sharing contributor date From 5be831edb5d2cacb09f30a96b2c44f07f757c2ac Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Tue, 16 Jul 2024 10:50:42 +0200 Subject: [PATCH 507/590] Remove unused GitHub Actions workflow file --- .github/workflows/publish.yaml | 27 --------------------------- 1 file changed, 27 deletions(-) delete mode 100644 .github/workflows/publish.yaml diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml deleted file mode 100644 index 7400dc0f..00000000 --- a/.github/workflows/publish.yaml +++ /dev/null @@ -1,27 +0,0 @@ -name: publish - -on: - push: - branches: - - develop - - master - - '**' - tags: - - v[0-9]+.[0-9]+.[0-9]+ - -jobs: - build-and-publish: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - with: - submodules: true - - uses: azure/docker-login@v1 - with: - username: ${{ github.repository_owner }} - password: ${{ secrets.DOCKERHUB_PASSWORD }} - - uses: rlespinasse/github-slug-action@v2.x - - name: Build Docker image - run: docker build --tag ${{ github.repository }}:${{ env.GITHUB_REF_SLUG }} . - - name: Publish to DockerHub - run: docker push ${{ github.repository }}:${{ env.GITHUB_REF_SLUG }} From 432728c33299f0607a45dd2f231f0c276288b7a8 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Tue, 16 Jul 2024 12:27:58 +0200 Subject: [PATCH 508/590] Re-add `LomAnnotationItem` and missing dependencies --- converter/items.py | 8 ++++++++ requirements.txt | 2 ++ 2 files changed, 10 insertions(+) diff --git a/converter/items.py b/converter/items.py index 399206f2..1d8cc6aa 100644 --- a/converter/items.py +++ b/converter/items.py @@ -206,6 +206,14 @@ class LomRelationItem(Item): kind = Field() resource = Field(serializer=LomRelationResourceItem) +class LomAnnotationItem(Item): + """ + Following the LOM-DE.doc#8 (Annotation) specifications: http://sodis.de/lom-de/LOM-DE.doc . + """ + entity = Field() + date = Field() + description = Field() + class LomBaseItem(Item): """ LomBaseItem provides the nested structure for LOM (Sub-)Elements. No metadata is saved here. 
diff --git a/requirements.txt b/requirements.txt index 0dd1e44f..0db5e8d8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,6 +6,7 @@ automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" babel==2.14.0 ; python_version >= "3.10" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" black==24.1.1 ; python_version >= "3.10" and python_version < "4.0" +boto3==1.26.129 ; python_version >= "3.10" and python_version < "4.0" certifi==2024.2.2 ; python_version >= "3.10" and python_version < "4.0" cffi==1.16.0 ; python_version >= "3.10" and python_version < "4.0" charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" @@ -48,6 +49,7 @@ marisa-trie==0.7.8 ; python_version >= "3.10" and python_version < "4.0" mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" mf2py==2.0.1 ; python_version >= "3.10" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" +openpyxl==3.1.2 ; python_version >= "3.10" and python_version < "4.0" overrides==3.1.0 ; python_version >= "3.10" and python_version < "4.0" packaging==23.2 ; python_version >= "3.10" and python_version < "4.0" parsel==1.8.1 ; python_version >= "3.10" and python_version < "4.0" From 4f9780ae239db55b9b0e24db8139e279c98d27ff Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Tue, 16 Jul 2024 12:33:20 +0200 Subject: [PATCH 509/590] "loosen" used `urllib3` version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0db5e8d8..b4ffbdd5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -96,7 +96,7 @@ twisted==22.10.0 ; python_version >= "3.10" and python_version < "4.0" typing-extensions==4.9.0 ; python_version >= "3.10" and python_version < "4.0" tzdata==2023.4 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") tzlocal==5.2 ; python_version >= "3.10" and python_version < "4.0" -urllib3==2.2.0 ; python_version >= "3.10" and python_version < "4.0" +urllib3 ; python_version >= "3.10" and python_version < "4.0" vobject==0.9.6.1 ; python_version >= "3.10" and python_version < "4.0" w3lib==2.1.2 ; python_version >= "3.10" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" From 59fec86f6f5a4884e02b30d6b7f563fad09ba291 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 17 Jul 2024 10:01:39 +0200 Subject: [PATCH 510/590] refactor `OehImporter` to use `async` for sending items to pipeline --- schulcloud/oeh_importer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schulcloud/oeh_importer.py b/schulcloud/oeh_importer.py index 0a8ee68d..0d2da369 100644 --- a/schulcloud/oeh_importer.py +++ b/schulcloud/oeh_importer.py @@ -125,7 +125,7 @@ def process_node(self, node: dict): self.log.error(traceback.format_exc()) break - def send_to_pipeline(self, item: scrapy.Item): + async def send_to_pipeline(self, item: scrapy.Item): for pipeline in self.pipeline: # spider has to be an object with a "name" attribute item = pipeline.process_item(item, self) From 7ada697ebd71c320942a9fd62d84756efba0a1b8 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 18 Jul 2024 14:16:37 +0200 Subject: [PATCH 511/590] fix async / await issues --- run.py | 17 +++++++++-------- schulcloud/oeh_importer.py | 24 +++++++++++++----------- 2 files changed, 22 insertions(+), 19 deletions(-) diff --git 
a/run.py b/run.py index 83e157fe..95d6defe 100644 --- a/run.py +++ b/run.py @@ -1,4 +1,5 @@ +import asyncio import sys import datetime as dt import time @@ -126,9 +127,9 @@ def __init__(self, name: str, function: Callable, schedule: list[str]): if not self.schedule_rules: raise ValueError('No schedule') - def run_schedule(self): + async def run_schedule(self): if not self.schedule_rules: - self.run() + await self.run() return while True: now = dt.datetime.now() @@ -146,14 +147,14 @@ def run_schedule(self): time.sleep(min(time_remaining.total_seconds(), check_interval_seconds)) continue - self.run() + await self.run() break - def run(self): - self.function() + async def run(self): + await self.function() -def main(): +async def main(): env = Environment(env_vars=needed_env_vars) schedule = env['SCHEDULE'].split(';') crawler = env['CRAWLER'].lower() @@ -178,9 +179,9 @@ def main(): print(f'Unexpected execution target "{crawler}"', file=sys.stderr) return 1 - job.run_schedule() + await job.run_schedule() return 0 if __name__ == '__main__': - sys.exit(main()) + asyncio.run(main()) diff --git a/schulcloud/oeh_importer.py b/schulcloud/oeh_importer.py index 0d2da369..439c9ade 100644 --- a/schulcloud/oeh_importer.py +++ b/schulcloud/oeh_importer.py @@ -1,6 +1,5 @@ +import asyncio import datetime -import json -import os import sys import logging import time @@ -54,7 +53,7 @@ def __init__(self, **kwargs): self.fake_request = scrapy.http.Request(self.API_URL) self.fake_response = scrapy.http.Response(self.API_URL, request=self.fake_request) - def run(self): + async def run(self): i = 0 while True: nodes = self.request(i, 100) @@ -65,7 +64,7 @@ def run(self): if ending in ('mp4', 'h5p'): self.log.info('skipped') continue - self.process_node(node) + await self.process_node(node) i += len(nodes) if i >= self.total: break @@ -101,14 +100,14 @@ def request(self, offset: int, count: int): print(response.text) raise RuntimeError(f'Unexpected response: {response.status_code} {response.text}') - def process_node(self, node: dict): + async def process_node(self, node: dict): response_copy = self.fake_response.replace(url=node['content']['url']) self.fake_response.meta['item'] = node while True: try: if self.hasChanged(response_copy): - item = super(OehImporter, self).parse(response_copy) - self.send_to_pipeline(item) + item = await LomBase.parse(self, response_copy) + await self.send_to_pipeline(item) except ApiException as exc: # sometimes edusharing will return 401 "admin rights required" for all bulk.find requests if exc.status in (401, 503, 504): @@ -125,7 +124,7 @@ def process_node(self, node: dict): self.log.error(traceback.format_exc()) break - async def send_to_pipeline(self, item: scrapy.Item): + def send_to_pipeline(self, item: scrapy.Item): for pipeline in self.pipeline: # spider has to be an object with a "name" attribute item = pipeline.process_item(item, self) @@ -174,8 +173,8 @@ def getBase(self, response): return base # fulltext is handled in base, response is not necessary - def mapResponse(self, response, fetchData=True): - return LomBase.mapResponse(self, response, False) + async def mapResponse(self, response, fetchData=True): + return await LomBase.mapResponse(self, response, False) def getId(self, response=None) -> str: return response.meta["item"]["ref"]["id"] @@ -280,5 +279,8 @@ def shouldImport(self, response=None): return "ccm:collection_io_reference" not in response.meta["item"]["aspects"] +async def main(): + await OehImporter().run() + if __name__ == '__main__': - 
OehImporter().run() + asyncio.run(main()) From 6a5496030ae66e0d009e9a206aac1cda07888277 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 18 Jul 2024 14:24:37 +0200 Subject: [PATCH 512/590] remove unnecessary `await` --- schulcloud/oeh_importer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schulcloud/oeh_importer.py b/schulcloud/oeh_importer.py index 439c9ade..826c6865 100644 --- a/schulcloud/oeh_importer.py +++ b/schulcloud/oeh_importer.py @@ -107,7 +107,7 @@ async def process_node(self, node: dict): try: if self.hasChanged(response_copy): item = await LomBase.parse(self, response_copy) - await self.send_to_pipeline(item) + self.send_to_pipeline(item) except ApiException as exc: # sometimes edusharing will return 401 "admin rights required" for all bulk.find requests if exc.status in (401, 503, 504): From 38033ef5a7fe3a8e876f68c6ddddc4c87414de1f Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 18 Jul 2024 15:08:12 +0200 Subject: [PATCH 513/590] add missing field `relation` to `LomBaseItem` --- converter/items.py | 1 + 1 file changed, 1 insertion(+) diff --git a/converter/items.py b/converter/items.py index 1d8cc6aa..c54d19c9 100644 --- a/converter/items.py +++ b/converter/items.py @@ -226,6 +226,7 @@ class LomBaseItem(Item): lifecycle = Field(serializer=LomLifecycleItem, output_processor=JoinMultivalues()) # rights = Field(serializer=LomRightsItem) technical = Field(serializer=LomTechnicalItem) + relation = Field(serializer=LomRelationItem, output_processor=JoinMultivalues()) class ResponseItem(Item): From 0e7fdfffab4fa812c38b16968b7db4aec89305c4 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 18 Jul 2024 15:09:02 +0200 Subject: [PATCH 514/590] Add fix for SkoHub "altLabel" processing in pipelines.py - see https://github.com/openeduhub/oeh-search-etl/commit/dd4502dd3388e9ade52f4dc30bbf2eec4b3e8b13 for more information --- converter/pipelines.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index c3fabc91..9a53644c 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -319,12 +319,20 @@ def process_item(self, raw_item, spider): mapped = [] for entry in json[key]: _id = {} - valuespace = self.valuespaces.data[key] + valuespace: list[dict] = self.valuespaces.data[key] found = False for v in valuespace: labels = list(v["prefLabel"].values()) if "altLabel" in v: - labels = labels + list(v["altLabel"].values()) + # the Skohub update on 2024-04-19 generates altLabels as a list[str] per language ("de", "en) + # (for details, see: https://github.com/openeduhub/oeh-metadata-vocabs/pull/65) + alt_labels: list[list[str]] = list(v["altLabel"].values()) + if alt_labels and isinstance(alt_labels, list): + for alt_label in alt_labels: + if alt_label and isinstance(alt_label, list): + labels.extend(alt_label) + if alt_label and isinstance(alt_label, str): + labels.append(alt_label) labels = list(map(lambda x: x.casefold(), labels)) if v["id"].endswith(entry) or entry.casefold() in labels: _id = v["id"] From c8d4456130715e5fd2c6ea6dfca3688fc4fbc2ac Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Fri, 19 Jul 2024 07:45:46 +0200 Subject: [PATCH 515/590] only use await on `process_item` coroutines --- schulcloud/oeh_importer.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/schulcloud/oeh_importer.py b/schulcloud/oeh_importer.py index 826c6865..266fa4fa 100644 --- a/schulcloud/oeh_importer.py +++ 
b/schulcloud/oeh_importer.py @@ -107,7 +107,7 @@ async def process_node(self, node: dict): try: if self.hasChanged(response_copy): item = await LomBase.parse(self, response_copy) - self.send_to_pipeline(item) + await self.send_to_pipeline(item) except ApiException as exc: # sometimes edusharing will return 401 "admin rights required" for all bulk.find requests if exc.status in (401, 503, 504): @@ -124,10 +124,13 @@ async def process_node(self, node: dict): self.log.error(traceback.format_exc()) break - def send_to_pipeline(self, item: scrapy.Item): + async def send_to_pipeline(self, item: scrapy.Item): for pipeline in self.pipeline: # spider has to be an object with a "name" attribute - item = pipeline.process_item(item, self) + if asyncio.iscoroutinefunction(pipeline.process_item): + item = await pipeline.process_item(item, self) + else: + item = pipeline.process_item(item, self) def getProperty(self, name, response): return ( From ffe96d1db4431339dc698e4542f6c85fccff73d6 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Jul 2024 14:59:29 +0200 Subject: [PATCH 516/590] log/tests: improve logging messages related to handling of "0"-durations - while debugging twillo API responses, there were several items that provided a duration value of "0" for their "cclom:typicallearningtime" property - for such items with zero duration values, the frontend doesn't show the "duration"-property at all - our pipeline assumes that a valid duration value is greater than 0 - improved clarity of logging messages with an additional check, so crawling items with "0"-durations won't throw unnecessary warnings - tests: added a test-case for duration parsing of "0"-strings --- converter/pipelines.py | 23 +++++++++++++++++------ tests/test_duration_conversion.py | 1 + 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index da7eaaa5..e450b715 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -362,8 +362,12 @@ def determine_duration_and_convert_to_seconds(time_raw: str | int | float, f"(Unhandled edge-case: Expected int or float value, " f"but received {type(time_raw)} instead.") if not time_in_seconds: - log.warning(f"Unable to convert '{item_field_name}'-value (type: {type(time_raw)}) from {time_raw} " - f"to numeric value (seconds).") + if isinstance(time_in_seconds, int) and time_in_seconds == 0: + log.debug(f"Detected zero duration for '{item_field_name}'. " + f"Received raw value: {time_raw} of type {type(time_raw)} .") + else: + log.warning(f"Unable to convert '{item_field_name}'-value (type: {type(time_raw)}) from {time_raw} " + f"to numeric value (seconds).") return time_in_seconds @@ -437,12 +441,19 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr time_raw=course_duration, item_field_name="CourseItem.course_duration" ) - if course_duration and isinstance(course_duration, int): - # happy-case - pass + if isinstance(course_duration, int): + if course_duration: + # happy-case: a duration greater than 0 + pass + elif course_duration == 0: + # a duration of zero seconds is not a valid time duration, but most likely just a limitation + # of different backend systems how they store "empty" values for this metadata property. + log.debug(f"Received zero duration value within 'course_duration'-property of item " + f"{item_adapter['sourceId']}. 
Deleting property ...") + del course_adapter["course_duration"] else: log.warning(f"Cannot process BIRD 'course_duration'-property for item {item_adapter['sourceId']} . " - f"Expected a single integer value (in seconds), " + f"Expected a single (positive) integer value (in seconds), " f"but received {type(course_duration)} instead. Deleting property...") del course_adapter["course_duration"] diff --git a/tests/test_duration_conversion.py b/tests/test_duration_conversion.py index 00b04276..bf0114b0 100644 --- a/tests/test_duration_conversion.py +++ b/tests/test_duration_conversion.py @@ -6,6 +6,7 @@ @pytest.mark.parametrize("test_input, expected_result", [ ("", None), + ("0", 0), ("12:30:55", 45055), # 43200s + 1800s + 55s = 45055s ("08:25:24", 30324), # 28800s + 1500s + 24s = 30324s ("8:8:8", 29288), # 28800s + 480s + 8s = 29288 From 6afa8380a7c3f114509ba76e5a8056049f7d2b70 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Jul 2024 19:12:57 +0200 Subject: [PATCH 517/590] feat: 'CourseItem.course_learning_outcome'-pipeline handling for lists of strings - change: enable output_processor for 'course_learning_outcome'-field to allow correct parsing of individual string entries to a list[str] by calling the ".add_value()"-method within a crawler --- converter/items.py | 2 +- converter/pipelines.py | 21 +++++++++++++++++---- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/converter/items.py b/converter/items.py index 1ab0d778..e0b79c6a 100644 --- a/converter/items.py +++ b/converter/items.py @@ -348,7 +348,7 @@ class CourseItem(Item): Corresponding edu-sharing property: 'cclom:typicallearningtime'. (ATTENTION: edu-sharing expects 'cclom:typicallearningtime'-values (type: int) in milliseconds! -> the es_connector will handle transformation from s to ms.)""" - course_learningoutcome = Field() + course_learningoutcome = Field(output_processor=JoinMultivalues()) """Describes "Lernergebnisse" or "learning objectives". (Expects a string, with or without HTML-formatting!) Corresponding edu-sharing property: 'ccm:learninggoal'""" course_schedule = Field() diff --git a/converter/pipelines.py b/converter/pipelines.py index e450b715..6f923f0b 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -459,10 +459,23 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr if "course_learningoutcome" in course_adapter: # course_learningoutcome expects a string (with or without HTML formatting) - course_learning_outcome = course_adapter["course_learningoutcome"] - if course_learning_outcome and isinstance(course_learning_outcome, str): - # happy-case - pass + course_learning_outcome: list[str] | str | None = course_adapter["course_learningoutcome"] + if course_learning_outcome: + if isinstance(course_learning_outcome, str): + # happy-case: there's a single string value in course_learningoutcome + pass + elif isinstance(course_learning_outcome, list): + course_learning_outcome_clean: list[str] = list() + for clo_candidate in course_learning_outcome: + if clo_candidate and isinstance(clo_candidate, str): + # happy case: this list value is a string + course_learning_outcome_clean.append(clo_candidate) + else: + # if the list item isn't a string, we won't save it to the cleaned up list + log.warning(f"Received unexpected type as part of 'course_learningoutcome': " + f"Expected list[str], but received a {type(clo_candidate)} " + f"instead. 
Raw value: {clo_candidate}") + course_adapter["course_learningoutcome"] = course_learning_outcome_clean else: log.warning( f"Cannot process BIRD 'course_learningoutcome'-property for item {item_adapter['sourceId']} " From b34d15758e47d1fa07b768464f8039d04a6246a0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 23 Jul 2024 19:16:41 +0200 Subject: [PATCH 518/590] oersi_spider v0.2.7 (twillo metadata enrichment) (squashed) - feat: oersi_spider tries to match items from OERSI with twillo's edu-sharing API and enrich the following metadata properties: - twillo "Duration" -> BIRD "course_duration" - twillo "Function" -> BIRD "course_learningoutcome" - feat: save twillo "Field Report"-metadata to "LomEducationalItem.description" - retrieve twillo's "Field Report"-metadata from twillo's edu-sharing repository ("cclom:educational_description") and store it within LomEducationalItem.description (same property-name in WLO) squashed commits: - change/refactor: use "scrapy.Request" for twillo metadata enrichment - to decrease burst spikes of HTTP requests, reworked/refactored the way "twillo"-metadata is retrieved for OERSI items - style: code formatting via black - fix: twillo URL lookup in elastic_item - fix: accidental double processing of twillo items --- converter/spiders/oersi_spider.py | 315 +++++++++++++++++++++++++----- 1 file changed, 271 insertions(+), 44 deletions(-) diff --git a/converter/spiders/oersi_spider.py b/converter/spiders/oersi_spider.py index 04a4340f..2e2ca03f 100644 --- a/converter/spiders/oersi_spider.py +++ b/converter/spiders/oersi_spider.py @@ -41,7 +41,7 @@ class OersiSpider(scrapy.Spider, LomBase): name = "oersi_spider" # start_urls = ["https://oersi.org/"] friendlyName = "OERSI" - version = "0.2.6" # last update: 2024-05-28 + version = "0.2.7" # last update: 2024-07-30 custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, @@ -199,8 +199,8 @@ def check_item_and_yield_to_parse_method(self, elastic_item: dict) -> scrapy.Req ) return None if ( - self.getId(response=None, elastic_item=elastic_item) is not None - and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None + self.getId(response=None, elastic_item=elastic_item) is not None + and self.getHash(response=None, elastic_item_source=elastic_item["_source"]) is not None ): if not self.hasChanged(None, elastic_item=elastic_item): return None @@ -509,12 +509,12 @@ def hasChanged(self, response=None, elastic_item: dict = dict) -> bool: return changed def get_lifecycle_author( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - date_created: Optional[str] = None, - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + date_created: Optional[str] = None, + date_published: Optional[str] = None, ): """ If a "creator"-field is available in the OERSI API for a specific '_source'-item, creates an 'author'-specific @@ -582,11 +582,11 @@ def get_lifecycle_author( return authors def get_affiliation_and_save_to_lifecycle( - self, - affiliation_dict: dict, - lom_base_item_loader: LomBaseItemloader, - organization_fallback: set[str], - lifecycle_role: str, + self, + affiliation_dict: dict, + lom_base_item_loader: LomBaseItemloader, + organization_fallback: set[str], + lifecycle_role: str, ): """ Retrieves metadata from OERSI's "affiliation"-field (which is typically found 
within a "creator"- or @@ -650,11 +650,11 @@ def validate_academic_title_string(self, honorific_prefix: str) -> str: return honorific_prefix.strip() def get_lifecycle_contributor( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organization_fallback: set[str], - author_list: Optional[list[str]] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organization_fallback: set[str], + author_list: Optional[list[str]] = None, ): """ Collects metadata from the OERSI "contributor"-field and stores it within a LomLifecycleItemLoader. @@ -756,11 +756,11 @@ def get_lifecycle_metadata_provider(lom_base_item_loader: LomBaseItemloader, oer lom_base_item_loader.add_value("lifecycle", lifecycle_metadata_provider.load_item()) def get_lifecycle_publisher( - self, - lom_base_item_loader: LomBaseItemloader, - elastic_item_source: dict, - organizations_from_publisher_fields: set[str], - date_published: Optional[str] = None, + self, + lom_base_item_loader: LomBaseItemloader, + elastic_item_source: dict, + organizations_from_publisher_fields: set[str], + date_published: Optional[str] = None, ): """ Collects metadata from OERSI's "publisher"-field and stores it within a LomLifecycleItemLoader. Successfully @@ -796,7 +796,7 @@ def get_lifecycle_publisher( lom_base_item_loader.add_value("lifecycle", lifecycle_publisher.load_item()) def get_lifecycle_organization_from_source_organization_fallback( - self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] + self, elastic_item_source: dict, lom_item_loader: LomBaseItemloader, organization_fallback: set[str] ): # ATTENTION: the "sourceOrganization"-field is not part of the AMB draft, therefore this method is currently # used a fallback, so we don't lose any useful metadata (even if that metadata is not part of the AMB spec). 
@@ -838,8 +838,7 @@ def get_lifecycle_organization_from_source_organization_fallback( lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) def get_lifecycle_publisher_from_source_organization( - self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, - previously_collected_publishers: set[str] + self, lom_item_loader: LomBaseItemloader, elastic_item_source: dict, previously_collected_publishers: set[str] ): source_organizations: list[dict] = elastic_item_source.get("sourceOrganization") for so in source_organizations: @@ -860,7 +859,7 @@ def get_lifecycle_publisher_from_source_organization( lom_item_loader.add_value("lifecycle", lifecycle_org.load_item()) def lifecycle_determine_type_of_identifier_and_save_uri( - self, item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader + self, item_dictionary: dict, lifecycle_item_loader: LomLifecycleItemloader ): """ OERSI's "creator"/"contributor"/"affiliation" items might contain an 'id'-field which (optionally) provides @@ -873,10 +872,10 @@ def lifecycle_determine_type_of_identifier_and_save_uri( # "creator.id" can be 'null', therefore we need to explicitly check its type before trying to parse it uri_string: str = item_dictionary.get("id") if ( - "orcid.org" in uri_string - or "/gnd/" in uri_string - or "wikidata.org" in uri_string - or "ror.org" in uri_string + "orcid.org" in uri_string + or "/gnd/" in uri_string + or "wikidata.org" in uri_string + or "ror.org" in uri_string ): if "/gnd/" in uri_string: lifecycle_item_loader.add_value("id_gnd", uri_string) @@ -953,8 +952,9 @@ def enrich_imoox_metadata(self, base_itemloader: BaseItemLoader, elastic_item: d course_itemloader.add_value("course_availability_from", start_date_raw) else: self.logger.warning( - f"Received unexpected type for \"startDate\" {start_date_raw} . " - f"Expected str, but received {type(start_date_raw)} instead.") + f'Received unexpected type for "startDate" {start_date_raw} . ' + f"Expected str, but received {type(start_date_raw)} instead." + ) if "endDate" in imoox_attributes: end_dates: list[str] = imoox_attributes["endDate"] if end_dates and isinstance(end_dates, list): @@ -963,7 +963,7 @@ def enrich_imoox_metadata(self, base_itemloader: BaseItemLoader, elastic_item: d course_itemloader.add_value("course_availability_until", end_date_raw) else: self.logger.warning( - f"Received unexpected type for \"endDate\" {end_date_raw}. " + f'Received unexpected type for "endDate" {end_date_raw}. ' f"Expected str, but received {type(end_date_raw)} instead." ) if "trailer" in imoox_attributes: @@ -1049,11 +1049,11 @@ def enrich_imoox_metadata(self, base_itemloader: BaseItemLoader, elastic_item: d base_itemloader.add_value("course", course_itemloader.load_item()) def enrich_vhb_metadata( - self, - base_itemloader: BaseItemLoader, - elastic_item: dict, - lom_general_itemloader: LomGeneralItemloader, - in_languages: list[str] | None, + self, + base_itemloader: BaseItemLoader, + elastic_item: dict, + lom_general_itemloader: LomGeneralItemloader, + in_languages: list[str] | None, ): """ Combines metadata from OERSI's elastic_item with MOOCHub v2.x metadata from the source (vhb) @@ -1117,9 +1117,10 @@ def enrich_vhb_metadata( if start_date_raw and isinstance(start_date_raw, str): course_itemloader.add_value("course_availability_from", start_date_raw) else: - self.logger.warning(f"Received unexpected type for \"startDate\" {start_date_raw} . " - f"Expected a string, but received {type(start_date_raw)} instead." 
- ) + self.logger.warning( + f'Received unexpected type for "startDate" {start_date_raw} . ' + f"Expected a string, but received {type(start_date_raw)} instead." + ) if "video" in vhb_item_matched["attributes"]: video_item: dict = vhb_item_matched["attributes"]["video"] if video_item: @@ -1175,7 +1176,7 @@ def enrich_vhb_metadata( # timedelta has no parameter for months # -> X months = X * (4 weeks) duration_delta = duration_delta + ( - duration_number * datetime.timedelta(weeks=4) + duration_number * datetime.timedelta(weeks=4) ) case _: self.logger.warning( @@ -1189,6 +1190,211 @@ def enrich_vhb_metadata( course_itemloader.add_value("course_duration", workload_in_seconds) base_itemloader.add_value("course", course_itemloader.load_item()) + def look_for_twillo_url_in_elastic_item(self, elastic_item: dict) -> str | None: + """ + Look for a twillo.de URL with an "/edu-sharing/"-path within OERSI's "_source.id" and "mainEntityOfPage.id" + properties. + Returns the twillo URL string if successful, otherwise returns None. + """ + twillo_url: str | None = None + twillo_url_from_source_id: str = self.getId(response=None, elastic_item=elastic_item) + twillo_url_from_maeop_id: str | None = None + twillo_edu_sharing_url_path: str = "twillo.de/edu-sharing/components/render/" + if "_source" in elastic_item: + elastic_item_source: dict = elastic_item["_source"] + if "mainEntityOfPage" in elastic_item_source: + main_entity_of_page: list[dict] = elastic_item_source["mainEntityOfPage"] + for maeop_item in main_entity_of_page: + if "id" in maeop_item: + maeop_id: str = maeop_item["id"] + if maeop_id and twillo_edu_sharing_url_path in maeop_id: + twillo_url_from_maeop_id = maeop_id + if twillo_url_from_source_id and twillo_edu_sharing_url_path in twillo_url_from_source_id: + twillo_url = twillo_url_from_source_id + elif twillo_url_from_maeop_id: + twillo_url = twillo_url_from_maeop_id + return twillo_url + + @staticmethod + def extract_twillo_node_id_from_url(twillo_url: str) -> str | None: + """ + Extract the twillo nodeId from a provided URL string. + """ + if twillo_url and isinstance(twillo_url, str): + twillo_node_id: str | None = None + if "twillo.de/edu-sharing/components/render/" in twillo_url: + potential_twillo_node_id = twillo_url.split("/")[-1] + if potential_twillo_node_id and isinstance(potential_twillo_node_id, str): + twillo_node_id = potential_twillo_node_id + if twillo_node_id: + return twillo_node_id + else: + return None + + def request_metadata_for_twillo_node_id( + self, + base_itemloader: BaseItemLoader, + lom_base_itemloader: LomBaseItemloader, + lom_classification_itemloader: LomClassificationItemLoader, + lom_educational_itemloader: LomEducationalItemLoader, + lom_general_itemloader: LomGeneralItemloader, + lom_technical_itemloader: LomTechnicalItemLoader, + license_itemloader: LicenseItemLoader, + valuespaces_itemloader: ValuespaceItemLoader, + elastic_item: dict, + twillo_node_id: str, + ): + """ + Query the edu-sharing repository of twillo.de for metadata of a specific nodeId. + If the request was successful, return the response dictionary. 
+ """ + twillo_api_request_url = ( + f"https://www.twillo.de/edu-sharing/rest/rendering/v1/details/-home-/" f"{twillo_node_id}" + ) + # note: we NEED to use the "rendering/v1/details/..."-API-endpoint because + # https://www.twillo.de/edu-sharing/rest/node/v1/nodes/-home-/{twillo_node_id}/metadata?propertyFilter=-all- + # throws "org.edu_sharing.restservices.DAOSecurityException"-errors (for hundreds of objects!), + # even though the queried learning objects are publicly available and reachable + twillo_request = scrapy.Request( + url=twillo_api_request_url, + priority=2, + callback=self.enrich_oersi_item_with_twillo_metadata, + cb_kwargs={ + "elastic_item": elastic_item, + "twillo_node_id": twillo_node_id, + "base_itemloader": base_itemloader, + "lom_base_itemloader": lom_base_itemloader, + "lom_classification_itemloader": lom_classification_itemloader, + "lom_educational_itemloader": lom_educational_itemloader, + "lom_general_itemloader": lom_general_itemloader, + "lom_technical_itemloader": lom_technical_itemloader, + "license_itemloader": license_itemloader, + "valuespaces_itemloader": valuespaces_itemloader, + }, + ) + yield twillo_request + + def enrich_oersi_item_with_twillo_metadata( + self, + response: scrapy.http.TextResponse, + base_itemloader: BaseItemLoader, + lom_base_itemloader: LomBaseItemloader, + lom_classification_itemloader: LomClassificationItemLoader, + lom_educational_itemloader: LomEducationalItemLoader, + lom_general_itemloader: LomGeneralItemloader, + lom_technical_itemloader: LomTechnicalItemLoader, + license_itemloader: LicenseItemLoader, + valuespaces_itemloader: ValuespaceItemLoader, + elastic_item: dict = None, + twillo_node_id: str = None, + ): + """ + Process the twillo API response and enrich the OERSI item with twillo metadata properties (if possible). + If the twillo API response was invalid (or didn't provide the metadata we were looking for), yield the + (complete) BaseItem. + + @param response: the twillo API response (JSON) + @param base_itemloader: BaseItemLoader object + @param lom_base_itemloader: LomBaseItemloader object + @param lom_classification_itemloader: LomClassificationItemloader object + @param lom_educational_itemloader: LomEducationalItemloader object + @param lom_general_itemloader: LomGeneralItemloader object + @param lom_technical_itemloader: LomTechnicalItemloader object + @param license_itemloader: LicenseItemloader object + @param valuespaces_itemloader: ValuespacesItemloader object + @param elastic_item: the ElasticSearch item from the OERSI API + @param twillo_node_id: the twillo "nodeId" + @return: the complete BaseItem object + """ + twillo_response: scrapy.http.TextResponse = response + twillo_response_json: dict | None = None + twillo_metadata: dict | None = None + try: + twillo_response_json: dict = twillo_response.json() + except requests.exceptions.JSONDecodeError: + self.logger.warning(f"BIRD: Received invalid JSON response from {response.url} :" f"{twillo_response}") + if twillo_response_json and isinstance(twillo_response_json, dict): + if "node" in twillo_response_json: + # we assume that the response is valid if we receive a dictionary containing + # "node" as the main key + twillo_metadata = twillo_response_json + else: + self.logger.warning( + f"BIRD: twillo API response for nodeId {twillo_node_id} " f"was invalid: {twillo_response_json}" + ) + else: + self.logger.warning( + f"BIRD: Failed to extract additional metadata for twillo " + f"nodeId {twillo_node_id} ! 
" + f"Received HTTP Response Status {twillo_response.status}." + ) + + if twillo_metadata and isinstance(twillo_metadata, dict): + # the API response should contain a "node"-key which contains all properties within + node_dict: dict = twillo_metadata["node"] + if node_dict and "properties" in node_dict: + node_properties: dict = node_dict["properties"] + if node_properties: + twillo_typical_learning_time: list[str] | None = None + twillo_cclom_context: list[str] | None = None + # ToDo: + # - twillo "Level" ("cclom:interactivitylevel") -> BIRD + # - twillo "Event Format" ("cclom:interactivitytype") -> BIRD + # - twillo "Technical Requirements" ("cclom:otherplatformrequirements") -> BIRD + if "cclom:typicallearningtime" in node_properties: + # twillo "Duration" ("cclom:typicallearningtime") -> BIRD "course_duration" + twillo_typical_learning_time: list[str] = node_properties["cclom:typicallearningtime"] + if "cclom:context" in node_properties: + # twillo "Function" ("cclom:context") -> BIRD "course_learningoutcome" + twillo_cclom_context: list[str] = node_properties["cclom:context"] + if "cclom:educational_description" in node_properties: + educational_description_raw: list[str] = node_properties["cclom:educational_description"] + if educational_description_raw and isinstance(educational_description_raw, list): + # twillo Frontend: "Field Report" (= "cclom:educational_description") + for edu_desc_item in educational_description_raw: + if edu_desc_item and isinstance(edu_desc_item, str): + # strip whitespace and sort out the invalid (empty) strings first + edu_desc_item: str = edu_desc_item.strip() + # ToDo: move responsibility of cleaning up "description"-strings + # into its own pipeline + if edu_desc_item: + lom_educational_itemloader.add_value("description", edu_desc_item) + if twillo_typical_learning_time or twillo_cclom_context: + course_item_loader = CourseItemLoader() + if twillo_typical_learning_time: + course_item_loader.add_value("course_duration", twillo_typical_learning_time) + if twillo_cclom_context: + context_cleaned: list[str] = list() + if twillo_cclom_context and isinstance(twillo_cclom_context, list): + for context_value in twillo_cclom_context: + if context_value and isinstance(context_value, str): + context_value = context_value.strip() + # whitespace typos and empty string values (" ") are removed + if context_value: + context_cleaned.append(context_value) + if context_cleaned: + course_item_loader.add_value("course_learningoutcome", context_cleaned) + base_itemloader.add_value("course", course_item_loader.load_item()) + + # noinspection DuplicatedCode + lom_base_itemloader.add_value("general", lom_general_itemloader.load_item()) + lom_base_itemloader.add_value("technical", lom_technical_itemloader.load_item()) + lom_base_itemloader.add_value("educational", lom_educational_itemloader.load_item()) + lom_base_itemloader.add_value("classification", lom_classification_itemloader.load_item()) + base_itemloader.add_value("lom", lom_base_itemloader.load_item()) + base_itemloader.add_value("valuespaces", valuespaces_itemloader.load_item()) + base_itemloader.add_value("license", license_itemloader.load_item()) + + permissions = super().getPermissions(response) + base_itemloader.add_value("permissions", permissions.load_item()) + + response_loader = ResponseItemLoader() + identifier_url: str = self.get_item_url(elastic_item=elastic_item) + response_loader.add_value("url", identifier_url) + base_itemloader.add_value("response", response_loader.load_item()) + + yield 
base_itemloader.load_item() + def parse(self, response=None, **kwargs): elastic_item: dict = kwargs.get("elastic_item") elastic_item_source: dict = elastic_item.get("_source") @@ -1518,6 +1724,27 @@ def parse(self, response=None, **kwargs): if query_parameter_provider_name: if query_parameter_provider_name == "iMoox": self.enrich_imoox_metadata(base, elastic_item) + if query_parameter_provider_name == "twillo": + twillo_url: str | None = self.look_for_twillo_url_in_elastic_item(elastic_item) + # a typical twillo URL could look like this example: + # https://www.twillo.de/edu-sharing/components/render/106ed8e7-1d07-4a77-8ca2-19c9e28782ed + # we need the nodeId (the last part of the url) to create an API request for its metadata + twillo_node_id: str | None = self.extract_twillo_node_id_from_url(twillo_url) + if twillo_node_id: + # if a twillo nodeId was found, the complete item will be yielded by its own method + yield from self.request_metadata_for_twillo_node_id( + base_itemloader=base, + lom_base_itemloader=lom, + lom_classification_itemloader=classification, + lom_educational_itemloader=educational, + lom_general_itemloader=general, + lom_technical_itemloader=technical, + license_itemloader=license_loader, + valuespaces_itemloader=vs, + elastic_item=elastic_item, + twillo_node_id=twillo_node_id, + ) + return None # necessary to not accidentally parse the same twillo item twice! if query_parameter_provider_name == "vhb": self.enrich_vhb_metadata(base, elastic_item, general, in_languages) # --- BIRD HOOKS END HERE! From b42d8b5ac3f2622e3e95508586e9daca41cceb55 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 24 Jul 2024 11:23:28 +0200 Subject: [PATCH 519/590] fix: convert course_learningoutcome to string - edu-sharing expects 'course_learningoutcome' to be a single-value string, therefore we need to make sure that we convert the scrapy Field (list[str]) to a string in the es_connector.py --- converter/es_connector.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 950f42de..a3e32ce6 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -593,7 +593,13 @@ def transform_item(self, uuid, spider, item): log.warning(f"Could not transform 'course_duration' {course_duration} to ms. 
" f"Expected int (seconds), but received type {type(course_duration)} instead.") if "course_learningoutcome" in item["course"]: - spaces["ccm:learninggoal"] = item["course"]["course_learningoutcome"] + course_learning_outcome: list[str] = item["course"]["course_learningoutcome"] + if course_learning_outcome and isinstance(course_learning_outcome, list): + # convert the array of strings to a single string, divided by semicolons + course_learning_outcome: str = "; ".join(course_learning_outcome) + if course_learning_outcome and isinstance(course_learning_outcome, str): + # edu-sharing expects a string value for this field + spaces["ccm:learninggoal"] = course_learning_outcome if "course_schedule" in item["course"]: spaces["ccm:oeh_course_schedule"] = item["course"]["course_schedule"] if "course_url_video" in item["course"]: From 6205744787b1ded8e31ca7a5cce805eecab91407 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 24 Jul 2024 11:46:46 +0200 Subject: [PATCH 520/590] fix: course_duration handling of strings - type-check and convert raw values first before trying to save the int value to "cclom:typicallearningtime" - this fixes a problem where numeric string values would not get properly converted to integer values - style: add missing TypeHints --- converter/es_connector.py | 7 +++++-- converter/pipelines.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index a3e32ce6..aa556809 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -583,10 +583,13 @@ def transform_item(self, uuid, spider, item): if "course_description_short" in item["course"]: spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] if "course_duration" in item["course"]: - course_duration: int = item["course"]["course_duration"] + course_duration: int | str = item["course"]["course_duration"] + if course_duration and isinstance(course_duration, str) and course_duration.isnumeric(): + # convert strings to int values + course_duration = int(course_duration) if course_duration and isinstance(course_duration, int): # edu-sharing property 'cclom:typicallearningtime' expects values in ms! - course_duration_in_ms = int(course_duration * 1000) + course_duration_in_ms: int = int(course_duration * 1000) item["course"]["course_duration"] = course_duration_in_ms spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] else: diff --git a/converter/pipelines.py b/converter/pipelines.py index 6f923f0b..79487107 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -297,7 +297,7 @@ def determine_duration_and_convert_to_seconds(time_raw: str | int | float, @param item_field_name: scrapy item field-name (required for precise logging messages) @return: total seconds (int) value of duration or None """ - time_in_seconds = None + time_in_seconds: int | None = None # why are we converting values to int? reason: 'cclom:typicallearningtime' expects values to be in milliseconds! 
# (this method converts values to seconds and es_connector.py converts the values to ms) if time_raw and isinstance(time_raw, str): From e821734e7981244c57988e9366c6c8637e537710 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 25 Jul 2024 19:04:37 +0200 Subject: [PATCH 521/590] feat: connect "LomEducationalItem.description" with the edu-sharing backend ("cclom:educational_description") --- converter/es_connector.py | 22 +++++++++++++++------- converter/items.py | 4 ++-- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index aa556809..0ec916b9 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -482,7 +482,7 @@ def transform_item(self, uuid, spider, item): # To set "ADR"-attributes and values, we need to create an "Address"-object first # see: https://github.com/py-vobject/vobject/blob/master/vobject/vcard.py#L54-L66 # ToDo: implement "address"-pipeline - # (the vobject package expects a str or list[str] for these proeprties!) + # (the vobject package expects a str or list[str] for these properties!) address_object: vobject.vcard.Address = vobject.vcard.Address(street=address_street, city=address_city, region=address_region, @@ -566,12 +566,20 @@ def transform_item(self, uuid, spider, item): splitted = valuespaceMapping[key].split(":") splitted[0] = "virtual" spaces[":".join(splitted)] = item["valuespaces_raw"][key] - if "typicalAgeRange" in item["lom"]["educational"]: - tar = item["lom"]["educational"]["typicalAgeRange"] - if "fromRange" in tar: - spaces["ccm:educationaltypicalagerange_from"] = tar["fromRange"] - if "toRange" in tar: - spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] + + if "educational" in item["lom"]: + if "description" in item["lom"]["educational"]: + educational_description: list[str] = item["lom"]["educational"]["description"] + if educational_description: + # ToDo: implement "description"-pipeline (in pipelines.py) + spaces["cclom:educational_description"] = educational_description + pass + if "typicalAgeRange" in item["lom"]["educational"]: + tar = item["lom"]["educational"]["typicalAgeRange"] + if "fromRange" in tar: + spaces["ccm:educationaltypicalagerange_from"] = tar["fromRange"] + if "toRange" in tar: + spaces["ccm:educationaltypicalagerange_to"] = tar["toRange"] if "course" in item: if "course_availability_from" in item["course"]: diff --git a/converter/items.py b/converter/items.py index e0b79c6a..dbe92621 100644 --- a/converter/items.py +++ b/converter/items.py @@ -159,8 +159,8 @@ class LomEducationalItem(Item): - context (see: 'valuespaces.educationalContext') """ - description = Field() - # ToDo: 'description' isn't mapped to any field in edu-sharing + description = Field(output_processor=JoinMultivalues()) + """Corresponding edu-sharing property: 'cclom:educational_description'""" difficulty = Field() """Corresponding edu-sharing property: 'ccm:educationaldifficulty'""" # ToDo: 'ccm:educationaldifficulty' is currently not used in edu-sharing / WLO From 661c54e4c492ba0ac0de4e08f81d4f8fa2d8b2b8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 26 Jul 2024 22:05:26 +0200 Subject: [PATCH 522/590] fix: LomLifecycle contributor 'date' parsing when encountering 'datetime'-objects - the 'dateparser'-package expects a string to parse and the previous implementation threw errors when the 'date'-object was already of type 'datetime' - added type-checks and an additional logging 
message to catch future edge-cases --- converter/pipelines.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 79487107..58e6a212 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -237,9 +237,18 @@ def process_item(self, raw_item, spider): if "expirationDate" in item["license"]: item["license"]["expirationDate"] = dateparser.parse(item["license"]["expirationDate"]) if "lifecycle" in item["lom"]: - for contribute in item["lom"]["lifecycle"]: - if "date" in contribute: - contribute["date"] = dateparser.parse(contribute["date"]) + for lifecycle_contributor in item["lom"]["lifecycle"]: + # there can be multiple LomLifecycleItems within a LomBaseItem + if "date" in lifecycle_contributor: + lifecycle_date: str | datetime.datetime = lifecycle_contributor["date"] + if lifecycle_date and isinstance(lifecycle_date, str): + lifecycle_contributor["date"] = dateparser.parse(lifecycle_date) + elif lifecycle_date and isinstance(lifecycle_date, datetime.datetime): + # happy-case: the 'date' property is of type datetime + pass + elif lifecycle_date: + log.warning(f"Lifecycle Pipeline received invalid 'date'-value: {lifecycle_date} !" + f"Expected type 'str' or 'datetime', but received: {type(lifecycle_date)} instead.") return raw_item From dedce7b5ff2403f2f8f3bac1485e24a04489e659 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 31 Jul 2024 17:25:48 +0200 Subject: [PATCH 523/590] bne_portal_spider v0.0.1 (squashed) - first work-in-progress draft that handles "Lernmaterialien" from BNE-Portal.de - some mappings might need additional feedback or corrections --- converter/spiders/bne_portal_spider.py | 389 +++++++++++++++++++++++++ 1 file changed, 389 insertions(+) create mode 100644 converter/spiders/bne_portal_spider.py diff --git a/converter/spiders/bne_portal_spider.py b/converter/spiders/bne_portal_spider.py new file mode 100644 index 00000000..b1dd2ef8 --- /dev/null +++ b/converter/spiders/bne_portal_spider.py @@ -0,0 +1,389 @@ +import datetime +import re +from typing import Iterable + +import scrapy +import trafilatura +from scrapy import Request + +from converter.constants import Constants +from converter.items import ( + BaseItemLoader, + LomBaseItemloader, + LomGeneralItemloader, + LomTechnicalItemLoader, + LomLifecycleItemloader, + LomEducationalItemLoader, + ValuespaceItemLoader, + LicenseItemLoader, + ResponseItemLoader, + PermissionItemLoader, + LomClassificationItemLoader, +) +from converter.spiders.base_classes import LomBase +from converter.web_tools import WebEngine + + +class BnePortalSpider(scrapy.Spider, LomBase): + name = "bne_portal_spider" + friendlyName = "BNE-Portal" + version = "0.0.1" + custom_settings = { + "AUTOTHROTTLE_ENABLED": True, + "AUTOTHROTTLE_DEBUG": True, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5, + "AUTOTHROTTLE_MAX_DELAY": 120, + "WEB_TOOLS": WebEngine.Playwright, + "ROBOTSTXT_OBEY": False, + # "COOKIES_DEBUG": True, + "USER_AGENT": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0", + } + + # to see a list of possible strings that need to be mapped, check the drop-down menus at: + # https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html?nn=140004 + FORMAT_TO_NEW_LRT: dict = { + # "Arbeitsblatt": "36e68792-6159-481d-a97b-2c00901f4f78", # Arbeitsblatt + "Artikel": "b98c0c8c-5696-4537-82fa-dded7236081e", # Artikel und Einzelpublikation + "Broschüre/ Buch/ 
Zeitschrift": "0cef3ce9-e106-47ae-836a-48f9ed04384e", # Dokumente und textbasierte Inhalte + "Datenbank/ Materialsammlung": "04693b11-8b39-42aa-964f-578be063a851", # Kollektion, Sammlung oder Kanal + "Datenbank-Webseite": "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9", # Webseite + "Digitale Lehr-/ Lerneinheit": "588efe4f-976f-48eb-84aa-8bcb45679f85", # Lehr- und Lernmaterial + "Film": "7a6e9608-2554-4981-95dc-47ab9ba924de", # Video (Material) + "Poster": "c382a478-74e0-42f1-96dd-fcfb5c27f746", # Poster und Plakat + "Spiel": "b0495f44-b05d-4bde-9dc5-34d7b5234d76", # Lernspiel + "Spiel/ Aktion": "68a43516-889e-4ce9-8e03-248307bd99ff", # offene und kreative Aktivität + } + + BILDUNGSBEREICH_TO_EDUCATIONAL_CONTEXT: dict = { + # "Berufliche Bildung": "berufliche_bildung", + # "bildungsbereichübergreifend": "", # will be added to keywords due to impossible mapping + "Frühkindliche Bildung": "elementarbereich", + # "Hochschule": "hochschule", + # "non-formale/ informelle Bildung": "", # will be added to keywords due to impossible mapping + "Primarbereich": "grundschule", + "Sekundarbereich I": "sekundarstufe_1", + "Sekundarbereich II": "sekundarstufe_2", + } + + THEMEN_TO_DISCIPLINE: dict = { + "Ernährung": "04006", # Ernährung und Hauswirtschaft + "Gesellschaftslehre": "48005", # Gesellschaftskunde + "Interkulturelles Lernen": "340", # Interkulturelle Bildung + "Mobilität und Verkehr": "660", # Verkehrserziehung # ToDo confirm mapping + } + + def start_requests(self) -> Iterable[Request]: + # ToDo: + # - document why we ignore the robots.txt + # - document why we need to spoof the User Agent + start_url: str = "https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html" + yield scrapy.Request( + url=start_url, + # headers=headers_firefox, + callback=self.gather_urls_from_first_page_of_search_results, + ) + + def gather_urls_from_first_page_of_search_results(self, response: scrapy.http.HtmlResponse, **kwargs): + # https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html + # at the bottom of the search results should be a page navigation element ("Seite 1 ... X") + # we're looking for the last page number to build the list of available URLs + last_page_link: str | None = response.xpath("//nav[@class='c-nav-index']/ul/li[last()]/a/@href").get() + last_page_number: int | None = None + page_number_pattern = re.compile(r"""D(?P\d+)#searchResults""") + if last_page_link: + # the relative URl of the last page currently looks like this: + # "SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html?gtp=33528_list%253D59#searchResults" + # the page number is controlled via the "D#searchResults" part of the URL + page_number_result = page_number_pattern.search(string=last_page_link) + if page_number_result and "page_number" in page_number_result.groupdict(): + last_page_str: str = page_number_result.groupdict()["page_number"] + last_page_number: int = int(last_page_str) + else: + self.logger.warning( + f"Failed to retrieve last page URL from response {response.url} . " + f"Cannot proceed with page iteration. 
(Please debug the XPath Expression!)" + ) + if last_page_number: + overview_relative_url_without_page_param = re.sub( + pattern=page_number_pattern, repl="", string=last_page_link + ) + for overview_page_nr in range(2, last_page_number + 1): + # since we are already on page 1, we iterate from page 2 to the last page + overview_relative_url: str = ( + f"{overview_relative_url_without_page_param}" f"D{overview_page_nr}#searchResults" + ) + overview_absolute_url: str = response.urljoin(overview_relative_url) + # ToDo: set priority higher than individual material requests + yield scrapy.Request( + url=overview_absolute_url, + callback=self.yield_request_for_each_search_result, + # priority=1 + ) + # the first page should contain 15 search results that need to be yielded to the parse()-method + yield from self.yield_request_for_each_search_result(response) + + def yield_request_for_each_search_result(self, response: scrapy.http.HtmlResponse): + search_result_relative_urls: list[str] = response.xpath( + "//div[@class='c-pub-teaser__text-wrapper']/h2/a[@class='c-pub-teaser__title-link']/@href" + ).getall() + if search_result_relative_urls and isinstance(search_result_relative_urls, list): + self.logger.debug(f"Detected {len(search_result_relative_urls)} search results on {response.url}") + for url_item in search_result_relative_urls: + search_result_absolute_url: str = response.urljoin(url_item) + # ToDo: remove priority setting of this callback after debugging + yield scrapy.Request(url=search_result_absolute_url, callback=self.parse, priority=2) + + @staticmethod + def clean_up_and_split_list_of_strings(raw_list_of_strings: list[str]): + """ + Cleans up a (raw) list of strings by + 1) removing whitespace chars from the beginning / end of strings + 2) removing empty strings (that only consist of newlines / whitespaces) + 3) splitting multiple string values (which are separated by comma on BNE-Portal) into individual strings + and returns the result as a list[str] if successful. + + @param raw_list_of_strings: a list of strings that might contain unnecessary whitespace chars or empty strings + @return: cleaned up list[str] if successful, otherwise None. 
+ """ + temporary_list_of_strings: list[str] = list() + clean_list_of_strings: list[str] = list() + if raw_list_of_strings and isinstance(raw_list_of_strings, list): + for raw_string in raw_list_of_strings: + if isinstance(raw_string, str): + cleaned_string: str | None = raw_string.strip() + if cleaned_string: + temporary_list_of_strings.append(cleaned_string) + if temporary_list_of_strings: + for temp_str in temporary_list_of_strings: + if ", " in temp_str: + individual_strings: list[str] = temp_str.split(", ") + if individual_strings: + clean_list_of_strings.extend(individual_strings) + else: + clean_list_of_strings.append(temp_str) + if clean_list_of_strings: + return clean_list_of_strings + else: + return None + + def getId(self, response=None) -> str: + # BNE-Portal.de does not provide an identifier for their items, therefore we resort to the URL + return response.url + + def getHash(self, response=None) -> str: + # BNE-Portal.de does not provide a publication or modification date, + # therefore we need to resort to the timestamp of the crawl + now = datetime.datetime.now().isoformat() + hash_str: str = f"{now}v{self.version}" + return hash_str + + def parse(self, response=None, **kwargs): + trafilatura_text: str | None = trafilatura.extract(response.body) + + # Metadata (Header) + title: str | None = response.xpath("//meta[@name='title']/@content").get() + og_title: str = response.xpath("//meta[@property='og:title']/@content").get() + + description: str | None = response.xpath("//meta[@name='description']/@content").get() + og_description: str | None = response.xpath("//meta[@property='og:description']/@content").get() + + keywords_from_header: str | None = response.xpath("//meta[@name='keywords']/@content").get() + keyword_set: set[str] = set() + if keywords_from_header and isinstance(keywords_from_header, str) and ", " in keywords_from_header: + # keyword values are typically split by comma in the DOM header + keyword_list: list[str] = keywords_from_header.split(", ") + if keyword_list: + keyword_set.update(keyword_list) + elif keywords_from_header and isinstance(keywords_from_header, str): + # this case should only happen if there is only a single keyword available in the header + keyword_set.add(keywords_from_header) + + # og_type: str | None = response.xpath("//meta[@property='og:type']/@content").get() + og_image: str | None = response.xpath("//meta[@property='og:image']/@content").get() + # og_image_type: str | None = response.xpath("//meta[@property='og:image:type']/@content").get() + og_locale: str | None = response.xpath("//meta[@property='og:locale']/@content").get() + og_url: str | None = response.xpath("//meta[@property='og:url']/@content").get() + + # metadata (DOM) + # - optional: "Mehr Informationen" -> URL to source website + bne_format_raw: list[str] | None = response.xpath("//strong[contains(text(),'Format')]/../text()").getall() + bne_thema_raw: list[str] | None = response.xpath("//strong[contains(text(),'Thema')]/../text()").getall() + bne_bildungsbereich_raw: list[str] | None = response.xpath( + "//strong[contains(text(),'Bildungsbereich')]/../text()" + ).getall() + bne_kosten_raw: list[str] | None = response.xpath("//strong[contains(text(),'Kosten')]/../text()").getall() + + bne_format_clean: list[str] | None = self.clean_up_and_split_list_of_strings(bne_format_raw) + bne_thema_clean: list[str] | None = self.clean_up_and_split_list_of_strings(bne_thema_raw) + bne_bildungsbereich_clean: list[str] | None = 
self.clean_up_and_split_list_of_strings(bne_bildungsbereich_raw) + bne_kosten_clean: list[str] | None = self.clean_up_and_split_list_of_strings(bne_kosten_raw) + + new_lrt_set: set[str] = set() + if bne_format_clean and isinstance(bne_format_clean, list): + for format_str in bne_format_clean: + if format_str in self.FORMAT_TO_NEW_LRT: + # manually mapping BNE "Format"-strings (e.g. 'Film', 'Poster') to our new_lrt Vocab for those cases + # where the raw strings would not match with our vocab + new_lrt_mapped: str = self.FORMAT_TO_NEW_LRT.get(format_str) + new_lrt_set.add(new_lrt_mapped) + elif format_str: + # this case typically happens for perfect matches (e.g. "Arbeitsblatt") or uncovered edge-cases + new_lrt_set.add(format_str) + + disciplines: set[str] = set() + if bne_thema_clean and isinstance(bne_thema_clean, list): + for thema_str in bne_thema_clean: + if thema_str in self.THEMEN_TO_DISCIPLINE: + thema_mapped: str = self.THEMEN_TO_DISCIPLINE.get(thema_str) + disciplines.add(thema_mapped) + elif thema_str: + # at this point we cannot know if the "Thema"-string is a discipline or not, therefore we treat it + # as additional keywords because the majority of values that we receive from "Thema" are keywords. + disciplines.add(thema_str) + keyword_set.add(thema_str) + + educational_context_set: set[str] = set() + if bne_bildungsbereich_clean and isinstance(bne_bildungsbereich_clean, list): + # Mapping BNE "Bildungsbereich" values to educationalContext + for bb_str in bne_bildungsbereich_clean: + if bb_str in self.BILDUNGSBEREICH_TO_EDUCATIONAL_CONTEXT: + bb_mapped: str = self.BILDUNGSBEREICH_TO_EDUCATIONAL_CONTEXT.get(bb_str) + educational_context_set.add(bb_mapped) + elif bb_str: + educational_context_set.add(bb_str) + if "bildungsbereichübergreifend" in bb_str or "non-formale/ informelle Bildung" in bb_str: + # these two edge-cases can't be mapped to educationalContext. To not lose out on these values, + # we save them to the keywords instead. + keyword_set.add(bb_str) + + price: str | None = None + if bne_kosten_clean and isinstance(bne_kosten_clean, list): + # there should be exactly 1 string value in this list by now + if len(bne_kosten_clean) == 1: + for cost_str in bne_kosten_clean: + if cost_str and isinstance(cost_str, str): + cost_str: str = cost_str.lower() + if "kostenfrei" in cost_str: + price = "no" + pass + elif "kostenpflichtig" in cost_str: + price = "yes" + elif "€" in cost_str: + # there is currently (as of 2024-08-01) one single item that has an actual price string. + # example: + # https://www.bne-portal.de/bne/shareddocs/lernmaterialien/de/praxisbuch-mobilitaetsbildung.html#searchFacets + price = "yes" + elif "null" in cost_str: + # observed 5 edge-cases where free materials carried the "Kosten"-string: "null" + # this might be a typo in their CMS, but handling these edge-cases as free seems reasonable. + # example: + # https://www.bne-portal.de/bne/shareddocs/lernmaterialien/de/nachhaltige-stadtentwicklung.html#searchFacets + price = "no" + else: + self.logger.warning( + f"Failed to map BNE 'Kosten'-value to 'price'-vocab. Please update the mapping " + f"for this edge-case: '{cost_str}'" + ) + else: + # if BNE-Portal starts using more than two values in the future, we need to update our mapping + self.logger.warning( + f"Mapping edge-case for BNE 'Kosten' detected: Expected exactly 1 value in " + f"{bne_kosten_clean}, but received {len(bne_kosten_clean)}. 
" + f"Values received: {bne_kosten_clean}" + ) + + base_itemloader: BaseItemLoader = BaseItemLoader() + base_itemloader.add_value("sourceId", self.getId(response)) + base_itemloader.add_value("hash", self.getHash(response)) + if og_image: + base_itemloader.add_value("thumbnail", og_image) + if trafilatura_text: + base_itemloader.add_value("fulltext", trafilatura_text) + + lom_base_itemloader: LomBaseItemloader = LomBaseItemloader() + + lom_classification_itemloader: LomClassificationItemLoader = LomClassificationItemLoader() + + lom_general_itemloader: LomGeneralItemloader = LomGeneralItemloader() + lom_general_itemloader.add_value("identifier", response.url) + if title: + lom_general_itemloader.add_value("title", title) + elif og_title: + lom_general_itemloader.add_value("title", og_title) + if description: + lom_general_itemloader.add_value("description", description) + elif og_description: + lom_general_itemloader.add_value("description", og_description) + if og_locale: + lom_general_itemloader.add_value("language", og_locale) + if keyword_set: + keyword_list = list(keyword_set) + lom_general_itemloader.add_value("keyword", keyword_list) + + lom_educational_itemloader: LomEducationalItemLoader = LomEducationalItemLoader() + + lom_technical_itemloader: LomTechnicalItemLoader = LomTechnicalItemLoader() + if og_url and og_url != response.url: + lom_technical_itemloader.add_value("location", og_url) + lom_technical_itemloader.add_value("location", response.url) + else: + lom_technical_itemloader.add_value("location", response.url) + lom_technical_itemloader.add_value("format", "text/html") + + lom_lifecycle_itemloader: LomLifecycleItemloader = LomLifecycleItemloader() + lom_lifecycle_itemloader.add_value("role", "publisher") + lom_lifecycle_itemloader.add_value("organization", "Bundesministerium für Bildung und Forschung") + lom_lifecycle_itemloader.add_value( + "url", "https://www.bne-portal.de/bne/de/services/impressum/impressum_node.html" + ) + lom_base_itemloader.add_value("lifecycle", lom_lifecycle_itemloader.load_item()) + + license_itemloader: LicenseItemLoader = LicenseItemLoader() + license_itemloader.add_value("author", "Bundesministerium für Bildung und Forschung") + license_itemloader.add_value("internal", Constants.LICENSE_COPYRIGHT_FREE) + license_description: str = ( + "Das Copyright für Texte liegt, soweit nicht anders vermerkt, " + "beim Bundesministerium für Bildung und Forschung (nachfolgend BMBF). Das " + "Copyright für Bilder liegt, soweit nicht anders vermerkt, beim Bundesministerium " + "für Bildung und Forschung oder bei der Bundesbildstelle des Presse- und " + "Informationsamtes der Bundesregierung. Auf den BMBF-Webseiten zur Verfügung " + "gestellte Texte, Textteile, Grafiken, Tabellen oder Bildmaterialien dürfen ohne " + "vorherige Zustimmung des BMBF nicht vervielfältigt, nicht verbreitet und nicht " + "ausgestellt werden." 
+ ) + # see: https://www.bne-portal.de/bne/de/services/impressum/impressum_node.html + license_itemloader.add_value("description", license_description) + + permission_itemloader: PermissionItemLoader = self.getPermissions(response=response) + + valuespace_itemloader: ValuespaceItemLoader = ValuespaceItemLoader() + valuespace_itemloader.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) + if disciplines: + discipline_list: list[str] = list(disciplines) + valuespace_itemloader.add_value("discipline", discipline_list) + if educational_context_set: + edu_context_list: list[str] = list(educational_context_set) + valuespace_itemloader.add_value("educationalContext", edu_context_list) + if new_lrt_set: + new_lrt_list: list[str] = list(new_lrt_set) + valuespace_itemloader.add_value("new_lrt", new_lrt_list) + if price: + valuespace_itemloader.add_value("price", price) + + response_itemloader: ResponseItemLoader = ResponseItemLoader() + response_itemloader.add_value("headers", response.headers) + response_itemloader.add_value("status", response.status) + response_itemloader.add_value("text", trafilatura_text) + response_itemloader.add_value("url", response.url) + + lom_base_itemloader.add_value("classification", lom_classification_itemloader.load_item()) + lom_base_itemloader.add_value("educational", lom_educational_itemloader.load_item()) + lom_base_itemloader.add_value("general", lom_general_itemloader.load_item()) + lom_base_itemloader.add_value("technical", lom_technical_itemloader.load_item()) + + base_itemloader.add_value("lom", lom_base_itemloader.load_item()) + base_itemloader.add_value("license", license_itemloader.load_item()) + base_itemloader.add_value("valuespaces", valuespace_itemloader.load_item()) + base_itemloader.add_value("permissions", permission_itemloader.load_item()) + base_itemloader.add_value("response", response_itemloader.load_item()) + yield base_itemloader.load_item() From 03186e507feb93605f1082e8b8c7d4a6cfd4198e Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Fri, 2 Aug 2024 13:42:29 +0200 Subject: [PATCH 524/590] DMED-119 - update import paths in brb_sportinhalte files --- schulcloud/brb_sportinhalte/convert_to_collection.py | 2 +- schulcloud/brb_sportinhalte/modify_sportinhalt.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/schulcloud/brb_sportinhalte/convert_to_collection.py b/schulcloud/brb_sportinhalte/convert_to_collection.py index c93c12c2..80960e07 100644 --- a/schulcloud/brb_sportinhalte/convert_to_collection.py +++ b/schulcloud/brb_sportinhalte/convert_to_collection.py @@ -1,6 +1,6 @@ from tqdm import tqdm -from schulcloud.upload_sportinhalt_brandenburg.sportinhalt_utils import get_api +from schulcloud.brb_sportinhalte.sportinhalt_utils import get_api def identify_collections_in_directory(directory_node_id: str): diff --git a/schulcloud/brb_sportinhalte/modify_sportinhalt.py b/schulcloud/brb_sportinhalte/modify_sportinhalt.py index fe9cad8f..b1a4a840 100644 --- a/schulcloud/brb_sportinhalte/modify_sportinhalt.py +++ b/schulcloud/brb_sportinhalte/modify_sportinhalt.py @@ -4,10 +4,10 @@ from tqdm import tqdm from converter.es_connector import EduSharing -from schulcloud.upload_sportinhalt_brandenburg.convert_to_collection import \ +from schulcloud.brb_sportinhalte.convert_to_collection import \ identify_collections_in_directory, get_directory_elements, convert_to_collection -from schulcloud.upload_sportinhalt_brandenburg.sportinhalt_utils import get_configuration, get_api +from schulcloud.brb_sportinhalte.sportinhalt_utils import 
get_configuration, get_api def add_attributes_to_nodes(node): From f1cc630e71fc10b201277071c69c84c7de9188b9 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Fri, 2 Aug 2024 13:43:40 +0200 Subject: [PATCH 525/590] DMED-119 - fix issues in spiders to be able to use `scapy crawl ...` --- converter/spiders/merlin_spider.py | 4 ++++ converter/spiders/oeh_spider.py | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 123928e4..56e03caf 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -1,6 +1,7 @@ import xmltodict as xmltodict from lxml import etree from scrapy.spiders import CrawlSpider +import scrapy as scrapy from converter.constants import Constants from converter.items import * @@ -81,6 +82,9 @@ async def parse(self, response: scrapy.http.Response): # In case JSON string representation is preferred: # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) copyResponse._set_body(element_xml_str) + + except Exception: + pass # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. await LomBase.parse(self, copyResponse) diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index d6f5da41..d7443efa 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -2,7 +2,7 @@ import converter.env as env from .base_classes import EduSharingBase -from ..items import LomLifecycleItemloader +from ..items import LomLifecycleItemloader, LomAnnotationItemLoader class OEHSpider(EduSharingBase): @@ -45,7 +45,7 @@ def getLOMGeneral(self, response): return general def getLOMAnnotation(self, response=None) -> LomAnnotationItemLoader: - annotation = LomBase.getLOMAnnotation(self, response) + annotation = EduSharingBase.getLOMAnnotation(self, response) # Adding a default searchable value to constitute this element (node) as a valid-to-be-returned object. 
annotation.add_value("entity", "crawler") @@ -110,7 +110,7 @@ def shouldImport(self, response=None): return True def getPermissions(self, response): - permissions = LomBase.getPermissions(self, response) + permissions = EduSharingBase.getPermissions(self, response) permissions.replace_value("public", False) permissions.add_value("autoCreateGroups", True) From f9cfd9db4fb303cf238621e26a428a733cb8a1c9 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Fri, 2 Aug 2024 13:45:03 +0200 Subject: [PATCH 526/590] DMED-119 - apply nest_asyncio to improve asyncio compatibility as well as improve initialization of `oeh_importer` --- run.py | 2 ++ schulcloud/oeh_importer.py | 23 ++++++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/run.py b/run.py index 95d6defe..0d51ff92 100644 --- a/run.py +++ b/run.py @@ -13,6 +13,8 @@ from schulcloud.permission_updater import PermissionUpdater from schulcloud.oeh_importer import OehImporter +import nest_asyncio +nest_asyncio.apply() needed_env_vars = [ 'CRAWLER', diff --git a/schulcloud/oeh_importer.py b/schulcloud/oeh_importer.py index 266fa4fa..8a87f531 100644 --- a/schulcloud/oeh_importer.py +++ b/schulcloud/oeh_importer.py @@ -7,23 +7,27 @@ import requests import scrapy as scrapy -import vobject +from scrapy.crawler import Crawler from scrapy.exceptions import DropItem +from scrapy.spiders import Spider +from scrapy.utils.project import get_project_settings +import vobject from edu_sharing_client.rest import ApiException from converter.items import LomAnnotationItemLoader -from converter.spiders.base_classes.lom_base import LomAgeRangeItemLoader -from converter.spiders.base_classes import LomBase +from converter.spiders.base_classes.lom_base import LomBase, LomAgeRangeItemLoader from converter.es_connector import EduSharingConstants from converter.pipelines import EduSharingCheckPipeline, FilterSparsePipeline, LOMFillupPipeline, NormLicensePipeline,\ ConvertTimePipeline, ProcessValuespacePipeline, ProcessThumbnailPipeline, EduSharingStorePipeline, BasicPipeline from schulcloud.edusharing import EdusharingAPI, RequestTimeoutException +import nest_asyncio +nest_asyncio.apply() -class OehImporter(LomBase): - name = "oeh_spider" +class OehImporter(LomBase, Spider): + name = "oeh_importer" friendlyName = "Open Edu Hub" API_URL = 'https://redaktion.openeduhub.net/edu-sharing/' MDS_ID = 'mds_oeh' @@ -53,6 +57,15 @@ def __init__(self, **kwargs): self.fake_request = scrapy.http.Request(self.API_URL) self.fake_response = scrapy.http.Response(self.API_URL, request=self.fake_request) + self.crawler = Crawler(OehImporter) + self.crawler._apply_settings() + self.crawler.engine = self.crawler._create_engine() + start_requests = iter(self.start_requests()) + self.crawler.engine.open_spider(self, start_requests) + self.crawler.engine.start() + + asyncio.run(self.run()) + async def run(self): i = 0 while True: From 58f282c2fc8f55d86157718c7e5bc5617e291260 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 2 Aug 2024 17:47:00 +0200 Subject: [PATCH 527/590] bne_portal_spider v0.0.2 (squashed) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - feat: enable crawling of "Publikationen" within the BNE "Infothek" - publications in this section of the website receive the "new_lrt"-value "Artikel und Einzelpublikation" according to their URL path - feat: detect "BNE-Gütesiegel" (-> saved to keywords) - docs: additional explanations w.r.t. 
why crawler settings are chosen, why we need to disobey the robots.txt and spoof a user-agent - change: add mapping suggestion from Jan ("Stundenentwurf") - change: set discipline to sustainability ("Nachhaltigkeit") for all materials --- converter/spiders/bne_portal_spider.py | 84 ++++++++++++++++++-------- 1 file changed, 59 insertions(+), 25 deletions(-) diff --git a/converter/spiders/bne_portal_spider.py b/converter/spiders/bne_portal_spider.py index b1dd2ef8..44d2c3d9 100644 --- a/converter/spiders/bne_portal_spider.py +++ b/converter/spiders/bne_portal_spider.py @@ -25,29 +25,39 @@ class BnePortalSpider(scrapy.Spider, LomBase): + """ + Crawler for "Lernmaterialien" from BNE-Portal.de. ("Infothek" -> "Lernmaterialien": + https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html?nn=140004) + """ + name = "bne_portal_spider" friendlyName = "BNE-Portal" - version = "0.0.1" + version = "0.0.2" # last update: 2024-08-02 custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, - "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5, + # "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5, "AUTOTHROTTLE_MAX_DELAY": 120, "WEB_TOOLS": WebEngine.Playwright, "ROBOTSTXT_OBEY": False, # "COOKIES_DEBUG": True, "USER_AGENT": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0", } + # NOTICE: Custom Settings + # We need to disobey the robots.txt in this case because the crawler would not be able to parse the learning + # materials in a timely manner (and requests to the required search form would be disallowed). + # We also NEED to spoof the User Agent, because Scrapy Requests without a User Agent always return a "403"-Response. + # Spider-specific Mappings: # to see a list of possible strings that need to be mapped, check the drop-down menus at: # https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html?nn=140004 FORMAT_TO_NEW_LRT: dict = { - # "Arbeitsblatt": "36e68792-6159-481d-a97b-2c00901f4f78", # Arbeitsblatt "Artikel": "b98c0c8c-5696-4537-82fa-dded7236081e", # Artikel und Einzelpublikation "Broschüre/ Buch/ Zeitschrift": "0cef3ce9-e106-47ae-836a-48f9ed04384e", # Dokumente und textbasierte Inhalte "Datenbank/ Materialsammlung": "04693b11-8b39-42aa-964f-578be063a851", # Kollektion, Sammlung oder Kanal "Datenbank-Webseite": "d8c3ef03-b3ab-4a5e-bcc9-5a546fefa2e9", # Webseite - "Digitale Lehr-/ Lerneinheit": "588efe4f-976f-48eb-84aa-8bcb45679f85", # Lehr- und Lernmaterial + "Digitale Lehr-/ Lerneinheit": ["588efe4f-976f-48eb-84aa-8bcb45679f85", "0d23ff13-9d92-4944-92fa-2b5fe1dde80b"], + # Lehr- und Lernmaterial; Stundenentwurf "Film": "7a6e9608-2554-4981-95dc-47ab9ba924de", # Video (Material) "Poster": "c382a478-74e0-42f1-96dd-fcfb5c27f746", # Poster und Plakat "Spiel": "b0495f44-b05d-4bde-9dc5-34d7b5234d76", # Lernspiel @@ -55,10 +65,8 @@ class BnePortalSpider(scrapy.Spider, LomBase): } BILDUNGSBEREICH_TO_EDUCATIONAL_CONTEXT: dict = { - # "Berufliche Bildung": "berufliche_bildung", # "bildungsbereichübergreifend": "", # will be added to keywords due to impossible mapping "Frühkindliche Bildung": "elementarbereich", - # "Hochschule": "hochschule", # "non-formale/ informelle Bildung": "", # will be added to keywords due to impossible mapping "Primarbereich": "grundschule", "Sekundarbereich I": "sekundarstufe_1", @@ -69,19 +77,24 @@ class BnePortalSpider(scrapy.Spider, LomBase): "Ernährung": "04006", # Ernährung und Hauswirtschaft "Gesellschaftslehre": "48005", # Gesellschaftskunde "Interkulturelles Lernen": "340", # 
Interkulturelle Bildung - "Mobilität und Verkehr": "660", # Verkehrserziehung # ToDo confirm mapping } def start_requests(self) -> Iterable[Request]: - # ToDo: - # - document why we ignore the robots.txt - # - document why we need to spoof the User Agent - start_url: str = "https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html" - yield scrapy.Request( - url=start_url, - # headers=headers_firefox, - callback=self.gather_urls_from_first_page_of_search_results, + start_url_lernmaterialien: str = ( + "https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html" + ) + start_url_publikationen: str = ( + "https://www.bne-portal.de/SiteGlobals/Forms/bne/publikationen/suche_formular.html?nn=139986" ) + start_urls: list[str] = [ + start_url_lernmaterialien, + start_url_publikationen + ] + for start_url in start_urls: + yield scrapy.Request( + url=start_url, + callback=self.gather_urls_from_first_page_of_search_results, + ) def gather_urls_from_first_page_of_search_results(self, response: scrapy.http.HtmlResponse, **kwargs): # https://www.bne-portal.de/SiteGlobals/Forms/bne/lernmaterialien/suche_formular.html @@ -113,11 +126,8 @@ def gather_urls_from_first_page_of_search_results(self, response: scrapy.http.Ht f"{overview_relative_url_without_page_param}" f"D{overview_page_nr}#searchResults" ) overview_absolute_url: str = response.urljoin(overview_relative_url) - # ToDo: set priority higher than individual material requests yield scrapy.Request( - url=overview_absolute_url, - callback=self.yield_request_for_each_search_result, - # priority=1 + url=overview_absolute_url, callback=self.yield_request_for_each_search_result, priority=1 ) # the first page should contain 15 search results that need to be yielded to the parse()-method yield from self.yield_request_for_each_search_result(response) @@ -130,8 +140,7 @@ def yield_request_for_each_search_result(self, response: scrapy.http.HtmlRespons self.logger.debug(f"Detected {len(search_result_relative_urls)} search results on {response.url}") for url_item in search_result_relative_urls: search_result_absolute_url: str = response.urljoin(url_item) - # ToDo: remove priority setting of this callback after debugging - yield scrapy.Request(url=search_result_absolute_url, callback=self.parse, priority=2) + yield scrapy.Request(url=search_result_absolute_url, callback=self.parse) @staticmethod def clean_up_and_split_list_of_strings(raw_list_of_strings: list[str]): @@ -180,10 +189,16 @@ def getHash(self, response=None) -> str: def parse(self, response=None, **kwargs): trafilatura_text: str | None = trafilatura.extract(response.body) + if self.shouldImport(response) is False: + self.logger.debug(f"Skipping entry {self.getId(response)} because shouldImport() returned False") + return None + if self.getId(response) is not None and self.getHash(response) is not None: + if not self.hasChanged(response): + return None + # Metadata (Header) title: str | None = response.xpath("//meta[@name='title']/@content").get() og_title: str = response.xpath("//meta[@property='og:title']/@content").get() - description: str | None = response.xpath("//meta[@name='description']/@content").get() og_description: str | None = response.xpath("//meta[@property='og:description']/@content").get() @@ -200,13 +215,17 @@ def parse(self, response=None, **kwargs): # og_type: str | None = response.xpath("//meta[@property='og:type']/@content").get() og_image: str | None = response.xpath("//meta[@property='og:image']/@content").get() + # attention: a 
big amount of (older) learning materials do not have a thumbnail! + # ToDo: the pipeline-fallback to a website screenshot for these URLs mostly just shows a cookie-banner # og_image_type: str | None = response.xpath("//meta[@property='og:image:type']/@content").get() og_locale: str | None = response.xpath("//meta[@property='og:locale']/@content").get() og_url: str | None = response.xpath("//meta[@property='og:url']/@content").get() # metadata (DOM) - # - optional: "Mehr Informationen" -> URL to source website + # - optional: "Mehr Informationen" should contain the URL to source website bne_format_raw: list[str] | None = response.xpath("//strong[contains(text(),'Format')]/../text()").getall() + # this XPath Expression looks for the bold text "Format", which shares the same

-element as the desired + # strings. Therefore, we select its parent with ".." and grab all text strings for further metadata extraction. bne_thema_raw: list[str] | None = response.xpath("//strong[contains(text(),'Thema')]/../text()").getall() bne_bildungsbereich_raw: list[str] | None = response.xpath( "//strong[contains(text(),'Bildungsbereich')]/../text()" @@ -218,18 +237,32 @@ def parse(self, response=None, **kwargs): bne_bildungsbereich_clean: list[str] | None = self.clean_up_and_split_list_of_strings(bne_bildungsbereich_raw) bne_kosten_clean: list[str] | None = self.clean_up_and_split_list_of_strings(bne_kosten_raw) + bne_guetesiegel: list | None = response.xpath("//div[@class='c-pub-teaser__stamp']").getall() + # the "BNE Gütesiegel" is displayed at the bottom right of curated materials. + # (as of 2024-08-02 there are 6 of those materials in total) + # If this
<div>
-container shows up, we'll add the "erfüllt BNE-Gütekriterien" keyword to our list. + if bne_guetesiegel: + keyword_set.add("erfüllt BNE-Gütekriterien") + new_lrt_set: set[str] = set() if bne_format_clean and isinstance(bne_format_clean, list): for format_str in bne_format_clean: if format_str in self.FORMAT_TO_NEW_LRT: # manually mapping BNE "Format"-strings (e.g. 'Film', 'Poster') to our new_lrt Vocab for those cases # where the raw strings would not match with our vocab - new_lrt_mapped: str = self.FORMAT_TO_NEW_LRT.get(format_str) - new_lrt_set.add(new_lrt_mapped) + new_lrt_mapped: list[str] | str = self.FORMAT_TO_NEW_LRT.get(format_str) + if isinstance(new_lrt_mapped, str): + new_lrt_set.add(new_lrt_mapped) + if isinstance(new_lrt_mapped, list): + for mapped_value in new_lrt_mapped: + new_lrt_set.add(mapped_value) elif format_str: # this case typically happens for perfect matches (e.g. "Arbeitsblatt") or uncovered edge-cases new_lrt_set.add(format_str) + if "/Publikationen/" in response.url: + new_lrt_set.add("b98c0c8c-5696-4537-82fa-dded7236081e") # Artikel und Einzelpublikation + disciplines: set[str] = set() if bne_thema_clean and isinstance(bne_thema_clean, list): for thema_str in bne_thema_clean: @@ -358,6 +391,7 @@ def parse(self, response=None, **kwargs): valuespace_itemloader: ValuespaceItemLoader = ValuespaceItemLoader() valuespace_itemloader.add_value("new_lrt", Constants.NEW_LRT_MATERIAL) + disciplines.add("64018") # Nachhaltigkeit is hard-coded for all BNE materials if disciplines: discipline_list: list[str] = list(disciplines) valuespace_itemloader.add_value("discipline", discipline_list) From 5927a2ec4df5ec359e092e7654298dae475749bd Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 6 Aug 2024 11:02:12 +0200 Subject: [PATCH 528/590] add pyCharm run configuration --- .run/bne_portal_spider.run.xml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .run/bne_portal_spider.run.xml diff --git a/.run/bne_portal_spider.run.xml b/.run/bne_portal_spider.run.xml new file mode 100644 index 00000000..7b1289da --- /dev/null +++ b/.run/bne_portal_spider.run.xml @@ -0,0 +1,26 @@ + + + + + + \ No newline at end of file From b8ecb8bb2330282b30c2b22d4a5a7db1baa9b84c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 6 Aug 2024 20:11:56 +0200 Subject: [PATCH 529/590] change: allow higher image resolutions during image-to-thumbnail-conversion Why this change was necessary: - some thumbnails from BNE-Portal are quite huge (e.g. 
a poster with 17.717 x 17.717 pixels -> 313.9 megapixels) and caused the thumbnail pipeline to throw "Image.DecompressionBombError"s, therefore causing the whole item to be dropped - (this also happened for some OERSI items where the images were way larger than expected) - implemented a slightly more graceful handling of these edge-cases by doubling the megapixel limit (from default: 89,5 MP to 179MP) - if downloaded images are bigger than 179 MP and smaller than 357,9 MP, the pipeline will throw a DecompressionBombWarning, but thumbnail conversion will complete nonetheless - if downloaded images exceed twice the megapixel limit (when the image is bigger than 357,9 MP), the pipeline will catch the "Image.DecompressionBombError", delete the thumbnail and try to take a website screenshot instead - docs: increased readability of some DosStrings and comments --- converter/pipelines.py | 51 +++++++++++++++++++++++++++++------------- 1 file changed, 35 insertions(+), 16 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 58e6a212..a4633f59 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -589,6 +589,10 @@ class ProcessThumbnailPipeline(BasicPipeline): """ generate thumbnails """ + pixel_limit: int = 178956970 # ~179 Megapixel + pixel_limit_in_mp: float = pixel_limit / 1000000 + Image.MAX_IMAGE_PIXELS = pixel_limit # doubles the Pillow default (89,478,485) → from 89,5 MegaPixels to 179 MP + # see: https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.MAX_IMAGE_PIXELS @staticmethod def scale_image(img, max_size): @@ -603,14 +607,15 @@ async def process_item(self, raw_item, spider): """ By default, the thumbnail-pipeline handles several cases: - if there is a URL-string inside the "BaseItem.thumbnail"-field: - -- download image from URL; rescale it into different sizes (small/large); - --- save the thumbnails as base64 within - ---- "BaseItem.thumbnail.small", "BaseItem.thumbnail.large" - --- (afterward delete the URL from "BaseItem.thumbnail") + - download image from URL; rescale it into different sizes (small/large); + - save the thumbnails as base64 within + - "BaseItem.thumbnail.small" + - "BaseItem.thumbnail.large" + - (afterward delete the URL from "BaseItem.thumbnail") - if there is NO "BaseItem.thumbnail"-field: - -- default: take a screenshot of the URL from "technical.location" with Splash, rescale and save (as above) - -- alternatively, on-demand: use Playwright to take a screenshot, rescale and save (as above) + - default: take a screenshot of the URL from "technical.location" (with Splash), rescale and save (as above) + - alternatively, on-demand: use Playwright to take a screenshot, rescale and save (as above) """ item = ItemAdapter(raw_item) response: scrapy.http.Response | None = None @@ -653,9 +658,9 @@ async def process_item(self, raw_item, spider): log.debug(f"Loading thumbnail from {url} took {time_end - time_start} (incl. 
awaiting).") log.debug(f"Thumbnail-URL-Cache: {self.download_thumbnail_url.cache_info()} after trying to query {url} ") if thumbnail_response.status != 200: - log.debug(f"Thumbnail-Pipeline received a unexpected response (status: {thumbnail_response.status}) " + log.debug(f"Thumbnail-Pipeline received an unexpected response (status: {thumbnail_response.status}) " f"from {url} (-> resolved URL: {thumbnail_response.url}") - # fall back to website screenshot + # falling back to website screenshot: del item["thumbnail"] return await self.process_item(raw_item, spider) else: @@ -777,15 +782,29 @@ async def process_item(self, raw_item, spider): response.body ).decode() else: - img = Image.open(BytesIO(response.body)) - self.create_thumbnails_from_image_bytes(img, item, settings_crawler) - except PIL.UnidentifiedImageError: - # this error can be observed when a website serves broken / malformed images - if url: - log.warning(f"Thumbnail download of image file {url} failed: image file could not be identified " + try: + img = Image.open(BytesIO(response.body)) + self.create_thumbnails_from_image_bytes(img, item, settings_crawler) + except PIL.UnidentifiedImageError: + # this error can be observed when a website serves broken / malformed images + if url: + log.warning( + f"Thumbnail download of image file {url} failed: image file could not be identified " f"(Image might be broken or corrupt). Falling back to website-screenshot.") - del item["thumbnail"] - return await self.process_item(raw_item, spider) + del item["thumbnail"] + return await self.process_item(raw_item, spider) + except Image.DecompressionBombError: + # Pillow throws a "DecompressionBombError" if the downloaded image exceeds twice the + # "Image.MAX_IMAGE_PIXELS"-setting. + # If such an error is thrown, the image object won't be available. + # Therefore, we need to fall back to a website screenshot. + absolute_pixel_limit_in_mp = (self.pixel_limit * 2) / 1000000 + log.warning(f"Thumbnail download of {url} triggered a 'PIL.Image.DecompressionBombError'! " + f"The image either exceeds the max size of {absolute_pixel_limit_in_mp} " + f"megapixels or might have been a DoS attempt. 
" + f"Falling back to website screenshot...") + del item["thumbnail"] + return await self.process_item(raw_item, spider) except Exception as e: if url is not None: log.warning(f"Could not read thumbnail at {url}: {str(e)} (falling back to screenshot)") From 88fa333c57fa8a90244e34df4e15ab1475ce1122 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:07:26 +0200 Subject: [PATCH 530/590] feat: enable spiders to have more control over playwright HTTP requests (adblocking and cookies) Cookie Data: - by using a spider's "custom_settings"-attribute, you can pass (custom-tailored) cookies to the headless browser: - "PLAYWRIGHT_COOKIES" expects a list[dict], where each dict should contain a "name" and "value" - (the necessary "url"-attribute will be automatically added by the pipeline) Adblocking: - by using a spider's "custom_setting"-attribute, you can enable adblocking for specific HTTP Requests via the headless browser: - by setting "PLAYWRIGHT_ADBLOCKER" to True (type: boolean), the built-in adblocker of the "browserless" chromium container (uBlock origin) can be turned on - (adblocking is disabled by default) --- converter/pipelines.py | 15 +++++++++- converter/web_tools.py | 68 +++++++++++++++++++++++++++++++++++++----- 2 files changed, 74 insertions(+), 9 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index a4633f59..5607a6fd 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -745,8 +745,21 @@ async def process_item(self, raw_item, spider): # this edge-case is necessary for spiders that only need playwright to gather a screenshot, # but don't use playwright within the spider itself target_url: str = item["lom"]["technical"]["location"][0] + + playwright_cookies = None + playwright_adblock_enabled = False + if spider.custom_settings: + # some spiders might require setting specific cookies to take "clean" website screenshots + # (= without cookie banners or ads). + if "PLAYWRIGHT_COOKIES" in spider.custom_settings: + playwright_cookies = spider.custom_settings.get("PLAYWRIGHT_COOKIES") + if "PLAYWRIGHT_ADBLOCKER" in spider.custom_settings: + playwright_adblock_enabled: bool = spider.custom_settings["PLAYWRIGHT_ADBLOCKER"] + playwright_dict = await WebTools.getUrlData(url=target_url, - engine=WebEngine.Playwright) + engine=WebEngine.Playwright, + cookies=playwright_cookies, + adblock=playwright_adblock_enabled) screenshot_bytes = playwright_dict.get("screenshot_bytes") img = Image.open(BytesIO(screenshot_bytes)) self.create_thumbnails_from_image_bytes(img, item, settings_crawler) diff --git a/converter/web_tools.py b/converter/web_tools.py index b23cf714..9aa753db 100644 --- a/converter/web_tools.py +++ b/converter/web_tools.py @@ -77,6 +77,8 @@ class WebTools: # reminder: if you increase this Semaphore value, you NEED to change the "browserless v2"-docker-container # configuration accordingly! 
(e.g., by increasing the MAX_CONCURRENT_SESSIONS and MAX_QUEUE_LENGTH configuration # settings, see: https://www.browserless.io/docs/docker) + _playwright_cookies: list[dict] = list() + _playwright_adblocker: bool = False @classmethod async def __safely_get_splash_response(cls, url: str): @@ -128,7 +130,19 @@ def url_cant_be_rendered_by_headless_browsers(cls, url: str) -> bool: return False @classmethod - async def getUrlData(cls, url: str, engine: WebEngine = WebEngine.Playwright): + async def getUrlData(cls, url: str, + engine: WebEngine = WebEngine.Playwright, + adblock: bool = None, + cookies: list[dict] = None): + """ + Sends an HTTP request through one of the (dockerized) headless browsers for JavaScript-enabled HTML rendering. + @param url: the to-be-rendered URL + @param engine: the WebEngine of choice (either "Splash" or "Playwright") + @param adblock: (playwright only!) block ads for this HTTP Request (via uBlock Origin) if set to True + @param cookies: (playwright only!) a list of cookies (type: list[dict]) that shall be transmitted during the + HTTP request + @return: + """ url_contains_problematic_file_extension: bool = cls.url_cant_be_rendered_by_headless_browsers(url=url) if url_contains_problematic_file_extension: # most binary files cannot be rendered by Playwright or Splash and would cause unexpected behavior in the @@ -140,7 +154,12 @@ async def getUrlData(cls, url: str, engine: WebEngine = WebEngine.Playwright): f"Skipping WebTools rendering for this url..." ) return - + if cookies: + # sets the spider-specific cookies for Playwright requests (e.g., to skip a cookie banner) + cls._playwright_cookies = cookies + if adblock: + # controls if the built-in adblocker of "browserless" should be enabled for Playwright + cls._playwright_adblocker = adblock if engine == WebEngine.Splash: return await cls.__safely_get_splash_response(url) elif engine == WebEngine.Playwright: @@ -199,16 +218,49 @@ async def __getUrlDataSplash(url: str): else: return {"html": None, "text": None, "cookies": None, "har": None} - @staticmethod - async def fetchDataPlaywright(url: str): + @classmethod + async def fetchDataPlaywright(cls, url: str): # relevant docs for this implementation: https://hub.docker.com/r/browserless/chrome#playwright and # https://playwright.dev/python/docs/api/class-browsertype#browser-type-connect-over-cdp async with async_playwright() as p: - browser = await p.chromium.connect_over_cdp(endpoint_url=env.get("PLAYWRIGHT_WS_ENDPOINT")) - page = await browser.new_page() + ws_cdp_endpoint = env.get("PLAYWRIGHT_WS_ENDPOINT") + if cls._playwright_adblocker: + # advertisements pollute the HTML body and obstruct website screenshots, which is why we try to block + # them from rendering via the built-in adblocker (uBlock Origin) of the browserless docker image. + # see: https://docs.browserless.io/chrome-flags/#blocking-ads + ws_cdp_endpoint = f"{ws_cdp_endpoint}/?blockAds=true" + browser = await p.chromium.connect_over_cdp(endpoint_url=ws_cdp_endpoint) + browser_context = await browser.new_context() + if cls._playwright_cookies: + # Some websites may require setting specific cookies to render properly + # (e.g., to skip or close annoying cookie banners). 
+ # Playwright supports passing cookies to requests + # see: https://playwright.dev/python/docs/api/class-browsercontext#browser-context-add-cookies + log.debug(f"Preparing cookies for Playwright HTTP request...") + prepared_cookies: list[dict] = list() + for cookie_object in cls._playwright_cookies: + if isinstance(cookie_object, dict): + # Playwright expects a list[dict]! + # Each cookie must have the following (REQUIRED) properties: + # "name" (type: str), "value" (type: str) and "url" (type: str) + if "name" in cookie_object and "value" in cookie_object: + cookie = { + "name": cookie_object["name"], + "value": cookie_object["value"], + "url": url + } + prepared_cookies.append(cookie) + else: + log.warning(f"Cannot set custom cookie for Playwright (headless browser) request due to " + f"missing properties: 'name' and 'value' are REQUIRED attributes " + f"within a cookie object (type: dict)! " + f"Discarding cookie: {cookie_object}") + if prepared_cookies and isinstance(prepared_cookies, list): + await browser_context.add_cookies(cookies=prepared_cookies) + page = await browser_context.new_page() await page.goto(url, wait_until="load", timeout=90000) - # waits for a website to fire the DOMContentLoaded event or for a timeout of 90s - # since waiting for 'networkidle' seems to cause timeouts + # waits for a website to fire the "load" event or for a timeout of 90 seconds + # (since waiting for "networkidle" seems to cause timeouts) content = await page.content() screenshot_bytes = await page.screenshot() # ToDo: HAR / cookies From 310461052dd0c42d4922d5a543ca221c62401d80 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:11:37 +0200 Subject: [PATCH 531/590] bne_portal_spider v0.0.3 (skip cookie banner during screenshot fallback) - feat: by spoofing two cookie attributes, we can skip the rendering of the (obtrusive) cookie banner whenever BNE-Portal does not provide a thumbnail URL for an item - one cookie attribute ("gsbbanner") can be hard-coded - while the other necessary cookie ("AL_SESS-S") needs to be parsed from the first HTTP Response --- converter/spiders/bne_portal_spider.py | 69 +++++++++++++++++++++++--- 1 file changed, 63 insertions(+), 6 deletions(-) diff --git a/converter/spiders/bne_portal_spider.py b/converter/spiders/bne_portal_spider.py index 44d2c3d9..3d7c1a3d 100644 --- a/converter/spiders/bne_portal_spider.py +++ b/converter/spiders/bne_portal_spider.py @@ -32,7 +32,18 @@ class BnePortalSpider(scrapy.Spider, LomBase): name = "bne_portal_spider" friendlyName = "BNE-Portal" - version = "0.0.2" # last update: 2024-08-02 + version = "0.0.3" # last update: 2024-08-08 + playwright_cookies: list[dict] = [ + { + "name": "gsbbanner", + "value": "closed" + } + ] + current_session_cookie: dict = dict() + # By using two cookie attributes ("gsbbanner" and "AL_SESS-S"), we enable playwright to skip the rendering of + # cookie banners while taking website screenshots (e.g., when no thumbnail was provided by BNE). + # While we can hard-code the "gsbbanner"-cookie, + # we need to dynamically parse the current session cookie ("AL_SESS-S") from the first HTTP response. 
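    # Illustrative sketch of the cookie contract described above, assuming the "PLAYWRIGHT_COOKIES"
    # mechanism introduced in the previous commit: a spider only supplies "name"/"value" pairs, and the
    # thumbnail pipeline completes each entry with the target "url" before the list is handed to
    # Playwright's browser_context.add_cookies(). The helper name and the "AL_SESS-S" value below are
    # made-up placeholders, not identifiers from this repository.
    def prepare_playwright_cookies(spider_cookies: list[dict], url: str) -> list[dict]:
        """Complete spider-supplied cookie dicts ("name"/"value") with the target "url"."""
        prepared: list[dict] = []
        for cookie in spider_cookies:
            if isinstance(cookie, dict) and "name" in cookie and "value" in cookie:
                prepared.append({"name": cookie["name"], "value": cookie["value"], "url": url})
        return prepared

    # usage example (the session cookie value is a placeholder):
    example_cookies: list[dict] = prepare_playwright_cookies(
        spider_cookies=[
            {"name": "gsbbanner", "value": "closed"},
            {"name": "AL_SESS-S", "value": "placeholder-session-cookie-value"},
        ],
        url="https://www.bne-portal.de/",
    )
    # example_cookies now holds both entries with the added "url" key and could be passed to
    # Playwright via browser_context.add_cookies(cookies=example_cookies), as done in web_tools.py.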
custom_settings = { "AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True, @@ -41,6 +52,8 @@ class BnePortalSpider(scrapy.Spider, LomBase): "WEB_TOOLS": WebEngine.Playwright, "ROBOTSTXT_OBEY": False, # "COOKIES_DEBUG": True, + "PLAYWRIGHT_COOKIES": playwright_cookies, + "PLAYWRIGHT_ADBLOCKER": True, "USER_AGENT": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0", } # NOTICE: Custom Settings @@ -86,10 +99,7 @@ def start_requests(self) -> Iterable[Request]: start_url_publikationen: str = ( "https://www.bne-portal.de/SiteGlobals/Forms/bne/publikationen/suche_formular.html?nn=139986" ) - start_urls: list[str] = [ - start_url_lernmaterialien, - start_url_publikationen - ] + start_urls: list[str] = [start_url_lernmaterialien, start_url_publikationen] for start_url in start_urls: yield scrapy.Request( url=start_url, @@ -129,9 +139,57 @@ def gather_urls_from_first_page_of_search_results(self, response: scrapy.http.Ht yield scrapy.Request( url=overview_absolute_url, callback=self.yield_request_for_each_search_result, priority=1 ) + if response.headers and not self.current_session_cookie: + # we want to grab the first session cookie exactly ONCE and forward it to playwright, + # because it allows us to skip the rendering of the cookie-banner when falling back to website-screenshots + self.save_session_cookie_to_spider_custom_settings(response) # the first page should contain 15 search results that need to be yielded to the parse()-method yield from self.yield_request_for_each_search_result(response) + def save_session_cookie_to_spider_custom_settings(self, response: scrapy.http.HtmlResponse): + """ + Parses the HTML header for a specific cookie attribute ("AL_SESS-S") and saves the key-value pair to the + spider's custom_settings. + This allows us to skip the rendering of the (obtrusive) cookie banner when a thumbnail was not available, + and our pipeline needs to fall back to taking a screenshot with playwright. + """ + cookies_raw: list[bytes] | None = response.headers.getlist("Set-Cookie") + if cookies_raw: + for cookie_bytes_object in cookies_raw: + if cookie_bytes_object and isinstance(cookie_bytes_object, bytes): + # cookie example: + # b'AL_SESS-S=AVHw!yeRm5uXQNxe3FM!nbN33IjG7EMgCO2CipHfxK1Iv34ESuTGarlWWhYFBecs0e3b; + # Path=/; Secure; HttpOnly; SameSite=Lax' + cookie = cookie_bytes_object.decode("utf-8") + # we need to decode the cookie into a string object first + if "AL_SESS-S" in cookie: + # we want to pass the session cookie to playwright, in order to not rely on hard-coding a + # session cookie (which might become invalid at any future moment). + cookie_attributes: list[str] | None = cookie.split(";") + # first, we need to split the cookie bytes object into the individual cookie attributes + if cookie_attributes: + for cookie_attribute in cookie_attributes: + if "AL_SESS-S=" in cookie_attribute: + # We're only interested in the session cookie. The other attributes don't seem + # to be necessary to skip the cookie-banner + cookie_split = cookie_attribute.split("=") + cookie_name: str = cookie_split[0] + cookie_value: str = cookie_split[1] + session_cookie = {"name": cookie_name, "value": cookie_value} + self.current_session_cookie.update(session_cookie) + if self.current_session_cookie: + self.logger.info( + f"Saving current session cookie to 'PLAYWRIGHT_COOKIES'-custom-settings. 
" + f"Session cookie: {self.current_session_cookie}" + ) + # we expect the current sesison cookie to look like this: + # { + # "name": "AL_SESS-S", + # "value": "AUiJVd4i8sXlsq6ZEHfjlvfvCBhnub4_TNyxnydq1cpcuRk8EvO5ryyD9643seQ742AB", + # }, + self.playwright_cookies.append(self.current_session_cookie) + self.custom_settings.update({"PLAYWRIGHT_COOKIES": self.playwright_cookies}) + def yield_request_for_each_search_result(self, response: scrapy.http.HtmlResponse): search_result_relative_urls: list[str] = response.xpath( "//div[@class='c-pub-teaser__text-wrapper']/h2/a[@class='c-pub-teaser__title-link']/@href" @@ -216,7 +274,6 @@ def parse(self, response=None, **kwargs): # og_type: str | None = response.xpath("//meta[@property='og:type']/@content").get() og_image: str | None = response.xpath("//meta[@property='og:image']/@content").get() # attention: a big amount of (older) learning materials do not have a thumbnail! - # ToDo: the pipeline-fallback to a website screenshot for these URLs mostly just shows a cookie-banner # og_image_type: str | None = response.xpath("//meta[@property='og:image:type']/@content").get() og_locale: str | None = response.xpath("//meta[@property='og:locale']/@content").get() og_url: str | None = response.xpath("//meta[@property='og:url']/@content").get() From 74f9d1aab160738dc90aff33b8ea7757df0a3f6d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 13 Aug 2024 17:38:36 +0200 Subject: [PATCH 532/590] fix: forgot to convert 'discipline'-set to list --- converter/spiders/science_in_school_spider.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index a4417640..f79158da 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -333,7 +333,9 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade base.add_value("lom", lom.load_item()) vs = ValuespaceItemLoader() - vs.add_value("discipline", disciplines) + if disciplines: + discipline_list: list[str] = list(disciplines) + vs.add_value("discipline", discipline_list) vs.add_value("intendedEndUserRole", "teacher") vs.add_value("dataProtectionConformity", "generalDataProtectionRegulation") # see: https://www.embl.de/aboutus/privacy_policy/ From 7a0870af65f603299a7326836ff60630704b4886 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 15 Aug 2024 14:29:05 +0200 Subject: [PATCH 533/590] remove hard-coded value for LOM Technical Format and increase autothrottle concurrency - change: removed the hard-coded LOM Technical Format value "text/html" since this appears to cause problems while rendering the item in edu-sharing - items in edu-sharing would be displayed as a "collection" (with a collection symbol) instead of the learning object (symbol: link) - change: increased the "Autothrottle Target Concurreny"-Setting to 2 - this increases the crawling speed / throughput significantly and the target webserver seems to handle it well --- converter/spiders/bne_portal_spider.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/converter/spiders/bne_portal_spider.py b/converter/spiders/bne_portal_spider.py index 3d7c1a3d..ecba1475 100644 --- a/converter/spiders/bne_portal_spider.py +++ b/converter/spiders/bne_portal_spider.py @@ -47,7 +47,7 @@ class BnePortalSpider(scrapy.Spider, LomBase): custom_settings = { "AUTOTHROTTLE_ENABLED": True, 
"AUTOTHROTTLE_DEBUG": True, - # "AUTOTHROTTLE_TARGET_CONCURRENCY": 0.5, + "AUTOTHROTTLE_TARGET_CONCURRENCY": 2, "AUTOTHROTTLE_MAX_DELAY": 120, "WEB_TOOLS": WebEngine.Playwright, "ROBOTSTXT_OBEY": False, @@ -418,7 +418,6 @@ def parse(self, response=None, **kwargs): lom_technical_itemloader.add_value("location", response.url) else: lom_technical_itemloader.add_value("location", response.url) - lom_technical_itemloader.add_value("format", "text/html") lom_lifecycle_itemloader: LomLifecycleItemloader = LomLifecycleItemloader() lom_lifecycle_itemloader.add_value("role", "publisher") From 110e8f9e979da3843eba28c0efcb970d4bd088d2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 15 Aug 2024 14:40:14 +0200 Subject: [PATCH 534/590] docs: remove hard-coded LOM Technical Format recommendation for web-sites - change: setting LOM Technical Format to "text/html" appears to cause rendering problems in edu-sharing v8.1, where items would be displayed as "collections" instead of links - debugging the problem with two different crawlers (bne_portal_spider & portal_globales_lernen_spider) showed that learning objects, which have been initially created with the hard-coded "text/html"-value in LOM Technical Format would keep their type, even when a fresh crawl would otherwise overwrite the whole object with the "resetVersion=true"-parameter --- converter/spiders/sample_spider_alternative.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/converter/spiders/sample_spider_alternative.py b/converter/spiders/sample_spider_alternative.py index 14cd649f..751bd1ae 100644 --- a/converter/spiders/sample_spider_alternative.py +++ b/converter/spiders/sample_spider_alternative.py @@ -112,8 +112,10 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade technical = LomTechnicalItemLoader() # TODO: fill "technical"-keys with values for - # - format required (expected: MIME-type, e.g. 'text/html' for web-sites) # - location required (expected: URI / URL of a learning object / material) + # - format optional (expected: MIME-type, e.g. 'text/html' for web-sites. + # Set this value only if absolutely necessary, + # e.g. during imports via oeh_spider) # - size optional # - requirement optional # - installationRemarks optional @@ -123,7 +125,6 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade # technical.add_value('key','value') # or replaced with: # technical.replace_value('key', 'value') - technical.add_value('format', 'text/html') # e.g. 
if the learning object is a web-page technical.add_value('location', response.url) # if the learning object has a unique URL that's being # navigated by the crawler From 6eeda80fe4b47aae67fb22c83ca36d9df337b8ae Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 15 Aug 2024 15:11:58 +0200 Subject: [PATCH 535/590] remove hard-coded value recommendation for LOM Technical Format from sample_spider - for details: - please check the previous sample_spider_alternative commit for a longer explanation: https://github.com/openeduhub/oeh-search-etl/commit/110e8f9e979da3843eba28c0efcb970d4bd088d2 - see: SDWLO-751 --- converter/spiders/sample_spider.py | 1 - 1 file changed, 1 deletion(-) diff --git a/converter/spiders/sample_spider.py b/converter/spiders/sample_spider.py index 34f852f4..69bf63c3 100644 --- a/converter/spiders/sample_spider.py +++ b/converter/spiders/sample_spider.py @@ -45,7 +45,6 @@ def getLOMGeneral(self, response): def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) technical.add_value("location", response.url) - technical.add_value("format", "text/html") technical.add_value("size", len(response.body)) return technical From fbed55955dca8bf24cb4ceacc66166032822c0c9 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:30:18 +0200 Subject: [PATCH 536/590] serlo_spider v0.3.3 - fix: JSON-LD "audience" metadata extraction - fix: typecast "identifier" value (from int) to str before submitting it to the pipeline - (this change will be necessary with stricter typechecks in the es_connector) - fix: JSON-LD "about" extraction (-> discipline) - the structure of Serlo's "about"-objects changed sometime in the past - the old "prefLabel" implementation can probably removed after more extensive debugging/testing - change: remove hard-coded LOM Technical Format value ("text/html") --- converter/spiders/serlo_spider.py | 71 ++++++++++++++++++++++--------- 1 file changed, 52 insertions(+), 19 deletions(-) diff --git a/converter/spiders/serlo_spider.py b/converter/spiders/serlo_spider.py index bfe47bb3..e8677052 100644 --- a/converter/spiders/serlo_spider.py +++ b/converter/spiders/serlo_spider.py @@ -31,7 +31,7 @@ class SerloSpider(scrapy.Spider, LomBase): # start_urls = ["https://de.serlo.org"] API_URL = "https://api.serlo.org/graphql" # for the API description, please check: https://lenabi.serlo.org/metadata-api - version = "0.3.2" # last update: 2023-10-27 (Serlo API v1.2.0) + version = "0.3.3" # last update: 2023-08-16 (Serlo API v1.2.0) custom_settings = { # Using Playwright because of Splash-issues with thumbnails+text for Serlo "WEB_TOOLS": WebEngine.Playwright @@ -234,9 +234,13 @@ def getId(self, response=None, graphql_json=None) -> str: # "value": "2097" graphql_json: dict = graphql_json try: - identifier_value: str = graphql_json["identifier"]["value"] + identifier_value: int | str | None = graphql_json["identifier"]["value"] if identifier_value: - return identifier_value + if isinstance(identifier_value, int): + identifier_value = str(identifier_value) + return identifier_value + if isinstance(identifier_value, str): + return identifier_value else: return response.url except KeyError: @@ -443,7 +447,6 @@ async def parse(self, response, **kwargs): # # - installationRemarks optional # # - otherPlatformRequirements optional # # - duration optional (only applies to audiovisual content like videos/podcasts) - technical.add_value("format", "text/html") # e.g. 
if the learning object is a web-page if "id" in graphql_json: graphql_id: str = graphql_json["id"] # e.g.: "https://serlo.org/1495" technical.add_value("location", graphql_id) @@ -501,14 +504,27 @@ async def parse(self, response, **kwargs): # mapping educationalAudienceRole to IntendedEndUserRole here intended_end_user_roles = list() for audience_item in json_ld["audience"]: - edu_audience_role = audience_item["prefLabel"]["en"] - if edu_audience_role == "professional": - vs.add_value("educationalContext", ["Further Education", "vocational education"]) - if edu_audience_role in self.EDU_AUDIENCE_ROLE_MAPPING.keys(): - edu_audience_role = self.EDU_AUDIENCE_ROLE_MAPPING.get(edu_audience_role) - intended_end_user_roles.append(edu_audience_role) - vs.add_value("intendedEndUserRole", intended_end_user_roles) - # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) + # as of 2024-04-23 the 'audience'-object within JSON-LD looks like this: + # "audience": [ + # { + # "id": "http://purl.org/dcx/lrmi-vocabs/educationalAudienceRole/student", + # "audienceType": "student", + # "type": "Audience" + # } + # ], + if "id" in audience_item: + # points towards a vocab URL, e.g. "http://purl.org/dcx/lrmi-vocabs/educationalAudienceRole/student" + pass + if "audienceType" in audience_item: + edu_audience_role = audience_item["audienceType"] + if edu_audience_role == "professional": + vs.add_value("educationalContext", ["Further Education", "vocational education"]) + if edu_audience_role in self.EDU_AUDIENCE_ROLE_MAPPING.keys(): + edu_audience_role = self.EDU_AUDIENCE_ROLE_MAPPING.get(edu_audience_role) + intended_end_user_roles.append(edu_audience_role) + if intended_end_user_roles: + vs.add_value("intendedEndUserRole", intended_end_user_roles) + # (see: https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/intendedEndUserRole.ttl) disciplines_set: set = set() if "about" in graphql_json: @@ -543,13 +559,30 @@ async def parse(self, response, **kwargs): # we need to make sure that we only try to access "about" if it's actually available # making sure that we only try to look for a discipline if the "about"-list actually has list items disciplines = list() - for about_item in json_ld["about"]: - if "de" in about_item["prefLabel"]: - discipline_de: str = about_item["prefLabel"]["de"] - disciplines.append(discipline_de) - elif "en" in about_item["prefLabel"]: - discipline_en: str = about_item["prefLabel"]["en"] - disciplines.append(discipline_en) + json_ld_about: list[dict] = json_ld["about"] + for about_item in json_ld_about: + # as of 2024-08-16 the "about"-property in a JSON-LD currently looks like this: + # "about": [ + # { + # "id": "https://serlo.org/5", + # "name": "Mathematik", + # "type": "Thing" + # } + if "id" in about_item: + json_ld_about_id: str = about_item["id"] + pass + if "name" in about_item: + json_ld_about_name: str = about_item["name"] + if json_ld_about_name and isinstance(json_ld_about_name, str): + disciplines.append(json_ld_about_name) + elif "prefLabel" in about_item: + # ToDo: this case should no longer happen as the "about" structure changed + if "de" in about_item["prefLabel"]: + discipline_de: str = about_item["prefLabel"]["de"] + disciplines.append(discipline_de) + elif "en" in about_item["prefLabel"]: + discipline_en: str = about_item["prefLabel"]["en"] + disciplines.append(discipline_en) if len(disciplines) > 0: vs.add_value("discipline", disciplines) # (see: 
https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl) From 32534e24ff927fec020f18b6eb79a1b8be1a3ca2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Aug 2024 11:35:47 +0200 Subject: [PATCH 537/590] remove "swagger"-generated API client --- edu_sharing_client/__init__.py | 266 - edu_sharing_client/api/__init__.py | 31 - edu_sharing_client/api/about_api.py | 219 - edu_sharing_client/api/admin_v1_api.py | 4267 --------------- edu_sharing_client/api/archive_v1_api.py | 505 -- .../api/authentication_v1_api.py | 389 -- edu_sharing_client/api/bulk_v1_api.py | 270 - edu_sharing_client/api/clientutils_v1_api.py | 122 - edu_sharing_client/api/collection_v1_api.py | 1722 ------ edu_sharing_client/api/comment_v1_api.py | 473 -- edu_sharing_client/api/config_v1_api.py | 587 -- edu_sharing_client/api/connector_v1_api.py | 126 - edu_sharing_client/api/iam_v1_api.py | 2901 ---------- edu_sharing_client/api/mds_v1_api.py | 342 -- edu_sharing_client/api/mediacenter_v1_api.py | 1207 ----- edu_sharing_client/api/network_v1_api.py | 496 -- edu_sharing_client/api/node_v1_api.py | 4749 ----------------- edu_sharing_client/api/organization_v1_api.py | 578 -- edu_sharing_client/api/rating_v1_api.py | 259 - edu_sharing_client/api/register_v1_api.py | 601 --- edu_sharing_client/api/rendering_v1_api.py | 263 - edu_sharing_client/api/search_v1_api.py | 821 --- edu_sharing_client/api/sharing_v1_api.py | 279 - edu_sharing_client/api/statistic_v1_api.py | 507 -- edu_sharing_client/api/stream_v1_api.py | 689 --- edu_sharing_client/api/tool_v1_api.py | 695 --- edu_sharing_client/api/tracking_v1_api.py | 140 - edu_sharing_client/api/usage_v1_api.py | 625 --- edu_sharing_client/api_client.py | 628 --- edu_sharing_client/configuration.py | 244 - edu_sharing_client/models/__init__.py | 235 - edu_sharing_client/models/about.py | 191 - .../models/accumulated_ratings.py | 163 - edu_sharing_client/models/ace.py | 217 - edu_sharing_client/models/acl.py | 139 - edu_sharing_client/models/admin.py | 111 - edu_sharing_client/models/admin_statistics.py | 241 - edu_sharing_client/models/application.py | 371 -- edu_sharing_client/models/audience.py | 111 - edu_sharing_client/models/authority.py | 144 - .../models/authority_entries.py | 139 - edu_sharing_client/models/available_mds.py | 137 - edu_sharing_client/models/banner.py | 163 - edu_sharing_client/models/body.py | 114 - edu_sharing_client/models/body1.py | 114 - edu_sharing_client/models/body10.py | 111 - edu_sharing_client/models/body11.py | 111 - edu_sharing_client/models/body2.py | 114 - edu_sharing_client/models/body3.py | 111 - edu_sharing_client/models/body4.py | 114 - edu_sharing_client/models/body5.py | 111 - edu_sharing_client/models/body6.py | 114 - edu_sharing_client/models/body7.py | 114 - edu_sharing_client/models/body8.py | 114 - edu_sharing_client/models/body9.py | 114 - edu_sharing_client/models/cache_cluster.py | 319 -- edu_sharing_client/models/cache_info.py | 423 -- edu_sharing_client/models/cache_member.py | 111 - edu_sharing_client/models/catalog.py | 137 - edu_sharing_client/models/collection.py | 510 -- .../models/collection_counts.py | 137 - .../models/collection_entries.py | 138 - edu_sharing_client/models/collection_entry.py | 112 - .../models/collection_feedback.py | 163 - .../models/collection_options.py | 149 - .../models/collection_reference.py | 873 --- edu_sharing_client/models/collections.py | 111 - .../models/collections_result.py | 111 - edu_sharing_client/models/column_v2.py | 
163 - edu_sharing_client/models/comment.py | 215 - edu_sharing_client/models/comments.py | 111 - edu_sharing_client/models/condition.py | 169 - edu_sharing_client/models/config.py | 163 - edu_sharing_client/models/connector.py | 270 - .../models/connector_file_type.py | 293 - edu_sharing_client/models/connector_list.py | 137 - edu_sharing_client/models/content.py | 163 - .../models/context_menu_entry.py | 475 -- edu_sharing_client/models/counts.py | 111 - edu_sharing_client/models/create.py | 111 - edu_sharing_client/models/delete_option.py | 111 - edu_sharing_client/models/dynamic_config.py | 137 - edu_sharing_client/models/element.py | 163 - edu_sharing_client/models/error_response.py | 192 - edu_sharing_client/models/excel_result.py | 111 - edu_sharing_client/models/facette.py | 139 - edu_sharing_client/models/filter.py | 112 - edu_sharing_client/models/filter_entry.py | 139 - edu_sharing_client/models/frontpage.py | 253 - edu_sharing_client/models/general.py | 163 - edu_sharing_client/models/geo.py | 163 - edu_sharing_client/models/group.py | 248 - edu_sharing_client/models/group_entries.py | 139 - edu_sharing_client/models/group_entry.py | 112 - edu_sharing_client/models/group_profile.py | 189 - edu_sharing_client/models/group_v2.py | 137 - edu_sharing_client/models/guest.py | 111 - .../models/help_menu_options.py | 163 - .../models/home_folder_options.py | 207 - edu_sharing_client/models/icon.py | 111 - edu_sharing_client/models/image.py | 137 - edu_sharing_client/models/interface.py | 253 - edu_sharing_client/models/job_detail.py | 345 -- edu_sharing_client/models/job_info.py | 247 - edu_sharing_client/models/key.py | 189 - edu_sharing_client/models/key_value_pair.py | 137 - edu_sharing_client/models/language.py | 137 - edu_sharing_client/models/level.py | 111 - edu_sharing_client/models/license.py | 137 - .../models/license_agreement.py | 111 - .../models/license_agreement_node.py | 137 - edu_sharing_client/models/list_v2.py | 137 - edu_sharing_client/models/location.py | 111 - edu_sharing_client/models/log_entry.py | 189 - edu_sharing_client/models/login.py | 350 -- .../models/login_credentials.py | 166 - edu_sharing_client/models/logout_info.py | 189 - edu_sharing_client/models/mainnav.py | 137 - .../models/mc_org_connect_result.py | 111 - edu_sharing_client/models/mds.py | 247 - edu_sharing_client/models/mds_entries_v2.py | 111 - edu_sharing_client/models/mds_entry.py | 112 - edu_sharing_client/models/mds_form.py | 139 - edu_sharing_client/models/mds_form_panel.py | 328 -- .../models/mds_form_property.py | 544 -- .../models/mds_form_property_parameter.py | 139 - .../models/mds_form_property_value.py | 139 - edu_sharing_client/models/mds_list.py | 166 - .../models/mds_list_property.py | 517 -- .../models/mds_list_property_parameter.py | 139 - .../models/mds_list_property_value.py | 139 - edu_sharing_client/models/mds_property.py | 301 -- edu_sharing_client/models/mds_queries.py | 139 - edu_sharing_client/models/mds_query.py | 328 -- .../models/mds_query_criteria.py | 139 - .../models/mds_query_property.py | 652 --- .../models/mds_query_property_parameter.py | 139 - .../models/mds_query_property_value.py | 139 - edu_sharing_client/models/mds_ref.py | 139 - edu_sharing_client/models/mds_type.py | 139 - edu_sharing_client/models/mds_v2.py | 267 - edu_sharing_client/models/mds_view.py | 139 - .../models/mds_view_property.py | 517 -- .../models/mds_view_property_parameter.py | 139 - .../models/mds_view_property_value.py | 139 - edu_sharing_client/models/mediacenter.py | 274 - 
.../models/mediacenter_profile_extension.py | 247 - .../models/mediacenters_import_result.py | 111 - edu_sharing_client/models/menu_entry.py | 345 -- .../models/metadata_set_info.py | 137 - edu_sharing_client/models/node.py | 821 --- edu_sharing_client/models/node_entries.py | 139 - edu_sharing_client/models/node_entry.py | 112 - edu_sharing_client/models/node_locked.py | 112 - .../models/node_permission_entry.py | 112 - edu_sharing_client/models/node_permissions.py | 139 - edu_sharing_client/models/node_ref.py | 192 - edu_sharing_client/models/node_remote.py | 139 - edu_sharing_client/models/node_share.py | 293 - edu_sharing_client/models/node_text.py | 163 - edu_sharing_client/models/node_version.py | 245 - .../models/node_version_entry.py | 112 - edu_sharing_client/models/node_version_ref.py | 166 - .../models/node_version_ref_entries.py | 112 - edu_sharing_client/models/notify_entry.py | 193 - .../models/organisations_import_result.py | 111 - edu_sharing_client/models/organization.py | 111 - .../models/organization_entries.py | 165 - edu_sharing_client/models/pagination.py | 166 - edu_sharing_client/models/parameters.py | 111 - edu_sharing_client/models/parent_entries.py | 165 - edu_sharing_client/models/person.py | 189 - .../models/person_delete_options.py | 371 -- .../models/person_delete_result.py | 319 -- edu_sharing_client/models/person_report.py | 137 - edu_sharing_client/models/preferences.py | 111 - edu_sharing_client/models/preview.py | 219 - edu_sharing_client/models/profile.py | 215 - edu_sharing_client/models/provider.py | 221 - edu_sharing_client/models/query.py | 137 - edu_sharing_client/models/rating_data.py | 163 - .../models/reference_entries.py | 138 - edu_sharing_client/models/register.py | 189 - edu_sharing_client/models/register_exists.py | 111 - .../models/register_information.py | 241 - edu_sharing_client/models/remote.py | 137 - .../models/remote_auth_description.py | 137 - edu_sharing_client/models/rendering.py | 85 - .../models/rendering_details_entry.py | 166 - edu_sharing_client/models/repo.py | 267 - edu_sharing_client/models/repo_entries.py | 112 - .../models/repository_config.py | 111 - edu_sharing_client/models/restore_result.py | 247 - edu_sharing_client/models/restore_results.py | 112 - .../models/search_parameters.py | 139 - edu_sharing_client/models/search_result.py | 192 - .../models/search_result_node.py | 192 - edu_sharing_client/models/serializable.py | 85 - .../models/server_update_info.py | 163 - edu_sharing_client/models/service.py | 139 - edu_sharing_client/models/service_instance.py | 139 - edu_sharing_client/models/service_version.py | 191 - edu_sharing_client/models/services.py | 111 - .../models/session_expired_dialog.py | 85 - .../models/shared_folder_options.py | 207 - edu_sharing_client/models/sharing_info.py | 215 - edu_sharing_client/models/simple_edit.py | 137 - edu_sharing_client/models/sort_column_v2.py | 137 - edu_sharing_client/models/sort_v2.py | 163 - edu_sharing_client/models/sort_v2_default.py | 137 - edu_sharing_client/models/statistic_entity.py | 139 - edu_sharing_client/models/statistic_entry.py | 139 - edu_sharing_client/models/statistics.py | 111 - .../models/statistics_global.py | 163 - edu_sharing_client/models/statistics_group.py | 137 - .../models/statistics_key_group.py | 189 - .../models/statistics_sub_group.py | 137 - edu_sharing_client/models/stored_service.py | 449 -- edu_sharing_client/models/stream.py | 111 - edu_sharing_client/models/stream_entry.py | 293 - .../models/stream_entry_input.py | 241 - 
edu_sharing_client/models/stream_list.py | 137 - edu_sharing_client/models/sub_group_item.py | 163 - edu_sharing_client/models/subwidget.py | 111 - edu_sharing_client/models/suggestion_param.py | 137 - edu_sharing_client/models/tracking.py | 215 - edu_sharing_client/models/tracking_node.py | 241 - edu_sharing_client/models/upload_result.py | 111 - edu_sharing_client/models/usage.py | 613 --- edu_sharing_client/models/usages.py | 111 - edu_sharing_client/models/user.py | 353 -- edu_sharing_client/models/user_credential.py | 138 - edu_sharing_client/models/user_entries.py | 139 - edu_sharing_client/models/user_entry.py | 138 - edu_sharing_client/models/user_profile.py | 293 - .../models/user_profile_edit.py | 319 -- edu_sharing_client/models/user_quota.py | 163 - edu_sharing_client/models/user_simple.py | 248 - edu_sharing_client/models/user_stats.py | 163 - edu_sharing_client/models/user_status.py | 143 - edu_sharing_client/models/value.py | 139 - edu_sharing_client/models/value_parameters.py | 168 - edu_sharing_client/models/value_v2.py | 189 - edu_sharing_client/models/values.py | 1541 ------ edu_sharing_client/models/variables.py | 137 - edu_sharing_client/models/view_v2.py | 241 - .../models/website_information.py | 215 - edu_sharing_client/models/widget_v2.py | 683 --- edu_sharing_client/models/workflow.py | 189 - edu_sharing_client/models/workflow_history.py | 215 - edu_sharing_client/rest.py | 322 -- 251 files changed, 68505 deletions(-) delete mode 100644 edu_sharing_client/__init__.py delete mode 100644 edu_sharing_client/api/__init__.py delete mode 100644 edu_sharing_client/api/about_api.py delete mode 100644 edu_sharing_client/api/admin_v1_api.py delete mode 100644 edu_sharing_client/api/archive_v1_api.py delete mode 100644 edu_sharing_client/api/authentication_v1_api.py delete mode 100644 edu_sharing_client/api/bulk_v1_api.py delete mode 100644 edu_sharing_client/api/clientutils_v1_api.py delete mode 100644 edu_sharing_client/api/collection_v1_api.py delete mode 100644 edu_sharing_client/api/comment_v1_api.py delete mode 100644 edu_sharing_client/api/config_v1_api.py delete mode 100644 edu_sharing_client/api/connector_v1_api.py delete mode 100644 edu_sharing_client/api/iam_v1_api.py delete mode 100644 edu_sharing_client/api/mds_v1_api.py delete mode 100644 edu_sharing_client/api/mediacenter_v1_api.py delete mode 100644 edu_sharing_client/api/network_v1_api.py delete mode 100644 edu_sharing_client/api/node_v1_api.py delete mode 100644 edu_sharing_client/api/organization_v1_api.py delete mode 100644 edu_sharing_client/api/rating_v1_api.py delete mode 100644 edu_sharing_client/api/register_v1_api.py delete mode 100644 edu_sharing_client/api/rendering_v1_api.py delete mode 100644 edu_sharing_client/api/search_v1_api.py delete mode 100644 edu_sharing_client/api/sharing_v1_api.py delete mode 100644 edu_sharing_client/api/statistic_v1_api.py delete mode 100644 edu_sharing_client/api/stream_v1_api.py delete mode 100644 edu_sharing_client/api/tool_v1_api.py delete mode 100644 edu_sharing_client/api/tracking_v1_api.py delete mode 100644 edu_sharing_client/api/usage_v1_api.py delete mode 100644 edu_sharing_client/api_client.py delete mode 100644 edu_sharing_client/configuration.py delete mode 100644 edu_sharing_client/models/__init__.py delete mode 100644 edu_sharing_client/models/about.py delete mode 100644 edu_sharing_client/models/accumulated_ratings.py delete mode 100644 edu_sharing_client/models/ace.py delete mode 100644 edu_sharing_client/models/acl.py delete mode 100644 
edu_sharing_client/models/admin.py delete mode 100644 edu_sharing_client/models/admin_statistics.py delete mode 100644 edu_sharing_client/models/application.py delete mode 100644 edu_sharing_client/models/audience.py delete mode 100644 edu_sharing_client/models/authority.py delete mode 100644 edu_sharing_client/models/authority_entries.py delete mode 100644 edu_sharing_client/models/available_mds.py delete mode 100644 edu_sharing_client/models/banner.py delete mode 100644 edu_sharing_client/models/body.py delete mode 100644 edu_sharing_client/models/body1.py delete mode 100644 edu_sharing_client/models/body10.py delete mode 100644 edu_sharing_client/models/body11.py delete mode 100644 edu_sharing_client/models/body2.py delete mode 100644 edu_sharing_client/models/body3.py delete mode 100644 edu_sharing_client/models/body4.py delete mode 100644 edu_sharing_client/models/body5.py delete mode 100644 edu_sharing_client/models/body6.py delete mode 100644 edu_sharing_client/models/body7.py delete mode 100644 edu_sharing_client/models/body8.py delete mode 100644 edu_sharing_client/models/body9.py delete mode 100644 edu_sharing_client/models/cache_cluster.py delete mode 100644 edu_sharing_client/models/cache_info.py delete mode 100644 edu_sharing_client/models/cache_member.py delete mode 100644 edu_sharing_client/models/catalog.py delete mode 100644 edu_sharing_client/models/collection.py delete mode 100644 edu_sharing_client/models/collection_counts.py delete mode 100644 edu_sharing_client/models/collection_entries.py delete mode 100644 edu_sharing_client/models/collection_entry.py delete mode 100644 edu_sharing_client/models/collection_feedback.py delete mode 100644 edu_sharing_client/models/collection_options.py delete mode 100644 edu_sharing_client/models/collection_reference.py delete mode 100644 edu_sharing_client/models/collections.py delete mode 100644 edu_sharing_client/models/collections_result.py delete mode 100644 edu_sharing_client/models/column_v2.py delete mode 100644 edu_sharing_client/models/comment.py delete mode 100644 edu_sharing_client/models/comments.py delete mode 100644 edu_sharing_client/models/condition.py delete mode 100644 edu_sharing_client/models/config.py delete mode 100644 edu_sharing_client/models/connector.py delete mode 100644 edu_sharing_client/models/connector_file_type.py delete mode 100644 edu_sharing_client/models/connector_list.py delete mode 100644 edu_sharing_client/models/content.py delete mode 100644 edu_sharing_client/models/context_menu_entry.py delete mode 100644 edu_sharing_client/models/counts.py delete mode 100644 edu_sharing_client/models/create.py delete mode 100644 edu_sharing_client/models/delete_option.py delete mode 100644 edu_sharing_client/models/dynamic_config.py delete mode 100644 edu_sharing_client/models/element.py delete mode 100644 edu_sharing_client/models/error_response.py delete mode 100644 edu_sharing_client/models/excel_result.py delete mode 100644 edu_sharing_client/models/facette.py delete mode 100644 edu_sharing_client/models/filter.py delete mode 100644 edu_sharing_client/models/filter_entry.py delete mode 100644 edu_sharing_client/models/frontpage.py delete mode 100644 edu_sharing_client/models/general.py delete mode 100644 edu_sharing_client/models/geo.py delete mode 100644 edu_sharing_client/models/group.py delete mode 100644 edu_sharing_client/models/group_entries.py delete mode 100644 edu_sharing_client/models/group_entry.py delete mode 100644 edu_sharing_client/models/group_profile.py delete mode 100644 
edu_sharing_client/models/group_v2.py delete mode 100644 edu_sharing_client/models/guest.py delete mode 100644 edu_sharing_client/models/help_menu_options.py delete mode 100644 edu_sharing_client/models/home_folder_options.py delete mode 100644 edu_sharing_client/models/icon.py delete mode 100644 edu_sharing_client/models/image.py delete mode 100644 edu_sharing_client/models/interface.py delete mode 100644 edu_sharing_client/models/job_detail.py delete mode 100644 edu_sharing_client/models/job_info.py delete mode 100644 edu_sharing_client/models/key.py delete mode 100644 edu_sharing_client/models/key_value_pair.py delete mode 100644 edu_sharing_client/models/language.py delete mode 100644 edu_sharing_client/models/level.py delete mode 100644 edu_sharing_client/models/license.py delete mode 100644 edu_sharing_client/models/license_agreement.py delete mode 100644 edu_sharing_client/models/license_agreement_node.py delete mode 100644 edu_sharing_client/models/list_v2.py delete mode 100644 edu_sharing_client/models/location.py delete mode 100644 edu_sharing_client/models/log_entry.py delete mode 100644 edu_sharing_client/models/login.py delete mode 100644 edu_sharing_client/models/login_credentials.py delete mode 100644 edu_sharing_client/models/logout_info.py delete mode 100644 edu_sharing_client/models/mainnav.py delete mode 100644 edu_sharing_client/models/mc_org_connect_result.py delete mode 100644 edu_sharing_client/models/mds.py delete mode 100644 edu_sharing_client/models/mds_entries_v2.py delete mode 100644 edu_sharing_client/models/mds_entry.py delete mode 100644 edu_sharing_client/models/mds_form.py delete mode 100644 edu_sharing_client/models/mds_form_panel.py delete mode 100644 edu_sharing_client/models/mds_form_property.py delete mode 100644 edu_sharing_client/models/mds_form_property_parameter.py delete mode 100644 edu_sharing_client/models/mds_form_property_value.py delete mode 100644 edu_sharing_client/models/mds_list.py delete mode 100644 edu_sharing_client/models/mds_list_property.py delete mode 100644 edu_sharing_client/models/mds_list_property_parameter.py delete mode 100644 edu_sharing_client/models/mds_list_property_value.py delete mode 100644 edu_sharing_client/models/mds_property.py delete mode 100644 edu_sharing_client/models/mds_queries.py delete mode 100644 edu_sharing_client/models/mds_query.py delete mode 100644 edu_sharing_client/models/mds_query_criteria.py delete mode 100644 edu_sharing_client/models/mds_query_property.py delete mode 100644 edu_sharing_client/models/mds_query_property_parameter.py delete mode 100644 edu_sharing_client/models/mds_query_property_value.py delete mode 100644 edu_sharing_client/models/mds_ref.py delete mode 100644 edu_sharing_client/models/mds_type.py delete mode 100644 edu_sharing_client/models/mds_v2.py delete mode 100644 edu_sharing_client/models/mds_view.py delete mode 100644 edu_sharing_client/models/mds_view_property.py delete mode 100644 edu_sharing_client/models/mds_view_property_parameter.py delete mode 100644 edu_sharing_client/models/mds_view_property_value.py delete mode 100644 edu_sharing_client/models/mediacenter.py delete mode 100644 edu_sharing_client/models/mediacenter_profile_extension.py delete mode 100644 edu_sharing_client/models/mediacenters_import_result.py delete mode 100644 edu_sharing_client/models/menu_entry.py delete mode 100644 edu_sharing_client/models/metadata_set_info.py delete mode 100644 edu_sharing_client/models/node.py delete mode 100644 edu_sharing_client/models/node_entries.py delete mode 100644 
edu_sharing_client/models/node_entry.py
 delete mode 100644 edu_sharing_client/models/node_locked.py
 delete mode 100644 edu_sharing_client/models/node_permission_entry.py
 delete mode 100644 edu_sharing_client/models/node_permissions.py
 delete mode 100644 edu_sharing_client/models/node_ref.py
 delete mode 100644 edu_sharing_client/models/node_remote.py
 delete mode 100644 edu_sharing_client/models/node_share.py
 delete mode 100644 edu_sharing_client/models/node_text.py
 delete mode 100644 edu_sharing_client/models/node_version.py
 delete mode 100644 edu_sharing_client/models/node_version_entry.py
 delete mode 100644 edu_sharing_client/models/node_version_ref.py
 delete mode 100644 edu_sharing_client/models/node_version_ref_entries.py
 delete mode 100644 edu_sharing_client/models/notify_entry.py
 delete mode 100644 edu_sharing_client/models/organisations_import_result.py
 delete mode 100644 edu_sharing_client/models/organization.py
 delete mode 100644 edu_sharing_client/models/organization_entries.py
 delete mode 100644 edu_sharing_client/models/pagination.py
 delete mode 100644 edu_sharing_client/models/parameters.py
 delete mode 100644 edu_sharing_client/models/parent_entries.py
 delete mode 100644 edu_sharing_client/models/person.py
 delete mode 100644 edu_sharing_client/models/person_delete_options.py
 delete mode 100644 edu_sharing_client/models/person_delete_result.py
 delete mode 100644 edu_sharing_client/models/person_report.py
 delete mode 100644 edu_sharing_client/models/preferences.py
 delete mode 100644 edu_sharing_client/models/preview.py
 delete mode 100644 edu_sharing_client/models/profile.py
 delete mode 100644 edu_sharing_client/models/provider.py
 delete mode 100644 edu_sharing_client/models/query.py
 delete mode 100644 edu_sharing_client/models/rating_data.py
 delete mode 100644 edu_sharing_client/models/reference_entries.py
 delete mode 100644 edu_sharing_client/models/register.py
 delete mode 100644 edu_sharing_client/models/register_exists.py
 delete mode 100644 edu_sharing_client/models/register_information.py
 delete mode 100644 edu_sharing_client/models/remote.py
 delete mode 100644 edu_sharing_client/models/remote_auth_description.py
 delete mode 100644 edu_sharing_client/models/rendering.py
 delete mode 100644 edu_sharing_client/models/rendering_details_entry.py
 delete mode 100644 edu_sharing_client/models/repo.py
 delete mode 100644 edu_sharing_client/models/repo_entries.py
 delete mode 100644 edu_sharing_client/models/repository_config.py
 delete mode 100644 edu_sharing_client/models/restore_result.py
 delete mode 100644 edu_sharing_client/models/restore_results.py
 delete mode 100644 edu_sharing_client/models/search_parameters.py
 delete mode 100644 edu_sharing_client/models/search_result.py
 delete mode 100644 edu_sharing_client/models/search_result_node.py
 delete mode 100644 edu_sharing_client/models/serializable.py
 delete mode 100644 edu_sharing_client/models/server_update_info.py
 delete mode 100644 edu_sharing_client/models/service.py
 delete mode 100644 edu_sharing_client/models/service_instance.py
 delete mode 100644 edu_sharing_client/models/service_version.py
 delete mode 100644 edu_sharing_client/models/services.py
 delete mode 100644 edu_sharing_client/models/session_expired_dialog.py
 delete mode 100644 edu_sharing_client/models/shared_folder_options.py
 delete mode 100644 edu_sharing_client/models/sharing_info.py
 delete mode 100644 edu_sharing_client/models/simple_edit.py
 delete mode 100644 edu_sharing_client/models/sort_column_v2.py
 delete mode 100644 edu_sharing_client/models/sort_v2.py
 delete mode 100644 edu_sharing_client/models/sort_v2_default.py
 delete mode 100644 edu_sharing_client/models/statistic_entity.py
 delete mode 100644 edu_sharing_client/models/statistic_entry.py
 delete mode 100644 edu_sharing_client/models/statistics.py
 delete mode 100644 edu_sharing_client/models/statistics_global.py
 delete mode 100644 edu_sharing_client/models/statistics_group.py
 delete mode 100644 edu_sharing_client/models/statistics_key_group.py
 delete mode 100644 edu_sharing_client/models/statistics_sub_group.py
 delete mode 100644 edu_sharing_client/models/stored_service.py
 delete mode 100644 edu_sharing_client/models/stream.py
 delete mode 100644 edu_sharing_client/models/stream_entry.py
 delete mode 100644 edu_sharing_client/models/stream_entry_input.py
 delete mode 100644 edu_sharing_client/models/stream_list.py
 delete mode 100644 edu_sharing_client/models/sub_group_item.py
 delete mode 100644 edu_sharing_client/models/subwidget.py
 delete mode 100644 edu_sharing_client/models/suggestion_param.py
 delete mode 100644 edu_sharing_client/models/tracking.py
 delete mode 100644 edu_sharing_client/models/tracking_node.py
 delete mode 100644 edu_sharing_client/models/upload_result.py
 delete mode 100644 edu_sharing_client/models/usage.py
 delete mode 100644 edu_sharing_client/models/usages.py
 delete mode 100644 edu_sharing_client/models/user.py
 delete mode 100644 edu_sharing_client/models/user_credential.py
 delete mode 100644 edu_sharing_client/models/user_entries.py
 delete mode 100644 edu_sharing_client/models/user_entry.py
 delete mode 100644 edu_sharing_client/models/user_profile.py
 delete mode 100644 edu_sharing_client/models/user_profile_edit.py
 delete mode 100644 edu_sharing_client/models/user_quota.py
 delete mode 100644 edu_sharing_client/models/user_simple.py
 delete mode 100644 edu_sharing_client/models/user_stats.py
 delete mode 100644 edu_sharing_client/models/user_status.py
 delete mode 100644 edu_sharing_client/models/value.py
 delete mode 100644 edu_sharing_client/models/value_parameters.py
 delete mode 100644 edu_sharing_client/models/value_v2.py
 delete mode 100644 edu_sharing_client/models/values.py
 delete mode 100644 edu_sharing_client/models/variables.py
 delete mode 100644 edu_sharing_client/models/view_v2.py
 delete mode 100644 edu_sharing_client/models/website_information.py
 delete mode 100644 edu_sharing_client/models/widget_v2.py
 delete mode 100644 edu_sharing_client/models/workflow.py
 delete mode 100644 edu_sharing_client/models/workflow_history.py
 delete mode 100644 edu_sharing_client/rest.py

diff --git a/edu_sharing_client/__init__.py b/edu_sharing_client/__init__.py
deleted file mode 100644
index e0e88954..00000000
--- a/edu_sharing_client/__init__.py
+++ /dev/null
@@ -1,266 +0,0 @@
-# coding: utf-8
-
-# flake8: noqa
-
-"""
-    edu-sharing Repository REST API
-
-    The public restful API of the edu-sharing repository.
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -# import apis into sdk package -from edu_sharing_client.api.about_api import ABOUTApi -from edu_sharing_client.api.admin_v1_api import ADMINV1Api -from edu_sharing_client.api.archive_v1_api import ARCHIVEV1Api -from edu_sharing_client.api.authentication_v1_api import AUTHENTICATIONV1Api -from edu_sharing_client.api.bulk_v1_api import BULKV1Api -from edu_sharing_client.api.clientutils_v1_api import CLIENTUTILSV1Api -from edu_sharing_client.api.collection_v1_api import COLLECTIONV1Api -from edu_sharing_client.api.comment_v1_api import COMMENTV1Api -from edu_sharing_client.api.config_v1_api import CONFIGV1Api -from edu_sharing_client.api.connector_v1_api import CONNECTORV1Api -from edu_sharing_client.api.iam_v1_api import IAMV1Api -from edu_sharing_client.api.mds_v1_api import MDSV1Api -from edu_sharing_client.api.mediacenter_v1_api import MEDIACENTERV1Api -from edu_sharing_client.api.network_v1_api import NETWORKV1Api -from edu_sharing_client.api.node_v1_api import NODEV1Api -from edu_sharing_client.api.organization_v1_api import ORGANIZATIONV1Api -from edu_sharing_client.api.rating_v1_api import RATINGV1Api -from edu_sharing_client.api.register_v1_api import REGISTERV1Api -from edu_sharing_client.api.rendering_v1_api import RENDERINGV1Api -from edu_sharing_client.api.search_v1_api import SEARCHV1Api -from edu_sharing_client.api.sharing_v1_api import SHARINGV1Api -from edu_sharing_client.api.statistic_v1_api import STATISTICV1Api -from edu_sharing_client.api.stream_v1_api import STREAMV1Api -from edu_sharing_client.api.tool_v1_api import TOOLV1Api -from edu_sharing_client.api.tracking_v1_api import TRACKINGV1Api -from edu_sharing_client.api.usage_v1_api import USAGEV1Api -# import ApiClient -from edu_sharing_client.api_client import ApiClient -from edu_sharing_client.configuration import Configuration -# import models into sdk package -from edu_sharing_client.models.ace import ACE -from edu_sharing_client.models.acl import ACL -from edu_sharing_client.models.about import About -from edu_sharing_client.models.accumulated_ratings import AccumulatedRatings -from edu_sharing_client.models.admin import Admin -from edu_sharing_client.models.admin_statistics import AdminStatistics -from edu_sharing_client.models.application import Application -from edu_sharing_client.models.audience import Audience -from edu_sharing_client.models.authority import Authority -from edu_sharing_client.models.authority_entries import AuthorityEntries -from edu_sharing_client.models.available_mds import AvailableMds -from edu_sharing_client.models.banner import Banner -from edu_sharing_client.models.body import Body -from edu_sharing_client.models.body1 import Body1 -from edu_sharing_client.models.body10 import Body10 -from edu_sharing_client.models.body11 import Body11 -from edu_sharing_client.models.body2 import Body2 -from edu_sharing_client.models.body3 import Body3 -from edu_sharing_client.models.body4 import Body4 -from edu_sharing_client.models.body5 import Body5 -from edu_sharing_client.models.body6 import Body6 -from edu_sharing_client.models.body7 import Body7 -from edu_sharing_client.models.body8 import Body8 -from edu_sharing_client.models.body9 import Body9 -from edu_sharing_client.models.cache_cluster import CacheCluster -from edu_sharing_client.models.cache_info import CacheInfo -from edu_sharing_client.models.cache_member import CacheMember -from 
edu_sharing_client.models.catalog import Catalog -from edu_sharing_client.models.collection import Collection -from edu_sharing_client.models.collection_counts import CollectionCounts -from edu_sharing_client.models.collection_entries import CollectionEntries -from edu_sharing_client.models.collection_entry import CollectionEntry -from edu_sharing_client.models.collection_feedback import CollectionFeedback -from edu_sharing_client.models.collection_options import CollectionOptions -from edu_sharing_client.models.collection_reference import CollectionReference -from edu_sharing_client.models.collections import Collections -from edu_sharing_client.models.collections_result import CollectionsResult -from edu_sharing_client.models.column_v2 import ColumnV2 -from edu_sharing_client.models.comment import Comment -from edu_sharing_client.models.comments import Comments -from edu_sharing_client.models.condition import Condition -from edu_sharing_client.models.config import Config -from edu_sharing_client.models.connector import Connector -from edu_sharing_client.models.connector_file_type import ConnectorFileType -from edu_sharing_client.models.connector_list import ConnectorList -from edu_sharing_client.models.content import Content -from edu_sharing_client.models.context_menu_entry import ContextMenuEntry -from edu_sharing_client.models.counts import Counts -from edu_sharing_client.models.create import Create -from edu_sharing_client.models.delete_option import DeleteOption -from edu_sharing_client.models.dynamic_config import DynamicConfig -from edu_sharing_client.models.element import Element -from edu_sharing_client.models.error_response import ErrorResponse -from edu_sharing_client.models.excel_result import ExcelResult -from edu_sharing_client.models.facette import Facette -from edu_sharing_client.models.filter import Filter -from edu_sharing_client.models.filter_entry import FilterEntry -from edu_sharing_client.models.frontpage import Frontpage -from edu_sharing_client.models.general import General -from edu_sharing_client.models.geo import Geo -from edu_sharing_client.models.group import Group -from edu_sharing_client.models.group_entries import GroupEntries -from edu_sharing_client.models.group_entry import GroupEntry -from edu_sharing_client.models.group_profile import GroupProfile -from edu_sharing_client.models.group_v2 import GroupV2 -from edu_sharing_client.models.guest import Guest -from edu_sharing_client.models.help_menu_options import HelpMenuOptions -from edu_sharing_client.models.home_folder_options import HomeFolderOptions -from edu_sharing_client.models.icon import Icon -from edu_sharing_client.models.image import Image -from edu_sharing_client.models.interface import Interface -from edu_sharing_client.models.job_detail import JobDetail -from edu_sharing_client.models.job_info import JobInfo -from edu_sharing_client.models.key import Key -from edu_sharing_client.models.key_value_pair import KeyValuePair -from edu_sharing_client.models.language import Language -from edu_sharing_client.models.level import Level -from edu_sharing_client.models.license import License -from edu_sharing_client.models.license_agreement import LicenseAgreement -from edu_sharing_client.models.license_agreement_node import LicenseAgreementNode -from edu_sharing_client.models.list_v2 import ListV2 -from edu_sharing_client.models.location import Location -from edu_sharing_client.models.log_entry import LogEntry -from edu_sharing_client.models.login import Login -from 
edu_sharing_client.models.login_credentials import LoginCredentials -from edu_sharing_client.models.logout_info import LogoutInfo -from edu_sharing_client.models.mainnav import Mainnav -from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult -from edu_sharing_client.models.mds import Mds -from edu_sharing_client.models.mds_entries_v2 import MdsEntriesV2 -from edu_sharing_client.models.mds_entry import MdsEntry -from edu_sharing_client.models.mds_form import MdsForm -from edu_sharing_client.models.mds_form_panel import MdsFormPanel -from edu_sharing_client.models.mds_form_property import MdsFormProperty -from edu_sharing_client.models.mds_form_property_parameter import MdsFormPropertyParameter -from edu_sharing_client.models.mds_form_property_value import MdsFormPropertyValue -from edu_sharing_client.models.mds_list import MdsList -from edu_sharing_client.models.mds_list_property import MdsListProperty -from edu_sharing_client.models.mds_list_property_parameter import MdsListPropertyParameter -from edu_sharing_client.models.mds_list_property_value import MdsListPropertyValue -from edu_sharing_client.models.mds_property import MdsProperty -from edu_sharing_client.models.mds_queries import MdsQueries -from edu_sharing_client.models.mds_query import MdsQuery -from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria -from edu_sharing_client.models.mds_query_property import MdsQueryProperty -from edu_sharing_client.models.mds_query_property_parameter import MdsQueryPropertyParameter -from edu_sharing_client.models.mds_query_property_value import MdsQueryPropertyValue -from edu_sharing_client.models.mds_ref import MdsRef -from edu_sharing_client.models.mds_type import MdsType -from edu_sharing_client.models.mds_v2 import MdsV2 -from edu_sharing_client.models.mds_view import MdsView -from edu_sharing_client.models.mds_view_property import MdsViewProperty -from edu_sharing_client.models.mds_view_property_parameter import MdsViewPropertyParameter -from edu_sharing_client.models.mds_view_property_value import MdsViewPropertyValue -from edu_sharing_client.models.mediacenter import Mediacenter -from edu_sharing_client.models.mediacenter_profile_extension import MediacenterProfileExtension -from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult -from edu_sharing_client.models.menu_entry import MenuEntry -from edu_sharing_client.models.metadata_set_info import MetadataSetInfo -from edu_sharing_client.models.node import Node -from edu_sharing_client.models.node_entries import NodeEntries -from edu_sharing_client.models.node_entry import NodeEntry -from edu_sharing_client.models.node_locked import NodeLocked -from edu_sharing_client.models.node_permission_entry import NodePermissionEntry -from edu_sharing_client.models.node_permissions import NodePermissions -from edu_sharing_client.models.node_ref import NodeRef -from edu_sharing_client.models.node_remote import NodeRemote -from edu_sharing_client.models.node_share import NodeShare -from edu_sharing_client.models.node_text import NodeText -from edu_sharing_client.models.node_version import NodeVersion -from edu_sharing_client.models.node_version_entry import NodeVersionEntry -from edu_sharing_client.models.node_version_ref import NodeVersionRef -from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries -from edu_sharing_client.models.notify_entry import NotifyEntry -from edu_sharing_client.models.organisations_import_result import 
OrganisationsImportResult -from edu_sharing_client.models.organization import Organization -from edu_sharing_client.models.organization_entries import OrganizationEntries -from edu_sharing_client.models.pagination import Pagination -from edu_sharing_client.models.parameters import Parameters -from edu_sharing_client.models.parent_entries import ParentEntries -from edu_sharing_client.models.person import Person -from edu_sharing_client.models.person_delete_options import PersonDeleteOptions -from edu_sharing_client.models.person_delete_result import PersonDeleteResult -from edu_sharing_client.models.person_report import PersonReport -from edu_sharing_client.models.preferences import Preferences -from edu_sharing_client.models.preview import Preview -from edu_sharing_client.models.profile import Profile -from edu_sharing_client.models.provider import Provider -from edu_sharing_client.models.query import Query -from edu_sharing_client.models.rating_data import RatingData -from edu_sharing_client.models.reference_entries import ReferenceEntries -from edu_sharing_client.models.register import Register -from edu_sharing_client.models.register_exists import RegisterExists -from edu_sharing_client.models.register_information import RegisterInformation -from edu_sharing_client.models.remote import Remote -from edu_sharing_client.models.remote_auth_description import RemoteAuthDescription -from edu_sharing_client.models.rendering import Rendering -from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry -from edu_sharing_client.models.repo import Repo -from edu_sharing_client.models.repo_entries import RepoEntries -from edu_sharing_client.models.repository_config import RepositoryConfig -from edu_sharing_client.models.restore_result import RestoreResult -from edu_sharing_client.models.restore_results import RestoreResults -from edu_sharing_client.models.search_parameters import SearchParameters -from edu_sharing_client.models.search_result import SearchResult -from edu_sharing_client.models.search_result_node import SearchResultNode -from edu_sharing_client.models.serializable import Serializable -from edu_sharing_client.models.server_update_info import ServerUpdateInfo -from edu_sharing_client.models.service import Service -from edu_sharing_client.models.service_instance import ServiceInstance -from edu_sharing_client.models.service_version import ServiceVersion -from edu_sharing_client.models.services import Services -from edu_sharing_client.models.session_expired_dialog import SessionExpiredDialog -from edu_sharing_client.models.shared_folder_options import SharedFolderOptions -from edu_sharing_client.models.sharing_info import SharingInfo -from edu_sharing_client.models.simple_edit import SimpleEdit -from edu_sharing_client.models.sort_column_v2 import SortColumnV2 -from edu_sharing_client.models.sort_v2 import SortV2 -from edu_sharing_client.models.sort_v2_default import SortV2Default -from edu_sharing_client.models.statistic_entity import StatisticEntity -from edu_sharing_client.models.statistic_entry import StatisticEntry -from edu_sharing_client.models.statistics import Statistics -from edu_sharing_client.models.statistics_global import StatisticsGlobal -from edu_sharing_client.models.statistics_group import StatisticsGroup -from edu_sharing_client.models.statistics_key_group import StatisticsKeyGroup -from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup -from edu_sharing_client.models.stored_service import StoredService -from 
edu_sharing_client.models.stream import Stream -from edu_sharing_client.models.stream_entry import StreamEntry -from edu_sharing_client.models.stream_entry_input import StreamEntryInput -from edu_sharing_client.models.stream_list import StreamList -from edu_sharing_client.models.sub_group_item import SubGroupItem -from edu_sharing_client.models.subwidget import Subwidget -from edu_sharing_client.models.suggestion_param import SuggestionParam -from edu_sharing_client.models.tracking import Tracking -from edu_sharing_client.models.tracking_node import TrackingNode -from edu_sharing_client.models.upload_result import UploadResult -from edu_sharing_client.models.usage import Usage -from edu_sharing_client.models.usages import Usages -from edu_sharing_client.models.user import User -from edu_sharing_client.models.user_credential import UserCredential -from edu_sharing_client.models.user_entries import UserEntries -from edu_sharing_client.models.user_entry import UserEntry -from edu_sharing_client.models.user_profile import UserProfile -from edu_sharing_client.models.user_profile_edit import UserProfileEdit -from edu_sharing_client.models.user_quota import UserQuota -from edu_sharing_client.models.user_simple import UserSimple -from edu_sharing_client.models.user_stats import UserStats -from edu_sharing_client.models.user_status import UserStatus -from edu_sharing_client.models.value import Value -from edu_sharing_client.models.value_parameters import ValueParameters -from edu_sharing_client.models.value_v2 import ValueV2 -from edu_sharing_client.models.values import Values -from edu_sharing_client.models.variables import Variables -from edu_sharing_client.models.view_v2 import ViewV2 -from edu_sharing_client.models.website_information import WebsiteInformation -from edu_sharing_client.models.widget_v2 import WidgetV2 -from edu_sharing_client.models.workflow import Workflow -from edu_sharing_client.models.workflow_history import WorkflowHistory diff --git a/edu_sharing_client/api/__init__.py b/edu_sharing_client/api/__init__.py deleted file mode 100644 index 7168d1d5..00000000 --- a/edu_sharing_client/api/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import absolute_import - -# flake8: noqa - -# import apis into api package -from edu_sharing_client.api.about_api import ABOUTApi -from edu_sharing_client.api.admin_v1_api import ADMINV1Api -from edu_sharing_client.api.archive_v1_api import ARCHIVEV1Api -from edu_sharing_client.api.authentication_v1_api import AUTHENTICATIONV1Api -from edu_sharing_client.api.bulk_v1_api import BULKV1Api -from edu_sharing_client.api.clientutils_v1_api import CLIENTUTILSV1Api -from edu_sharing_client.api.collection_v1_api import COLLECTIONV1Api -from edu_sharing_client.api.comment_v1_api import COMMENTV1Api -from edu_sharing_client.api.config_v1_api import CONFIGV1Api -from edu_sharing_client.api.connector_v1_api import CONNECTORV1Api -from edu_sharing_client.api.iam_v1_api import IAMV1Api -from edu_sharing_client.api.mds_v1_api import MDSV1Api -from edu_sharing_client.api.mediacenter_v1_api import MEDIACENTERV1Api -from edu_sharing_client.api.network_v1_api import NETWORKV1Api -from edu_sharing_client.api.node_v1_api import NODEV1Api -from edu_sharing_client.api.organization_v1_api import ORGANIZATIONV1Api -from edu_sharing_client.api.rating_v1_api import RATINGV1Api -from edu_sharing_client.api.register_v1_api import REGISTERV1Api -from edu_sharing_client.api.rendering_v1_api import RENDERINGV1Api -from edu_sharing_client.api.search_v1_api import 
SEARCHV1Api
-from edu_sharing_client.api.sharing_v1_api import SHARINGV1Api
-from edu_sharing_client.api.statistic_v1_api import STATISTICV1Api
-from edu_sharing_client.api.stream_v1_api import STREAMV1Api
-from edu_sharing_client.api.tool_v1_api import TOOLV1Api
-from edu_sharing_client.api.tracking_v1_api import TRACKINGV1Api
-from edu_sharing_client.api.usage_v1_api import USAGEV1Api

diff --git a/edu_sharing_client/api/about_api.py b/edu_sharing_client/api/about_api.py
deleted file mode 100644
index 2fccbc67..00000000
--- a/edu_sharing_client/api/about_api.py
+++ /dev/null
@@ -1,219 +0,0 @@
-# coding: utf-8
-
-"""
-    edu-sharing Repository REST API
-
-    The public restful API of the edu-sharing repository.  # noqa: E501
-
-    OpenAPI spec version: 1.1
-
-    Generated by: https://github.com/swagger-api/swagger-codegen.git
-"""
-
-from __future__ import absolute_import
-
-import re  # noqa: F401
-
-# python 2 and python 3 compatibility library
-import six
-
-from edu_sharing_client.api_client import ApiClient
-
-
-class ABOUTApi(object):
-    """NOTE: This class is auto generated by the swagger code generator program.
-
-    Do not edit the class manually.
-    Ref: https://github.com/swagger-api/swagger-codegen
-    """
-
-    def __init__(self, api_client=None):
-        if api_client is None:
-            api_client = ApiClient()
-        self.api_client = api_client
-
-    def about(self, **kwargs):  # noqa: E501
-        """Discover the API.  # noqa: E501
-
-        Get all services provided by this API.  # noqa: E501
-        This method makes a synchronous HTTP request by default. To make an
-        asynchronous HTTP request, please pass async_req=True
-        >>> thread = api.about(async_req=True)
-        >>> result = thread.get()
-
-        :param async_req bool
-        :return: About
-                 If the method is called asynchronously,
-                 returns the request thread.
-        """
-        kwargs['_return_http_data_only'] = True
-        if kwargs.get('async_req'):
-            return self.about_with_http_info(**kwargs)  # noqa: E501
-        else:
-            (data) = self.about_with_http_info(**kwargs)  # noqa: E501
-            return data
-
-    def about_with_http_info(self, **kwargs):  # noqa: E501
-        """Discover the API.  # noqa: E501
-
-        Get all services provided by this API.  # noqa: E501
-        This method makes a synchronous HTTP request by default. To make an
-        asynchronous HTTP request, please pass async_req=True
-        >>> thread = api.about_with_http_info(async_req=True)
-        >>> result = thread.get()
-
-        :param async_req bool
-        :return: About
-                 If the method is called asynchronously,
-                 returns the request thread.
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method about" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/_about', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='About', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def status(self, mode, **kwargs): # noqa: E501 - """status of repo services # noqa: E501 - - returns http status 200 when ok # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.status(mode, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mode: (required) - :param int timeout_seconds: - :return: str - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.status_with_http_info(mode, **kwargs) # noqa: E501 - else: - (data) = self.status_with_http_info(mode, **kwargs) # noqa: E501 - return data - - def status_with_http_info(self, mode, **kwargs): # noqa: E501 - """status of repo services # noqa: E501 - - returns http status 200 when ok # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.status_with_http_info(mode, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mode: (required) - :param int timeout_seconds: - :return: str - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['mode', 'timeout_seconds'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method status" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mode' is set - if ('mode' not in params or - params['mode'] is None): - raise ValueError("Missing the required parameter `mode` when calling `status`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'mode' in params: - path_params['mode'] = params['mode'] # noqa: E501 - - query_params = [] - if 'timeout_seconds' in params: - query_params.append(('timeoutSeconds', params['timeout_seconds'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/_about/status/{mode}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='str', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/admin_v1_api.py b/edu_sharing_client/api/admin_v1_api.py deleted file mode 100644 index 002df979..00000000 --- a/edu_sharing_client/api/admin_v1_api.py +++ /dev/null @@ -1,4267 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class ADMINV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_application(self, url, **kwargs): # noqa: E501 - """register/add an application # noqa: E501 - - register the specified application. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_application(url, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str url: Remote application metadata url (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_application_with_http_info(url, **kwargs) # noqa: E501 - else: - (data) = self.add_application_with_http_info(url, **kwargs) # noqa: E501 - return data - - def add_application_with_http_info(self, url, **kwargs): # noqa: E501 - """register/add an application # noqa: E501 - - register the specified application. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_application_with_http_info(url, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str url: Remote application metadata url (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['url'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_application" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'url' is set - if ('url' not in params or - params['url'] is None): - raise ValueError("Missing the required parameter `url` when calling `add_application`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'url' in params: - query_params.append(('url', params['url'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/applications', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def add_application_0(self, xml, **kwargs): # noqa: E501 - """register/add an application via xml file # noqa: E501 - - register the xml file provided. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_application_0(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_application_0_with_http_info(xml, **kwargs) # noqa: E501 - else: - (data) = self.add_application_0_with_http_info(xml, **kwargs) # noqa: E501 - return data - - def add_application_0_with_http_info(self, xml, **kwargs): # noqa: E501 - """register/add an application via xml file # noqa: E501 - - register the xml file provided. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_application_0_with_http_info(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['xml'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_application_0" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'xml' is set - if ('xml' not in params or - params['xml'] is None): - raise ValueError("Missing the required parameter `xml` when calling `add_application_0`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'xml' in params: - local_var_files['xml'] = params['xml'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/applications/xml', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def add_toolpermission(self, name, **kwargs): # noqa: E501 - """add a new toolpermissions # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_toolpermission(name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str name: Name/ID of toolpermission (required) - :return: Node - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_toolpermission_with_http_info(name, **kwargs) # noqa: E501 - else: - (data) = self.add_toolpermission_with_http_info(name, **kwargs) # noqa: E501 - return data - - def add_toolpermission_with_http_info(self, name, **kwargs): # noqa: E501 - """add a new toolpermissions # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_toolpermission_with_http_info(name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str name: Name/ID of toolpermission (required) - :return: Node - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_toolpermission" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'name' is set - if ('name' not in params or - params['name'] is None): - raise ValueError("Missing the required parameter `name` when calling `add_toolpermission`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'name' in params: - path_params['name'] = params['name'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/toolpermissions/add/{name}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Node', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def apply_template(self, template, group, **kwargs): # noqa: E501 - """apply a folder template # noqa: E501 - - apply a folder template. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.apply_template(template, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str template: Template Filename (required) - :param str group: Group name (authority name) (required) - :param str folder: Folder name - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.apply_template_with_http_info(template, group, **kwargs) # noqa: E501 - else: - (data) = self.apply_template_with_http_info(template, group, **kwargs) # noqa: E501 - return data - - def apply_template_with_http_info(self, template, group, **kwargs): # noqa: E501 - """apply a folder template # noqa: E501 - - apply a folder template. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.apply_template_with_http_info(template, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str template: Template Filename (required) - :param str group: Group name (authority name) (required) - :param str folder: Folder name - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['template', 'group', 'folder'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method apply_template" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'template' is set - if ('template' not in params or - params['template'] is None): - raise ValueError("Missing the required parameter `template` when calling `apply_template`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `apply_template`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'template' in params: - query_params.append(('template', params['template'])) # noqa: E501 - if 'group' in params: - query_params.append(('group', params['group'])) # noqa: E501 - if 'folder' in params: - query_params.append(('folder', params['folder'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/applyTemplate', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def cancel_job(self, job, **kwargs): # noqa: E501 - """cancel a running job # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.cancel_job(job, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str job: (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.cancel_job_with_http_info(job, **kwargs) # noqa: E501 - else: - (data) = self.cancel_job_with_http_info(job, **kwargs) # noqa: E501 - return data - - def cancel_job_with_http_info(self, job, **kwargs): # noqa: E501 - """cancel a running job # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.cancel_job_with_http_info(job, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str job: (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['job'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method cancel_job" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'job' is set - if ('job' not in params or - params['job'] is None): - raise ValueError("Missing the required parameter `job` when calling `cancel_job`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'job' in params: - path_params['job'] = params['job'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/jobs/{job}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_logging(self, name, loglevel, **kwargs): # noqa: E501 - """Change the loglevel for classes at runtime. # noqa: E501 - - Root appenders are used. Check the appender treshold. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_logging(name, loglevel, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str name: name (required) - :param str loglevel: loglevel (required) - :param str appender: appender - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_logging_with_http_info(name, loglevel, **kwargs) # noqa: E501 - else: - (data) = self.change_logging_with_http_info(name, loglevel, **kwargs) # noqa: E501 - return data - - def change_logging_with_http_info(self, name, loglevel, **kwargs): # noqa: E501 - """Change the loglevel for classes at runtime. # noqa: E501 - - Root appenders are used. Check the appender treshold. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_logging_with_http_info(name, loglevel, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str name: name (required) - :param str loglevel: loglevel (required) - :param str appender: appender - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['name', 'loglevel', 'appender'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_logging" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'name' is set - if ('name' not in params or - params['name'] is None): - raise ValueError("Missing the required parameter `name` when calling `change_logging`") # noqa: E501 - # verify the required parameter 'loglevel' is set - if ('loglevel' not in params or - params['loglevel'] is None): - raise ValueError("Missing the required parameter `loglevel` when calling `change_logging`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 - if 'loglevel' in params: - query_params.append(('loglevel', params['loglevel'])) # noqa: E501 - if 'appender' in params: - query_params.append(('appender', params['appender'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/log', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_person(self, username, **kwargs): # noqa: E501 - """delete persons # noqa: E501 - - delete the given persons. Their status must be set to \"todelete\" # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_person(username, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] username: names of the users to delete (required) - :param PersonDeleteOptions body: options object what and how to delete user contents - :return: PersonReport - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_person_with_http_info(username, **kwargs) # noqa: E501 - else: - (data) = self.delete_person_with_http_info(username, **kwargs) # noqa: E501 - return data - - def delete_person_with_http_info(self, username, **kwargs): # noqa: E501 - """delete persons # noqa: E501 - - delete the given persons. Their status must be set to \"todelete\" # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_person_with_http_info(username, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] username: names of the users to delete (required) - :param PersonDeleteOptions body: options object what and how to delete user contents - :return: PersonReport - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['username', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_person" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'username' is set - if ('username' not in params or - params['username'] is None): - raise ValueError("Missing the required parameter `username` when calling `delete_person`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'username' in params: - query_params.append(('username', params['username'])) # noqa: E501 - collection_formats['username'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/deletePersons', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='PersonReport', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def export_by_lucene(self, **kwargs): # noqa: E501 - """Search for custom lucene query and choose specific properties to load # noqa: E501 - - e.g. @cm\\:name:\"*\" # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.export_by_lucene(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str query: query - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] properties: properties to fetch, use parent:: to include parent property values - :return: list[object] - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.export_by_lucene_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.export_by_lucene_with_http_info(**kwargs) # noqa: E501 - return data - - def export_by_lucene_with_http_info(self, **kwargs): # noqa: E501 - """Search for custom lucene query and choose specific properties to load # noqa: E501 - - e.g. @cm\\:name:\"*\" # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.export_by_lucene_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str query: query - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] properties: properties to fetch, use parent:: to include parent property values - :return: list[object] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['query', 'sort_properties', 'sort_ascending', 'properties'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method export_by_lucene" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'query' in params: - query_params.append(('query', params['query'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'properties' in params: - query_params.append(('properties', params['properties'])) # noqa: E501 - collection_formats['properties'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/lucene/export', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[object]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def export_lom(self, filter_query, target_dir, sub_object_handler, **kwargs): # noqa: E501 - """Export Nodes with LOM Metadata Format # noqa: E501 - - Export Nodes with LOM Metadata Format. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.export_lom(filter_query, target_dir, sub_object_handler, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str filter_query: filterQuery (required) - :param str target_dir: targetDir (required) - :param bool sub_object_handler: subObjectHandler (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.export_lom_with_http_info(filter_query, target_dir, sub_object_handler, **kwargs) # noqa: E501 - else: - (data) = self.export_lom_with_http_info(filter_query, target_dir, sub_object_handler, **kwargs) # noqa: E501 - return data - - def export_lom_with_http_info(self, filter_query, target_dir, sub_object_handler, **kwargs): # noqa: E501 - """Export Nodes with LOM Metadata Format # noqa: E501 - - Export Nodes with LOM Metadata Format. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.export_lom_with_http_info(filter_query, target_dir, sub_object_handler, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str filter_query: filterQuery (required) - :param str target_dir: targetDir (required) - :param bool sub_object_handler: subObjectHandler (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['filter_query', 'target_dir', 'sub_object_handler'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method export_lom" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'filter_query' is set - if ('filter_query' not in params or - params['filter_query'] is None): - raise ValueError("Missing the required parameter `filter_query` when calling `export_lom`") # noqa: E501 - # verify the required parameter 'target_dir' is set - if ('target_dir' not in params or - params['target_dir'] is None): - raise ValueError("Missing the required parameter `target_dir` when calling `export_lom`") # noqa: E501 - # verify the required parameter 'sub_object_handler' is set - if ('sub_object_handler' not in params or - params['sub_object_handler'] is None): - raise ValueError("Missing the required parameter `sub_object_handler` when calling `export_lom`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'filter_query' in params: - query_params.append(('filterQuery', params['filter_query'])) # noqa: E501 - if 'target_dir' in params: - query_params.append(('targetDir', params['target_dir'])) # noqa: E501 - if 'sub_object_handler' in params: - query_params.append(('subObjectHandler', params['sub_object_handler'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/export/lom', 'GET', - path_params, - query_params, - header_params, - 
body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_all_toolpermissions(self, authority, **kwargs): # noqa: E501 - """get all toolpermissions for an authority # noqa: E501 - - Returns explicit (rights set for this authority) + effective (resulting rights for this authority) toolpermission # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_all_toolpermissions(authority, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str authority: Authority to load (user or group) (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_all_toolpermissions_with_http_info(authority, **kwargs) # noqa: E501 - else: - (data) = self.get_all_toolpermissions_with_http_info(authority, **kwargs) # noqa: E501 - return data - - def get_all_toolpermissions_with_http_info(self, authority, **kwargs): # noqa: E501 - """get all toolpermissions for an authority # noqa: E501 - - Returns explicit (rights set for this authority) + effective (resulting rights for this authority) toolpermission # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_all_toolpermissions_with_http_info(authority, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str authority: Authority to load (user or group) (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['authority'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_all_toolpermissions" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'authority' is set - if ('authority' not in params or - params['authority'] is None): - raise ValueError("Missing the required parameter `authority` when calling `get_all_toolpermissions`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'authority' in params: - path_params['authority'] = params['authority'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/toolpermissions/{authority}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_application_xml(self, xml, **kwargs): # noqa: E501 - """list any xml properties (like from homeApplication.properties.xml) # noqa: E501 - - list any xml properties (like from homeApplication.properties.xml) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_application_xml(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: Properties Filename (*.xml) (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_application_xml_with_http_info(xml, **kwargs) # noqa: E501 - else: - (data) = self.get_application_xml_with_http_info(xml, **kwargs) # noqa: E501 - return data - - def get_application_xml_with_http_info(self, xml, **kwargs): # noqa: E501 - """list any xml properties (like from homeApplication.properties.xml) # noqa: E501 - - list any xml properties (like from homeApplication.properties.xml) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_application_xml_with_http_info(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: Properties Filename (*.xml) (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['xml'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_application_xml" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'xml' is set - if ('xml' not in params or - params['xml'] is None): - raise ValueError("Missing the required parameter `xml` when calling `get_application_xml`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'xml' in params: - path_params['xml'] = params['xml'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/applications/{xml}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_applications(self, **kwargs): # noqa: E501 - """list applications # noqa: E501 - - List all registered applications. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_applications(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[Application] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_applications_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_applications_with_http_info(**kwargs) # noqa: E501 - return data - - def get_applications_with_http_info(self, **kwargs): # noqa: E501 - """list applications # noqa: E501 - - List all registered applications. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_applications_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[Application] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_applications" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/applications', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Application]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_cache_info(self, id, **kwargs): # noqa: E501 - """Get information about a cache # noqa: E501 - - Get information about a cache. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_cache_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Id/bean name of the cache (required) - :return: CacheInfo - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_cache_info_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.get_cache_info_with_http_info(id, **kwargs) # noqa: E501 - return data - - def get_cache_info_with_http_info(self, id, **kwargs): # noqa: E501 - """Get information about a cache # noqa: E501 - - Get information about a cache. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_cache_info_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Id/bean name of the cache (required) - :return: CacheInfo - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_cache_info" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_cache_info`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/cache/cacheInfo/{id}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CacheInfo', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_catalina_out(self, **kwargs): # noqa: E501 - """Get last info from catalina out # noqa: E501 - - Get catalina.out log. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_catalina_out(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[str] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_catalina_out_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_catalina_out_with_http_info(**kwargs) # noqa: E501 - return data - - def get_catalina_out_with_http_info(self, **kwargs): # noqa: E501 - """Get last info from catalina out # noqa: E501 - - Get catalina.out log. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_catalina_out_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[str] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_catalina_out" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/catalina', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[str]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_cluster(self, **kwargs): # noqa: E501 - """Get information about the Cluster # noqa: E501 - - Get information the Cluster # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_cluster(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: CacheCluster - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_cluster_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_cluster_with_http_info(**kwargs) # noqa: E501 - return data - - def get_cluster_with_http_info(self, **kwargs): # noqa: E501 - """Get information about the Cluster # noqa: E501 - - Get information the Cluster # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_cluster_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: CacheCluster - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_cluster" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/clusterInfo', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CacheCluster', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_clusters(self, **kwargs): # noqa: E501 - """Get information about the Cluster # noqa: E501 - - Get information the Cluster # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_clusters(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: CacheCluster - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_clusters_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_clusters_with_http_info(**kwargs) # noqa: E501 - return data - - def get_clusters_with_http_info(self, **kwargs): # noqa: E501 - """Get information about the Cluster # noqa: E501 - - Get information the Cluster # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_clusters_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: CacheCluster - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_clusters" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/clusterInfos', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CacheCluster', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_config(self, **kwargs): # noqa: E501 - """set/update the repository config object # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_config(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param RepositoryConfig body: - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_config_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_config_with_http_info(**kwargs) # noqa: E501 - return data - - def get_config_with_http_info(self, **kwargs): # noqa: E501 - """set/update the repository config object # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_config_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param RepositoryConfig body: - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_config" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/repositoryConfig', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_config_file(self, filename, **kwargs): # noqa: E501 - """get a base system config file (e.g. edu-sharing.conf) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_config_file(filename, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str filename: filename to fetch (required) - :return: str - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_config_file_with_http_info(filename, **kwargs) # noqa: E501 - else: - (data) = self.get_config_file_with_http_info(filename, **kwargs) # noqa: E501 - return data - - def get_config_file_with_http_info(self, filename, **kwargs): # noqa: E501 - """get a base system config file (e.g. edu-sharing.conf) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_config_file_with_http_info(filename, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str filename: filename to fetch (required) - :return: str - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['filename'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_config_file" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'filename' is set - if ('filename' not in params or - params['filename'] is None): - raise ValueError("Missing the required parameter `filename` when calling `get_config_file`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'filename' in params: - query_params.append(('filename', params['filename'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/configFile', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='str', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_global_groups(self, **kwargs): # noqa: E501 - """Get global groups # noqa: E501 - - Get global groups (groups across repositories). # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_global_groups(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[Group] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_global_groups_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_global_groups_with_http_info(**kwargs) # noqa: E501 - return data - - def get_global_groups_with_http_info(self, **kwargs): # noqa: E501 - """Get global groups # noqa: E501 - - Get global groups (groups across repositories). # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_global_groups_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[Group] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_global_groups" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/globalGroups', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Group]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_jobs(self, **kwargs): # noqa: E501 - """get all running jobs # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_jobs(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[JobInfo] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_jobs_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_jobs_with_http_info(**kwargs) # noqa: E501 - return data - - def get_jobs_with_http_info(self, **kwargs): # noqa: E501 - """get all running jobs # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_jobs_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[JobInfo] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_jobs" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/jobs', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[JobInfo]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_oai_classes(self, **kwargs): # noqa: E501 - """Get OAI class names # noqa: E501 - - Get available importer classes for OAI import. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_oai_classes(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[str] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_oai_classes_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_oai_classes_with_http_info(**kwargs) # noqa: E501 - return data - - def get_oai_classes_with_http_info(self, **kwargs): # noqa: E501 - """Get OAI class names # noqa: E501 - - Get available importer classes for OAI import. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_oai_classes_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[str] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_oai_classes" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/import/oai/classes', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[str]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_property_to_mds(self, properties, **kwargs): # noqa: E501 - """Get a Mds Valuespace for all values of the given properties # noqa: E501 - - Get a Mds Valuespace for all values of the given properties. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_property_to_mds(properties, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] properties: one or more properties (required) - :return: str - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_property_to_mds_with_http_info(properties, **kwargs) # noqa: E501 - else: - (data) = self.get_property_to_mds_with_http_info(properties, **kwargs) # noqa: E501 - return data - - def get_property_to_mds_with_http_info(self, properties, **kwargs): # noqa: E501 - """Get a Mds Valuespace for all values of the given properties # noqa: E501 - - Get a Mds Valuespace for all values of the given properties. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_property_to_mds_with_http_info(properties, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] properties: one or more properties (required) - :return: str - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['properties'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_property_to_mds" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'properties' is set - if ('properties' not in params or - params['properties'] is None): - raise ValueError("Missing the required parameter `properties` when calling `get_property_to_mds`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'properties' in params: - query_params.append(('properties', params['properties'])) # noqa: E501 - collection_formats['properties'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/propertyToMds', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='str', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_statistics(self, **kwargs): # noqa: E501 - """get statistics # noqa: E501 - - get statistics. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_statistics(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: AdminStatistics - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_statistics_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_statistics_with_http_info(**kwargs) # noqa: E501 - return data - - def get_statistics_with_http_info(self, **kwargs): # noqa: E501 - """get statistics # noqa: E501 - - get statistics. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_statistics_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: AdminStatistics - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_statistics" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/statistics', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='AdminStatistics', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_collections(self, xml, **kwargs): # noqa: E501 - """import collections via a xml file # noqa: E501 - - xml file must be structured as defined by the xsd standard # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_collections(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: (required) - :param str parent: Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level - :return: CollectionsResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_collections_with_http_info(xml, **kwargs) # noqa: E501 - else: - (data) = self.import_collections_with_http_info(xml, **kwargs) # noqa: E501 - return data - - def import_collections_with_http_info(self, xml, **kwargs): # noqa: E501 - """import collections via a xml file # noqa: E501 - - xml file must be structured as defined by the xsd standard # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_collections_with_http_info(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: (required) - :param str parent: Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level - :return: CollectionsResult - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['xml', 'parent'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_collections" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'xml' is set - if ('xml' not in params or - params['xml'] is None): - raise ValueError("Missing the required parameter `xml` when calling `import_collections`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'parent' in params: - query_params.append(('parent', params['parent'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - if 'xml' in params: - local_var_files['xml'] = params['xml'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/import/collections', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CollectionsResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_excel(self, excel, parent, **kwargs): # noqa: E501 - """Import excel data # noqa: E501 - - Import excel data. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_excel(excel, parent, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str excel: (required) - :param str parent: parent (required) - :return: ExcelResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_excel_with_http_info(excel, parent, **kwargs) # noqa: E501 - else: - (data) = self.import_excel_with_http_info(excel, parent, **kwargs) # noqa: E501 - return data - - def import_excel_with_http_info(self, excel, parent, **kwargs): # noqa: E501 - """Import excel data # noqa: E501 - - Import excel data. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_excel_with_http_info(excel, parent, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str excel: (required) - :param str parent: parent (required) - :return: ExcelResult - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['excel', 'parent'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_excel" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'excel' is set - if ('excel' not in params or - params['excel'] is None): - raise ValueError("Missing the required parameter `excel` when calling `import_excel`") # noqa: E501 - # verify the required parameter 'parent' is set - if ('parent' not in params or - params['parent'] is None): - raise ValueError("Missing the required parameter `parent` when calling `import_excel`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'parent' in params: - query_params.append(('parent', params['parent'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - if 'excel' in params: - local_var_files['excel'] = params['excel'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/import/excel', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='ExcelResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_oai(self, base_url, set, metadata_prefix, class_name, **kwargs): # noqa: E501 - """Import oai data # noqa: E501 - - Import oai data. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_oai(base_url, set, metadata_prefix, class_name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str base_url: base url (required) - :param str set: set/catalog id (required) - :param str metadata_prefix: metadata prefix (required) - :param str class_name: importer job class name (call /classes to obtain a list) (required) - :param str metadataset: id metadataset - :param str importer_class_name: importer class name (call /classes to obtain a list) - :param str record_handler_class_name: RecordHandler class name - :param str binary_handler_class_name: BinaryHandler class name (may be empty for none) - :param str file_url: url to file - :param str oai_ids: OAI Ids to import, can be null than the whole set will be imported - :param bool force_update: force Update of all entries - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_oai_with_http_info(base_url, set, metadata_prefix, class_name, **kwargs) # noqa: E501 - else: - (data) = self.import_oai_with_http_info(base_url, set, metadata_prefix, class_name, **kwargs) # noqa: E501 - return data - - def import_oai_with_http_info(self, base_url, set, metadata_prefix, class_name, **kwargs): # noqa: E501 - """Import oai data # noqa: E501 - - Import oai data. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_oai_with_http_info(base_url, set, metadata_prefix, class_name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str base_url: base url (required) - :param str set: set/catalog id (required) - :param str metadata_prefix: metadata prefix (required) - :param str class_name: importer job class name (call /classes to obtain a list) (required) - :param str metadataset: id metadataset - :param str importer_class_name: importer class name (call /classes to obtain a list) - :param str record_handler_class_name: RecordHandler class name - :param str binary_handler_class_name: BinaryHandler class name (may be empty for none) - :param str file_url: url to file - :param str oai_ids: OAI Ids to import, can be null than the whole set will be imported - :param bool force_update: force Update of all entries - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['base_url', 'set', 'metadata_prefix', 'class_name', 'metadataset', 'importer_class_name', 'record_handler_class_name', 'binary_handler_class_name', 'file_url', 'oai_ids', 'force_update'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_oai" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'base_url' is set - if ('base_url' not in params or - params['base_url'] is None): - raise ValueError("Missing the required parameter `base_url` when calling `import_oai`") # noqa: E501 - # verify the required parameter 'set' is set - if ('set' not in params or - params['set'] is None): - raise ValueError("Missing the required parameter `set` when calling `import_oai`") # noqa: E501 - # verify the required parameter 'metadata_prefix' is set - if ('metadata_prefix' not in params or - params['metadata_prefix'] is None): - raise ValueError("Missing the required parameter `metadata_prefix` when calling `import_oai`") # noqa: E501 - # verify the required parameter 'class_name' is set - if ('class_name' not in params or - params['class_name'] is None): - raise ValueError("Missing the required parameter `class_name` when calling `import_oai`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'base_url' in params: - query_params.append(('baseUrl', params['base_url'])) # noqa: E501 - if 'set' in params: - query_params.append(('set', params['set'])) # noqa: E501 - if 'metadata_prefix' in params: - query_params.append(('metadataPrefix', params['metadata_prefix'])) # noqa: E501 - if 'metadataset' in params: - query_params.append(('metadataset', params['metadataset'])) # noqa: E501 
- if 'class_name' in params: - query_params.append(('className', params['class_name'])) # noqa: E501 - if 'importer_class_name' in params: - query_params.append(('importerClassName', params['importer_class_name'])) # noqa: E501 - if 'record_handler_class_name' in params: - query_params.append(('recordHandlerClassName', params['record_handler_class_name'])) # noqa: E501 - if 'binary_handler_class_name' in params: - query_params.append(('binaryHandlerClassName', params['binary_handler_class_name'])) # noqa: E501 - if 'file_url' in params: - query_params.append(('fileUrl', params['file_url'])) # noqa: E501 - if 'oai_ids' in params: - query_params.append(('oaiIds', params['oai_ids'])) # noqa: E501 - if 'force_update' in params: - query_params.append(('forceUpdate', params['force_update'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/import/oai', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_oai_xml(self, **kwargs): # noqa: E501 - """Import single xml via oai (for testing) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_oai_xml(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: - :param str record_handler_class_name: RecordHandler class name - :param str binary_handler_class_name: BinaryHandler class name (may be empty for none) - :return: Node - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_oai_xml_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.import_oai_xml_with_http_info(**kwargs) # noqa: E501 - return data - - def import_oai_xml_with_http_info(self, **kwargs): # noqa: E501 - """Import single xml via oai (for testing) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_oai_xml_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: - :param str record_handler_class_name: RecordHandler class name - :param str binary_handler_class_name: BinaryHandler class name (may be empty for none) - :return: Node - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['xml', 'record_handler_class_name', 'binary_handler_class_name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_oai_xml" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'record_handler_class_name' in params: - query_params.append(('recordHandlerClassName', params['record_handler_class_name'])) # noqa: E501 - if 'binary_handler_class_name' in params: - query_params.append(('binaryHandlerClassName', params['binary_handler_class_name'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - if 'xml' in params: - local_var_files['xml'] = params['xml'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/import/oai/xml', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Node', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def refresh_app_info(self, **kwargs): # noqa: E501 - """refresh app info # noqa: E501 - - Refresh the application info. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.refresh_app_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.refresh_app_info_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.refresh_app_info_with_http_info(**kwargs) # noqa: E501 - return data - - def refresh_app_info_with_http_info(self, **kwargs): # noqa: E501 - """refresh app info # noqa: E501 - - Refresh the application info. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.refresh_app_info_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method refresh_app_info" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/refreshAppInfo', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def refresh_cache(self, folder, sticky, **kwargs): # noqa: E501 - """Refresh cache # noqa: E501 - - Refresh importer cache. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.refresh_cache(folder, sticky, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str folder: refresh cache root folder id (required) - :param bool sticky: sticky (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.refresh_cache_with_http_info(folder, sticky, **kwargs) # noqa: E501 - else: - (data) = self.refresh_cache_with_http_info(folder, sticky, **kwargs) # noqa: E501 - return data - - def refresh_cache_with_http_info(self, folder, sticky, **kwargs): # noqa: E501 - """Refresh cache # noqa: E501 - - Refresh importer cache. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.refresh_cache_with_http_info(folder, sticky, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str folder: refresh cache root folder id (required) - :param bool sticky: sticky (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['folder', 'sticky'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method refresh_cache" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'folder' is set - if ('folder' not in params or - params['folder'] is None): - raise ValueError("Missing the required parameter `folder` when calling `refresh_cache`") # noqa: E501 - # verify the required parameter 'sticky' is set - if ('sticky' not in params or - params['sticky'] is None): - raise ValueError("Missing the required parameter `sticky` when calling `refresh_cache`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'folder' in params: - path_params['folder'] = params['folder'] # noqa: E501 - - query_params = [] - if 'sticky' in params: - query_params.append(('sticky', params['sticky'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/import/refreshCache/{folder}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def refresh_edu_group_cache(self, **kwargs): # noqa: E501 - """Refresh the Edu Group Cache # noqa: E501 - - Refresh the Edu Group Cache. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.refresh_edu_group_cache(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param bool keep_existing: keep existing - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.refresh_edu_group_cache_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.refresh_edu_group_cache_with_http_info(**kwargs) # noqa: E501 - return data - - def refresh_edu_group_cache_with_http_info(self, **kwargs): # noqa: E501 - """Refresh the Edu Group Cache # noqa: E501 - - Refresh the Edu Group Cache. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.refresh_edu_group_cache_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param bool keep_existing: keep existing - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['keep_existing'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method refresh_edu_group_cache" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'keep_existing' in params: - query_params.append(('keepExisting', params['keep_existing'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/cache/refreshEduGroupCache', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_application(self, id, **kwargs): # noqa: E501 - """remove an application # noqa: E501 - - remove the specified application. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_application(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Application id (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_application_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.remove_application_with_http_info(id, **kwargs) # noqa: E501 - return data - - def remove_application_with_http_info(self, id, **kwargs): # noqa: E501 - """remove an application # noqa: E501 - - remove the specified application. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_application_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Application id (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_application" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `remove_application`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/applications/{id}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_cache_entry(self, **kwargs): # noqa: E501 - """remove cache entry # noqa: E501 - - remove cache entry # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_cache_entry(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param int cache_index: cacheIndex - :param str bean: bean - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_cache_entry_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.remove_cache_entry_with_http_info(**kwargs) # noqa: E501 - return data - - def remove_cache_entry_with_http_info(self, **kwargs): # noqa: E501 - """remove cache entry # noqa: E501 - - remove cache entry # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_cache_entry_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param int cache_index: cacheIndex - :param str bean: bean - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['cache_index', 'bean'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_cache_entry" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'cache_index' in params: - query_params.append(('cacheIndex', params['cache_index'])) # noqa: E501 - if 'bean' in params: - query_params.append(('bean', params['bean'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/cache/removeCacheEntry', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_oai_imports(self, base_url, set, metadata_prefix, **kwargs): # noqa: E501 - """Remove deleted imports # noqa: E501 - - Remove deleted imports. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_oai_imports(base_url, set, metadata_prefix, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str base_url: base url (required) - :param str set: set/catalog id (required) - :param str metadata_prefix: metadata prefix (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_oai_imports_with_http_info(base_url, set, metadata_prefix, **kwargs) # noqa: E501 - else: - (data) = self.remove_oai_imports_with_http_info(base_url, set, metadata_prefix, **kwargs) # noqa: E501 - return data - - def remove_oai_imports_with_http_info(self, base_url, set, metadata_prefix, **kwargs): # noqa: E501 - """Remove deleted imports # noqa: E501 - - Remove deleted imports. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_oai_imports_with_http_info(base_url, set, metadata_prefix, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str base_url: base url (required) - :param str set: set/catalog id (required) - :param str metadata_prefix: metadata prefix (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['base_url', 'set', 'metadata_prefix'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_oai_imports" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'base_url' is set - if ('base_url' not in params or - params['base_url'] is None): - raise ValueError("Missing the required parameter `base_url` when calling `remove_oai_imports`") # noqa: E501 - # verify the required parameter 'set' is set - if ('set' not in params or - params['set'] is None): - raise ValueError("Missing the required parameter `set` when calling `remove_oai_imports`") # noqa: E501 - # verify the required parameter 'metadata_prefix' is set - if ('metadata_prefix' not in params or - params['metadata_prefix'] is None): - raise ValueError("Missing the required parameter `metadata_prefix` when calling `remove_oai_imports`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'base_url' in params: - query_params.append(('baseUrl', params['base_url'])) # noqa: E501 - if 'set' in params: - query_params.append(('set', params['set'])) # noqa: E501 - if 'metadata_prefix' in params: - query_params.append(('metadataPrefix', params['metadata_prefix'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/import/oai', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search_by_lucene(self, **kwargs): # noqa: E501 - """Search for custom lucene query # noqa: E501 - - e.g. @cm\\:name:\"*\" # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_by_lucene(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str query: query - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :param list[str] authority_scope: authority scope to search for - :return: SearchResult - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_by_lucene_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.search_by_lucene_with_http_info(**kwargs) # noqa: E501 - return data - - def search_by_lucene_with_http_info(self, **kwargs): # noqa: E501 - """Search for custom lucene query # noqa: E501 - - e.g. @cm\\:name:\"*\" # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_by_lucene_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str query: query - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :param list[str] authority_scope: authority scope to search for - :return: SearchResult - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['query', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter', 'authority_scope'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_by_lucene" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'query' in params: - query_params.append(('query', params['query'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - if 'authority_scope' in params: - query_params.append(('authorityScope', params['authority_scope'])) # noqa: E501 - collection_formats['authorityScope'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/lucene', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), 
- _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def server_update_list(self, **kwargs): # noqa: E501 - """list available update tasks # noqa: E501 - - list available update tasks # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.server_update_list(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[ServerUpdateInfo] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.server_update_list_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.server_update_list_with_http_info(**kwargs) # noqa: E501 - return data - - def server_update_list_with_http_info(self, **kwargs): # noqa: E501 - """list available update tasks # noqa: E501 - - list available update tasks # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.server_update_list_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[ServerUpdateInfo] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method server_update_list" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/serverUpdate/list', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[ServerUpdateInfo]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def server_update_list_0(self, id, execute, **kwargs): # noqa: E501 - """Run an update tasks # noqa: E501 - - Run a specific update task (test or full update). # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.server_update_list_0(id, execute, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Id of the update task (required) - :param bool execute: Actually execute (if false, just runs in test mode) (required) - :return: list[ServerUpdateInfo] - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.server_update_list_0_with_http_info(id, execute, **kwargs) # noqa: E501 - else: - (data) = self.server_update_list_0_with_http_info(id, execute, **kwargs) # noqa: E501 - return data - - def server_update_list_0_with_http_info(self, id, execute, **kwargs): # noqa: E501 - """Run an update tasks # noqa: E501 - - Run a specific update task (test or full update). # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.server_update_list_0_with_http_info(id, execute, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Id of the update task (required) - :param bool execute: Actually execute (if false, just runs in test mode) (required) - :return: list[ServerUpdateInfo] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['id', 'execute'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method server_update_list_0" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `server_update_list_0`") # noqa: E501 - # verify the required parameter 'execute' is set - if ('execute' not in params or - params['execute'] is None): - raise ValueError("Missing the required parameter `execute` when calling `server_update_list_0`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - if 'execute' in params: - query_params.append(('execute', params['execute'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/serverUpdate/run/{id}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[ServerUpdateInfo]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_config(self, **kwargs): # noqa: E501 - """get the repository config object # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_config(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: RepositoryConfig - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_config_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.set_config_with_http_info(**kwargs) # noqa: E501 - return data - - def set_config_with_http_info(self, **kwargs): # noqa: E501 - """get the repository config object # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_config_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: RepositoryConfig - If the method is called asynchronously, - returns the request thread. - """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_config" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/repositoryConfig', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='RepositoryConfig', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_toolpermissions(self, authority, **kwargs): # noqa: E501 - """set toolpermissions for an authority # noqa: E501 - - If a toolpermission has status UNDEFINED, it will remove explicit permissions for the authority # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_toolpermissions(authority, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str authority: Authority to set (user or group) (required) - :param dict(str, str) body: - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_toolpermissions_with_http_info(authority, **kwargs) # noqa: E501 - else: - (data) = self.set_toolpermissions_with_http_info(authority, **kwargs) # noqa: E501 - return data - - def set_toolpermissions_with_http_info(self, authority, **kwargs): # noqa: E501 - """set toolpermissions for an authority # noqa: E501 - - If a toolpermission has status UNDEFINED, it will remove explicit permissions for the authority # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_toolpermissions_with_http_info(authority, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str authority: Authority to set (user or group) (required) - :param dict(str, str) body: - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['authority', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_toolpermissions" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'authority' is set - if ('authority' not in params or - params['authority'] is None): - raise ValueError("Missing the required parameter `authority` when calling `set_toolpermissions`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'authority' in params: - path_params['authority'] = params['authority'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/toolpermissions/{authority}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def start_job(self, body, job_class, **kwargs): # noqa: E501 - """Start a Job. # noqa: E501 - - Start a Job. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.start_job(body, job_class, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, str) body: params (required) - :param str job_class: jobClass (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.start_job_with_http_info(body, job_class, **kwargs) # noqa: E501 - else: - (data) = self.start_job_with_http_info(body, job_class, **kwargs) # noqa: E501 - return data - - def start_job_with_http_info(self, body, job_class, **kwargs): # noqa: E501 - """Start a Job. # noqa: E501 - - Start a Job. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.start_job_with_http_info(body, job_class, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, str) body: params (required) - :param str job_class: jobClass (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'job_class'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method start_job" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `start_job`") # noqa: E501 - # verify the required parameter 'job_class' is set - if ('job_class' not in params or - params['job_class'] is None): - raise ValueError("Missing the required parameter `job_class` when calling `start_job`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'job_class' in params: - path_params['jobClass'] = params['job_class'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/job/{jobClass}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def test_mail(self, receiver, template, **kwargs): # noqa: E501 - """Test a mail template # noqa: E501 - - Sends the given template as a test to the given receiver. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.test_mail(receiver, template, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str receiver: (required) - :param str template: (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.test_mail_with_http_info(receiver, template, **kwargs) # noqa: E501 - else: - (data) = self.test_mail_with_http_info(receiver, template, **kwargs) # noqa: E501 - return data - - def test_mail_with_http_info(self, receiver, template, **kwargs): # noqa: E501 - """Test a mail template # noqa: E501 - - Sends the given template as a test to the given receiver. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.test_mail_with_http_info(receiver, template, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str receiver: (required) - :param str template: (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['receiver', 'template'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method test_mail" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'receiver' is set - if ('receiver' not in params or - params['receiver'] is None): - raise ValueError("Missing the required parameter `receiver` when calling `test_mail`") # noqa: E501 - # verify the required parameter 'template' is set - if ('template' not in params or - params['template'] is None): - raise ValueError("Missing the required parameter `template` when calling `test_mail`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'receiver' in params: - path_params['receiver'] = params['receiver'] # noqa: E501 - if 'template' in params: - path_params['template'] = params['template'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/mail/{receiver}/{template}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_application_xml(self, xml, **kwargs): # noqa: E501 - """edit any properties xml (like homeApplication.properties.xml) # noqa: E501 - - if the key exists, it will be overwritten. Otherwise, it will be created. You only need to transfer keys you want to edit # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_application_xml(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: Properties Filename (*.xml) (required) - :param dict(str, str) body: - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_application_xml_with_http_info(xml, **kwargs) # noqa: E501 - else: - (data) = self.update_application_xml_with_http_info(xml, **kwargs) # noqa: E501 - return data - - def update_application_xml_with_http_info(self, xml, **kwargs): # noqa: E501 - """edit any properties xml (like homeApplication.properties.xml) # noqa: E501 - - if the key exists, it will be overwritten. Otherwise, it will be created. 
You only need to transfer keys you want to edit # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_application_xml_with_http_info(xml, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str xml: Properties Filename (*.xml) (required) - :param dict(str, str) body: - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['xml', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_application_xml" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'xml' is set - if ('xml' not in params or - params['xml'] is None): - raise ValueError("Missing the required parameter `xml` when calling `update_application_xml`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'xml' in params: - path_params['xml'] = params['xml'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/applications/{xml}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_config_file(self, filename, **kwargs): # noqa: E501 - """update a base system config file (e.g. edu-sharing.conf) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_config_file(filename, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str filename: filename to fetch (required) - :param str body: - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_config_file_with_http_info(filename, **kwargs) # noqa: E501 - else: - (data) = self.update_config_file_with_http_info(filename, **kwargs) # noqa: E501 - return data - - def update_config_file_with_http_info(self, filename, **kwargs): # noqa: E501 - """update a base system config file (e.g. edu-sharing.conf) # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_config_file_with_http_info(filename, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str filename: filename to fetch (required) - :param str body: - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['filename', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_config_file" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'filename' is set - if ('filename' not in params or - params['filename'] is None): - raise ValueError("Missing the required parameter `filename` when calling `update_config_file`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'filename' in params: - query_params.append(('filename', params['filename'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/configFile', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def upload_temp(self, file, name, **kwargs): # noqa: E501 - """Upload a file # noqa: E501 - - Upload a file to tomcat temp directory, to use it on the server (e.g. an update) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.upload_temp(file, name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str file: (required) - :param str name: filename (required) - :return: UploadResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_temp_with_http_info(file, name, **kwargs) # noqa: E501 - else: - (data) = self.upload_temp_with_http_info(file, name, **kwargs) # noqa: E501 - return data - - def upload_temp_with_http_info(self, file, name, **kwargs): # noqa: E501 - """Upload a file # noqa: E501 - - Upload a file to tomcat temp directory, to use it on the server (e.g. an update) # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.upload_temp_with_http_info(file, name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str file: (required) - :param str name: filename (required) - :return: UploadResult - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['file', 'name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method upload_temp" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'file' is set - if ('file' not in params or - params['file'] is None): - raise ValueError("Missing the required parameter `file` when calling `upload_temp`") # noqa: E501 - # verify the required parameter 'name' is set - if ('name' not in params or - params['name'] is None): - raise ValueError("Missing the required parameter `name` when calling `upload_temp`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'name' in params: - path_params['name'] = params['name'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'file' in params: - local_var_files['file'] = params['file'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/admin/v1/upload/temp/{name}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='UploadResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/archive_v1_api.py b/edu_sharing_client/api/archive_v1_api.py deleted file mode 100644 index 92b29b7d..00000000 --- a/edu_sharing_client/api/archive_v1_api.py +++ /dev/null @@ -1,505 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class ARCHIVEV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def purge(self, repository, archived_node_ids, **kwargs): # noqa: E501 - """Searches for archive nodes. # noqa: E501 - - Searches for archive nodes. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.purge(repository, archived_node_ids, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param list[str] archived_node_ids: archived node (required) - :return: str - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.purge_with_http_info(repository, archived_node_ids, **kwargs) # noqa: E501 - else: - (data) = self.purge_with_http_info(repository, archived_node_ids, **kwargs) # noqa: E501 - return data - - def purge_with_http_info(self, repository, archived_node_ids, **kwargs): # noqa: E501 - """Searches for archive nodes. # noqa: E501 - - Searches for archive nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.purge_with_http_info(repository, archived_node_ids, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param list[str] archived_node_ids: archived node (required) - :return: str - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'archived_node_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method purge" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `purge`") # noqa: E501 - # verify the required parameter 'archived_node_ids' is set - if ('archived_node_ids' not in params or - params['archived_node_ids'] is None): - raise ValueError("Missing the required parameter `archived_node_ids` when calling `purge`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'archived_node_ids' in params: - query_params.append(('archivedNodeIds', params['archived_node_ids'])) # noqa: E501 - collection_formats['archivedNodeIds'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/archive/v1/purge/{repository}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='str', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - 
def restore(self, repository, archived_node_ids, **kwargs): # noqa: E501 - """restore archived nodes. # noqa: E501 - - restores archived nodes. restoreStatus can have the following values: FALLBACK_PARENT_NOT_EXISTS, FALLBACK_PARENT_NO_PERMISSION, DUPLICATENAME, FINE # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.restore(repository, archived_node_ids, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param list[str] archived_node_ids: archived nodes (required) - :param str target: to target - :return: RestoreResults - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.restore_with_http_info(repository, archived_node_ids, **kwargs) # noqa: E501 - else: - (data) = self.restore_with_http_info(repository, archived_node_ids, **kwargs) # noqa: E501 - return data - - def restore_with_http_info(self, repository, archived_node_ids, **kwargs): # noqa: E501 - """restore archived nodes. # noqa: E501 - - restores archived nodes. restoreStatus can have the following values: FALLBACK_PARENT_NOT_EXISTS, FALLBACK_PARENT_NO_PERMISSION, DUPLICATENAME, FINE # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.restore_with_http_info(repository, archived_node_ids, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param list[str] archived_node_ids: archived nodes (required) - :param str target: to target - :return: RestoreResults - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'archived_node_ids', 'target'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method restore" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `restore`") # noqa: E501 - # verify the required parameter 'archived_node_ids' is set - if ('archived_node_ids' not in params or - params['archived_node_ids'] is None): - raise ValueError("Missing the required parameter `archived_node_ids` when calling `restore`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'archived_node_ids' in params: - query_params.append(('archivedNodeIds', params['archived_node_ids'])) # noqa: E501 - collection_formats['archivedNodeIds'] = 'multi' # noqa: E501 - if 'target' in params: - query_params.append(('target', params['target'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/archive/v1/restore/{repository}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='RestoreResults', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search(self, repository, pattern, **kwargs): # noqa: E501 - """Searches for archive nodes. # noqa: E501 - - Searches for archive nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: search pattern (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_with_http_info(repository, pattern, **kwargs) # noqa: E501 - else: - (data) = self.search_with_http_info(repository, pattern, **kwargs) # noqa: E501 - return data - - def search_with_http_info(self, repository, pattern, **kwargs): # noqa: E501 - """Searches for archive nodes. # noqa: E501 - - Searches for archive nodes. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_with_http_info(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: search pattern (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResult - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'pattern', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search`") # noqa: E501 - # verify the required parameter 'pattern' is set - if ('pattern' not in params or - params['pattern'] is None): - raise ValueError("Missing the required parameter `pattern` when calling `search`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'pattern' in params: - path_params['pattern'] = params['pattern'] # noqa: E501 - - query_params = [] - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/archive/v1/search/{repository}/{pattern}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - 
collection_formats=collection_formats) - - def search_0(self, repository, pattern, person, **kwargs): # noqa: E501 - """Searches for archive nodes. # noqa: E501 - - Searches for archive nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_0(repository, pattern, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: search pattern (required) - :param str person: person (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_0_with_http_info(repository, pattern, person, **kwargs) # noqa: E501 - else: - (data) = self.search_0_with_http_info(repository, pattern, person, **kwargs) # noqa: E501 - return data - - def search_0_with_http_info(self, repository, pattern, person, **kwargs): # noqa: E501 - """Searches for archive nodes. # noqa: E501 - - Searches for archive nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_0_with_http_info(repository, pattern, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: search pattern (required) - :param str person: person (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResult - If the method is called asynchronously, - returns the request thread. 
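# Illustrative sketch, not part of the generated module: querying the archive for
# the nodes of a specific person, reusing the archive_api instance from the restore
# sketch above. Pattern, person and the sort property are placeholders.
archive_hits = archive_api.search_0(
    '-home-',                     # repository
    '*',                          # search pattern (placeholder)
    'admin',                      # person whose archived nodes are searched (placeholder)
    max_items=25,
    skip_count=0,
    sort_properties=['cm:name'],  # property name assumed
    sort_ascending=[True],
)
# archive_hits is a SearchResult containing the matching archived nodes.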
- """ - - all_params = ['repository', 'pattern', 'person', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_0" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search_0`") # noqa: E501 - # verify the required parameter 'pattern' is set - if ('pattern' not in params or - params['pattern'] is None): - raise ValueError("Missing the required parameter `pattern` when calling `search_0`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `search_0`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'pattern' in params: - path_params['pattern'] = params['pattern'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/archive/v1/search/{repository}/{pattern}/{person}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/authentication_v1_api.py b/edu_sharing_client/api/authentication_v1_api.py deleted file mode 100644 index c1e47fe4..00000000 --- a/edu_sharing_client/api/authentication_v1_api.py +++ /dev/null @@ -1,389 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class AUTHENTICATIONV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def has_access_to_scope(self, scope, **kwargs): # noqa: E501 - """Returns true if the current user has access to the given scope # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.has_access_to_scope(scope, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str scope: scope (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.has_access_to_scope_with_http_info(scope, **kwargs) # noqa: E501 - else: - (data) = self.has_access_to_scope_with_http_info(scope, **kwargs) # noqa: E501 - return data - - def has_access_to_scope_with_http_info(self, scope, **kwargs): # noqa: E501 - """Returns true if the current user has access to the given scope # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.has_access_to_scope_with_http_info(scope, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str scope: scope (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
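# Illustrative sketch, not part of the generated module: authenticating with Basic
# auth and checking scope access. auth_settings is empty in the generated methods,
# so the Authorization header is set by hand here; set_default_header() on ApiClient
# and the 'safe' scope value are assumptions, and the credentials are placeholders.
import base64
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.authentication_v1_api import AUTHENTICATIONV1Api

client = ApiClient()
token = base64.b64encode(b'admin:admin').decode('ascii')
client.set_default_header('Authorization', 'Basic ' + token)

auth_api = AUTHENTICATIONV1Api(client)
auth_api.has_access_to_scope('safe')   # returns None; a denied scope typically surfaces as an ApiException
# login() and logout(), defined further below, validate the Basic auth session
# (/authentication/v1/validateSession) and destroy it again (/authentication/v1/destroySession).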
- """ - - all_params = ['scope'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method has_access_to_scope" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'scope' is set - if ('scope' not in params or - params['scope'] is None): - raise ValueError("Missing the required parameter `scope` when calling `has_access_to_scope`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'scope' in params: - query_params.append(('scope', params['scope'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/authentication/v1/hasAccessToScope', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def login(self, **kwargs): # noqa: E501 - """Validates the Basic Auth Credentials and check if the session is a logged in user # noqa: E501 - - Use the Basic auth header field to transfer the credentials # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.login(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Login - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.login_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.login_with_http_info(**kwargs) # noqa: E501 - return data - - def login_with_http_info(self, **kwargs): # noqa: E501 - """Validates the Basic Auth Credentials and check if the session is a logged in user # noqa: E501 - - Use the Basic auth header field to transfer the credentials # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.login_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Login - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method login" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/authentication/v1/validateSession', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Login', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def login_to_scope(self, body, **kwargs): # noqa: E501 - """Validates the Basic Auth Credentials and check if the session is a logged in user # noqa: E501 - - Use the Basic auth header field to transfer the credentials # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.login_to_scope(body, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param LoginCredentials body: credentials, example: test,test (required) - :return: Login - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.login_to_scope_with_http_info(body, **kwargs) # noqa: E501 - else: - (data) = self.login_to_scope_with_http_info(body, **kwargs) # noqa: E501 - return data - - def login_to_scope_with_http_info(self, body, **kwargs): # noqa: E501 - """Validates the Basic Auth Credentials and check if the session is a logged in user # noqa: E501 - - Use the Basic auth header field to transfer the credentials # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.login_to_scope_with_http_info(body, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param LoginCredentials body: credentials, example: test,test (required) - :return: Login - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method login_to_scope" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `login_to_scope`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/authentication/v1/loginToScope', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Login', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def logout(self, **kwargs): # noqa: E501 - """Destroys the current session and logout the user # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.logout(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.logout_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.logout_with_http_info(**kwargs) # noqa: E501 - return data - - def logout_with_http_info(self, **kwargs): # noqa: E501 - """Destroys the current session and logout the user # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.logout_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method logout" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/authentication/v1/destroySession', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/bulk_v1_api.py b/edu_sharing_client/api/bulk_v1_api.py deleted file mode 100644 index 935259f0..00000000 --- a/edu_sharing_client/api/bulk_v1_api.py +++ /dev/null @@ -1,270 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class BULKV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def find(self, body, **kwargs): # noqa: E501 - """gets a given node # noqa: E501 - - Get a given node based on the posted, multiple criterias. Make sure that they'll provide an unique result # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.find(body, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties that must match (with "AND" concatenated) (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.find_with_http_info(body, **kwargs) # noqa: E501 - else: - (data) = self.find_with_http_info(body, **kwargs) # noqa: E501 - return data - - def find_with_http_info(self, body, **kwargs): # noqa: E501 - """gets a given node # noqa: E501 - - Get a given node based on the posted, multiple criterias. Make sure that they'll provide an unique result # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.find_with_http_info(body, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties that must match (with "AND" concatenated) (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method find" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `find`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/bulk/v1/find', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def sync(self, body, match, type, group, **kwargs): # noqa: E501 - """Create or update a given node # noqa: E501 - - Depending on the given \"match\" properties either a new node will be created or the existing one will be updated # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.sync(body, match, type, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, they'll not get filtered via mds, so be careful what you add here (required) - :param list[str] match: The properties that must match to identify if this node exists. Multiple properties will be and combined and compared (required) - :param str type: type of node. If the node already exists, this will not change the type afterwards (required) - :param str group: The group to which this node belongs to. Used for internal structuring. Please use simple names only (required) - :param list[str] group_by: The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) - :param list[str] aspects: aspects of node - :param bool reset_version: reset all versions (like a complete reimport), all data inside edu-sharing will be lost - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
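# Illustrative sketch, not part of the generated module: the bulk API is the entry
# point a crawler would use to create or update nodes. Signatures follow the find()
# and sync() docstrings above; the property names, node type 'ccm:io' and group
# name are assumed example values.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.bulk_v1_api import BULKV1Api

bulk_api = BULKV1Api(ApiClient())

properties = {                                        # dict(str, list[str]); not filtered via mds
    'ccm:replicationsource':   ['merlin_spider'],
    'ccm:replicationsourceid': ['example-source-id'],
    'cm:name':                 ['Example item'],
}

# Create the node if nothing matches the "match" properties, otherwise update it in place.
node_entry = bulk_api.sync(
    properties,
    match=['ccm:replicationsource', 'ccm:replicationsourceid'],
    type='ccm:io',             # node type; not changed if the node already exists
    group='merlin_spider',     # simple group name used for internal structuring
)

# find() resolves the same node again; the posted criteria are AND-combined and must be unique.
found_entry = bulk_api.find({
    'ccm:replicationsource':   ['merlin_spider'],
    'ccm:replicationsourceid': ['example-source-id'],
})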
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.sync_with_http_info(body, match, type, group, **kwargs) # noqa: E501 - else: - (data) = self.sync_with_http_info(body, match, type, group, **kwargs) # noqa: E501 - return data - - def sync_with_http_info(self, body, match, type, group, **kwargs): # noqa: E501 - """Create or update a given node # noqa: E501 - - Depending on the given \"match\" properties either a new node will be created or the existing one will be updated # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.sync_with_http_info(body, match, type, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, they'll not get filtered via mds, so be careful what you add here (required) - :param list[str] match: The properties that must match to identify if this node exists. Multiple properties will be and combined and compared (required) - :param str type: type of node. If the node already exists, this will not change the type afterwards (required) - :param str group: The group to which this node belongs to. Used for internal structuring. Please use simple names only (required) - :param list[str] group_by: The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) - :param list[str] aspects: aspects of node - :param bool reset_version: reset all versions (like a complete reimport), all data inside edu-sharing will be lost - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'match', 'type', 'group', 'group_by', 'aspects', 'reset_version'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method sync" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `sync`") # noqa: E501 - # verify the required parameter 'match' is set - if ('match' not in params or - params['match'] is None): - raise ValueError("Missing the required parameter `match` when calling `sync`") # noqa: E501 - # verify the required parameter 'type' is set - if ('type' not in params or - params['type'] is None): - raise ValueError("Missing the required parameter `type` when calling `sync`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `sync`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - if 'match' in params: - query_params.append(('match', params['match'])) # noqa: E501 - collection_formats['match'] = 'multi' # noqa: E501 - if 'group_by' in params: - query_params.append(('groupBy', params['group_by'])) # noqa: E501 - collection_formats['groupBy'] = 'multi' # noqa: E501 - if 'type' in params: - query_params.append(('type', params['type'])) # noqa: 
E501 - if 'aspects' in params: - query_params.append(('aspects', params['aspects'])) # noqa: E501 - collection_formats['aspects'] = 'multi' # noqa: E501 - if 'reset_version' in params: - query_params.append(('resetVersion', params['reset_version'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/bulk/v1/sync/{group}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/clientutils_v1_api.py b/edu_sharing_client/api/clientutils_v1_api.py deleted file mode 100644 index bc7ae225..00000000 --- a/edu_sharing_client/api/clientutils_v1_api.py +++ /dev/null @@ -1,122 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class CLIENTUTILSV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def get_website_information(self, **kwargs): # noqa: E501 - """Read generic information about a webpage # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_website_information(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str url: full url with http or https - :return: WebsiteInformation - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_website_information_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_website_information_with_http_info(**kwargs) # noqa: E501 - return data - - def get_website_information_with_http_info(self, **kwargs): # noqa: E501 - """Read generic information about a webpage # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_website_information_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str url: full url with http or https - :return: WebsiteInformation - If the method is called asynchronously, - returns the request thread. 
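# Illustrative sketch, not part of the generated module: reading generic metadata
# about a web page before crawling it; the URL is just an example value.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.clientutils_v1_api import CLIENTUTILSV1Api

utils_api = CLIENTUTILSV1Api(ApiClient())
info = utils_api.get_website_information(url='https://www.leifiphysik.de/')
# info is a WebsiteInformation model; the url must be a full http or https URL.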
- """ - - all_params = ['url'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_website_information" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'url' in params: - query_params.append(('url', params['url'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/clientUtils/v1/getWebsiteInformation', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='WebsiteInformation', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/collection_v1_api.py b/edu_sharing_client/api/collection_v1_api.py deleted file mode 100644 index 59811149..00000000 --- a/edu_sharing_client/api/collection_v1_api.py +++ /dev/null @@ -1,1722 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class COLLECTIONV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_feedback_to_collection(self, repository, collection, **kwargs): # noqa: E501 - """Post feedback to collection. # noqa: E501 - - Requires permission \"Feedback\" on the specific collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_feedback_to_collection(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param dict(str, list[str]) body: - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_feedback_to_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.add_feedback_to_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - return data - - def add_feedback_to_collection_with_http_info(self, repository, collection, **kwargs): # noqa: E501 - """Post feedback to collection. # noqa: E501 - - Requires permission \"Feedback\" on the specific collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_feedback_to_collection_with_http_info(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param dict(str, list[str]) body: - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'collection', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_feedback_to_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_feedback_to_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `add_feedback_to_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/feedback', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def add_to_collection(self, repository, collection, node, source_repo, **kwargs): # noqa: E501 - """Add a node to a collection. # noqa: E501 - - Add a node to a collection. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_to_collection(repository, collection, node, source_repo, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param str node: ID of node (required) - :param str source_repo: ID of source repository (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_to_collection_with_http_info(repository, collection, node, source_repo, **kwargs) # noqa: E501 - else: - (data) = self.add_to_collection_with_http_info(repository, collection, node, source_repo, **kwargs) # noqa: E501 - return data - - def add_to_collection_with_http_info(self, repository, collection, node, source_repo, **kwargs): # noqa: E501 - """Add a node to a collection. # noqa: E501 - - Add a node to a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_to_collection_with_http_info(repository, collection, node, source_repo, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param str node: ID of node (required) - :param str source_repo: ID of source repository (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
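# Illustrative sketch, not part of the generated module: referencing an existing
# node in a collection and posting feedback to it. The collection and node ids and
# the feedback keys are placeholders; posting feedback requires the "Feedback"
# permission on the collection, as noted above.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.collection_v1_api import COLLECTIONV1Api

collection_api = COLLECTIONV1Api(ApiClient())
collection_api.add_to_collection(
    '-home-',            # repository
    'collection-id',     # collection id (placeholder)
    'node-id',           # node to reference in the collection (placeholder)
    '-home-',            # source repository of the node
)
collection_api.add_feedback_to_collection(
    '-home-',
    'collection-id',
    body={'comment': ['Nice collection!']},   # dict(str, list[str]); keys are placeholders
)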
- """ - - all_params = ['repository', 'collection', 'node', 'source_repo'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_to_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_to_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `add_to_collection`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `add_to_collection`") # noqa: E501 - # verify the required parameter 'source_repo' is set - if ('source_repo' not in params or - params['source_repo'] is None): - raise ValueError("Missing the required parameter `source_repo` when calling `add_to_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'source_repo' in params: - query_params.append(('sourceRepo', params['source_repo'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/references/{node}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_icon_of_collection(self, mimetype, repository, collection, **kwargs): # noqa: E501 - """Writes Preview Image of a collection. # noqa: E501 - - Writes Preview Image of a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_icon_of_collection(mimetype, repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mimetype: MIME-Type (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param str file: - :return: CollectionEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_icon_of_collection_with_http_info(mimetype, repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.change_icon_of_collection_with_http_info(mimetype, repository, collection, **kwargs) # noqa: E501 - return data - - def change_icon_of_collection_with_http_info(self, mimetype, repository, collection, **kwargs): # noqa: E501 - """Writes Preview Image of a collection. # noqa: E501 - - Writes Preview Image of a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_icon_of_collection_with_http_info(mimetype, repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mimetype: MIME-Type (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param str file: - :return: CollectionEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['mimetype', 'repository', 'collection', 'file'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_icon_of_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mimetype' is set - if ('mimetype' not in params or - params['mimetype'] is None): - raise ValueError("Missing the required parameter `mimetype` when calling `change_icon_of_collection`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_icon_of_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `change_icon_of_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - if 'mimetype' in params: - query_params.append(('mimetype', params['mimetype'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - if 'file' in params: - local_var_files['file'] = params['file'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/icon', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CollectionEntry', # noqa: E501 - auth_settings=auth_settings, - 
async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_collection(self, body, repository, collection, **kwargs): # noqa: E501 - """Create a new collection. # noqa: E501 - - Create a new collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_collection(body, repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Node body: collection (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of parent collection (or \"-root-\" for level0 collections) (required) - :return: CollectionEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_collection_with_http_info(body, repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.create_collection_with_http_info(body, repository, collection, **kwargs) # noqa: E501 - return data - - def create_collection_with_http_info(self, body, repository, collection, **kwargs): # noqa: E501 - """Create a new collection. # noqa: E501 - - Create a new collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_collection_with_http_info(body, repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Node body: collection (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of parent collection (or \"-root-\" for level0 collections) (required) - :return: CollectionEntry - If the method is called asynchronously, - returns the request thread. 
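# Illustrative sketch, not part of the generated module: creating a level-0
# collection under "-root-" and giving it a preview image. The Node model import,
# constructing it with only a name, the created id and the preview file path are
# assumptions for the example; collection_api is the instance from the sketch above.
from edu_sharing_client.models.node import Node   # import path assumed

created = collection_api.create_collection(
    Node(name='Example collection'),   # collection metadata (model fields assumed)
    '-home-',                          # repository
    '-root-',                          # parent collection, or "-root-" for level-0 collections
)
collection_api.change_icon_of_collection(
    'image/png',                       # mimetype of the uploaded preview
    '-home-',
    'new-collection-id',               # id of the collection created above (placeholder)
    file='/tmp/preview.png',           # uploaded as multipart/form-data
)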
- """ - - all_params = ['body', 'repository', 'collection'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_collection`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `create_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/children', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CollectionEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_collection(self, repository, collection, **kwargs): # noqa: E501 - """Delete a collection. # noqa: E501 - - Delete a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_collection(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.delete_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - return data - - def delete_collection_with_http_info(self, repository, collection, **kwargs): # noqa: E501 - """Delete a collection. # noqa: E501 - - Delete a collection. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_collection_with_http_info(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'collection'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `delete_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_from_collection(self, repository, collection, node, **kwargs): # noqa: E501 - """Delete a node from a collection. # noqa: E501 - - Delete a node from a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_from_collection(repository, collection, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_from_collection_with_http_info(repository, collection, node, **kwargs) # noqa: E501 - else: - (data) = self.delete_from_collection_with_http_info(repository, collection, node, **kwargs) # noqa: E501 - return data - - def delete_from_collection_with_http_info(self, repository, collection, node, **kwargs): # noqa: E501 - """Delete a node from a collection. # noqa: E501 - - Delete a node from a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_from_collection_with_http_info(repository, collection, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'collection', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_from_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_from_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `delete_from_collection`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `delete_from_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/references/{node}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_collection(self, repository, collection, **kwargs): # noqa: E501 - """Get a collection. # noqa: E501 - - Get a collection. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_collection(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: CollectionEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.get_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - return data - - def get_collection_with_http_info(self, repository, collection, **kwargs): # noqa: E501 - """Get a collection. # noqa: E501 - - Get a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_collection_with_http_info(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: CollectionEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'collection'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `get_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CollectionEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_collections_references(self, repository, collection, **kwargs): # 
noqa: E501 - """Get references objects for collection. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_collections_references(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of parent collection (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: ReferenceEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_collections_references_with_http_info(repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.get_collections_references_with_http_info(repository, collection, **kwargs) # noqa: E501 - return data - - def get_collections_references_with_http_info(self, repository, collection, **kwargs): # noqa: E501 - """Get references objects for collection. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_collections_references_with_http_info(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of parent collection (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: ReferenceEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'collection', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_collections_references" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_collections_references`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `get_collections_references`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/children/references', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='ReferenceEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_collections_subcollections(self, repository, collection, scope, **kwargs): # noqa: E501 - """Get child collections for collection (or root). # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_collections_subcollections(repository, collection, scope, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of parent collection (or \"-root-\" for level0 collections) (required) - :param str scope: scope (only relevant if parent == -root-) (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: ReferenceEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_collections_subcollections_with_http_info(repository, collection, scope, **kwargs) # noqa: E501 - else: - (data) = self.get_collections_subcollections_with_http_info(repository, collection, scope, **kwargs) # noqa: E501 - return data - - def get_collections_subcollections_with_http_info(self, repository, collection, scope, **kwargs): # noqa: E501 - """Get child collections for collection (or root). # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_collections_subcollections_with_http_info(repository, collection, scope, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of parent collection (or \"-root-\" for level0 collections) (required) - :param str scope: scope (only relevant if parent == -root-) (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: ReferenceEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'collection', 'scope', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_collections_subcollections" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_collections_subcollections`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `get_collections_subcollections`") # noqa: E501 - # verify the required parameter 'scope' is set - if ('scope' not in params or - params['scope'] is None): - raise ValueError("Missing the required parameter `scope` when calling `get_collections_subcollections`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - if 'scope' in params: - query_params.append(('scope', params['scope'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/children/collections', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='ReferenceEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_feedback_of_collection(self, repository, collection, **kwargs): # noqa: E501 - """Get feedback of collection. # noqa: E501 - - Requires permission \"???\" on the specific permission # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_feedback_of_collection(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: list[CollectionFeedback] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_feedback_of_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.get_feedback_of_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - return data - - def get_feedback_of_collection_with_http_info(self, repository, collection, **kwargs): # noqa: E501 - """Get feedback of collection. # noqa: E501 - - Requires permission \"???\" on the specific permission # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_feedback_of_collection_with_http_info(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: list[CollectionFeedback] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'collection'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_feedback_of_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_feedback_of_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `get_feedback_of_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/feedback', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[CollectionFeedback]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - 
collection_formats=collection_formats) - - def remove_icon_of_collection(self, repository, collection, **kwargs): # noqa: E501 - """Deletes Preview Image of a collection. # noqa: E501 - - Deletes Preview Image of a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_icon_of_collection(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_icon_of_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.remove_icon_of_collection_with_http_info(repository, collection, **kwargs) # noqa: E501 - return data - - def remove_icon_of_collection_with_http_info(self, repository, collection, **kwargs): # noqa: E501 - """Deletes Preview Image of a collection. # noqa: E501 - - Deletes Preview Image of a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_icon_of_collection_with_http_info(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'collection'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_icon_of_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `remove_icon_of_collection`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `remove_icon_of_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/icon', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search(self, repository, query, **kwargs): # noqa: E501 - """Search collections. # noqa: E501 - - Search collections. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search(repository, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str query: query string (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: CollectionEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_with_http_info(repository, query, **kwargs) # noqa: E501 - else: - (data) = self.search_with_http_info(repository, query, **kwargs) # noqa: E501 - return data - - def search_with_http_info(self, repository, query, **kwargs): # noqa: E501 - """Search collections. # noqa: E501 - - Search collections. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_with_http_info(repository, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str query: query string (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: CollectionEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'query', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search`") # noqa: E501 - # verify the required parameter 'query' is set - if ('query' not in params or - params['query'] is None): - raise ValueError("Missing the required parameter `query` when calling `search`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'query' in params: - query_params.append(('query', params['query'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/search', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='CollectionEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_collection_order(self, repository, collection, **kwargs): # noqa: E501 - """Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection # noqa: E501 - - Current order will be overriden. Requires full permissions for the parent collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_collection_order(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param list[str] body: List of nodes in the order to be saved. If empty, custom order of the collection will be disabled - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_collection_order_with_http_info(repository, collection, **kwargs) # noqa: E501 - else: - (data) = self.set_collection_order_with_http_info(repository, collection, **kwargs) # noqa: E501 - return data - - def set_collection_order_with_http_info(self, repository, collection, **kwargs): # noqa: E501 - """Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection # noqa: E501 - - Current order will be overriden. Requires full permissions for the parent collection # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_collection_order_with_http_info(repository, collection, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str collection: ID of collection (required) - :param list[str] body: List of nodes in the order to be saved. If empty, custom order of the collection will be disabled - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'collection', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_collection_order" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `set_collection_order`") # noqa: E501 - # verify the required parameter 'collection' is set - if ('collection' not in params or - params['collection'] is None): - raise ValueError("Missing the required parameter `collection` when calling `set_collection_order`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'collection' in params: - path_params['collection'] = params['collection'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}/order', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_pinned_collections(self, body, repository, **kwargs): # 
noqa: E501 - """Set pinned collections. # noqa: E501 - - Remove all currently pinned collections and set them in the order send. Requires TOOLPERMISSION_COLLECTION_PINNING # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_pinned_collections(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] body: List of collections that should be pinned (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_pinned_collections_with_http_info(body, repository, **kwargs) # noqa: E501 - else: - (data) = self.set_pinned_collections_with_http_info(body, repository, **kwargs) # noqa: E501 - return data - - def set_pinned_collections_with_http_info(self, body, repository, **kwargs): # noqa: E501 - """Set pinned collections. # noqa: E501 - - Remove all currently pinned collections and set them in the order send. Requires TOOLPERMISSION_COLLECTION_PINNING # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_pinned_collections_with_http_info(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] body: List of collections that should be pinned (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_pinned_collections" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `set_pinned_collections`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `set_pinned_collections`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/pinning', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - 
files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_collection(self, body, repository, **kwargs): # noqa: E501 - """Update a collection. # noqa: E501 - - Update a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_collection(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Node body: collection (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_collection_with_http_info(body, repository, **kwargs) # noqa: E501 - else: - (data) = self.update_collection_with_http_info(body, repository, **kwargs) # noqa: E501 - return data - - def update_collection_with_http_info(self, body, repository, **kwargs): # noqa: E501 - """Update a collection. # noqa: E501 - - Update a collection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_collection_with_http_info(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Node body: collection (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_collection" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_collection`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `update_collection`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/collection/v1/collections/{repository}/{collection}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/comment_v1_api.py b/edu_sharing_client/api/comment_v1_api.py deleted file mode 100644 index f884e8a1..00000000 --- a/edu_sharing_client/api/comment_v1_api.py +++ /dev/null @@ -1,473 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class COMMENTV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_comment(self, body, repository, node, **kwargs): # noqa: E501 - """create a new comment # noqa: E501 - - Adds a comment to the given node # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_comment(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Text content of comment (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str comment_reference: In reply to an other comment, can be null - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_comment_with_http_info(body, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.add_comment_with_http_info(body, repository, node, **kwargs) # noqa: E501 - return data - - def add_comment_with_http_info(self, body, repository, node, **kwargs): # noqa: E501 - """create a new comment # noqa: E501 - - Adds a comment to the given node # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_comment_with_http_info(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Text content of comment (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str comment_reference: In reply to an other comment, can be null - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'node', 'comment_reference'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_comment" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `add_comment`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_comment`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `add_comment`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'comment_reference' in params: - query_params.append(('commentReference', params['comment_reference'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] 
# noqa: E501 - - return self.api_client.call_api( - '/comment/v1/comments/{repository}/{node}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_comment(self, repository, comment, **kwargs): # noqa: E501 - """delete a comment # noqa: E501 - - Delete the comment with the given id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_comment(repository, comment, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str comment: id of the comment to delete (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_comment_with_http_info(repository, comment, **kwargs) # noqa: E501 - else: - (data) = self.delete_comment_with_http_info(repository, comment, **kwargs) # noqa: E501 - return data - - def delete_comment_with_http_info(self, repository, comment, **kwargs): # noqa: E501 - """delete a comment # noqa: E501 - - Delete the comment with the given id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_comment_with_http_info(repository, comment, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str comment: id of the comment to delete (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_comment" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_comment`") # noqa: E501 - # verify the required parameter 'comment' is set - if ('comment' not in params or - params['comment'] is None): - raise ValueError("Missing the required parameter `comment` when calling `delete_comment`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'comment' in params: - path_params['comment'] = params['comment'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/comment/v1/comments/{repository}/{comment}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def edit_comment(self, body, repository, comment, **kwargs): # noqa: E501 - """edit a comment # noqa: E501 - - Edit the comment with the given id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.edit_comment(body, repository, comment, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Text content of comment (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str comment: id of the comment to edit (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.edit_comment_with_http_info(body, repository, comment, **kwargs) # noqa: E501 - else: - (data) = self.edit_comment_with_http_info(body, repository, comment, **kwargs) # noqa: E501 - return data - - def edit_comment_with_http_info(self, body, repository, comment, **kwargs): # noqa: E501 - """edit a comment # noqa: E501 - - Edit the comment with the given id # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.edit_comment_with_http_info(body, repository, comment, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Text content of comment (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str comment: id of the comment to edit (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method edit_comment" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `edit_comment`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `edit_comment`") # noqa: E501 - # verify the required parameter 'comment' is set - if ('comment' not in params or - params['comment'] is None): - raise ValueError("Missing the required parameter `comment` when calling `edit_comment`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'comment' in params: - path_params['comment'] = params['comment'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/comment/v1/comments/{repository}/{comment}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_comments(self, repository, node, **kwargs): # noqa: E501 - """list comments # noqa: E501 - - List all comments # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_comments(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: Comments - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_comments_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_comments_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_comments_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """list comments # noqa: E501 - - List all comments # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_comments_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: Comments - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_comments" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_comments`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_comments`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/comment/v1/comments/{repository}/{node}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Comments', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/config_v1_api.py b/edu_sharing_client/api/config_v1_api.py deleted file mode 100644 index 348a0cdf..00000000 --- a/edu_sharing_client/api/config_v1_api.py +++ /dev/null @@ -1,587 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class CONFIGV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def get_config(self, **kwargs): # noqa: E501 - """get repository config values # noqa: E501 - - Current is the actual (context-based) active config. Global is the default global config if no context is active (may be identical to the current) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_config(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Config - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_config_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_config_with_http_info(**kwargs) # noqa: E501 - return data - - def get_config_with_http_info(self, **kwargs): # noqa: E501 - """get repository config values # noqa: E501 - - Current is the actual (context-based) active config. Global is the default global config if no context is active (may be identical to the current) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_config_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Config - If the method is called asynchronously, - returns the request thread. - """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_config" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/config/v1/values', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Config', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_dynamic_value(self, key, **kwargs): # noqa: E501 - """Get a config entry (appropriate rights for the entry are required) # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_dynamic_value(key, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str key: Key of the config value that should be fetched (required) - :return: DynamicConfig - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_dynamic_value_with_http_info(key, **kwargs) # noqa: E501 - else: - (data) = self.get_dynamic_value_with_http_info(key, **kwargs) # noqa: E501 - return data - - def get_dynamic_value_with_http_info(self, key, **kwargs): # noqa: E501 - """Get a config entry (appropriate rights for the entry are required) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_dynamic_value_with_http_info(key, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str key: Key of the config value that should be fetched (required) - :return: DynamicConfig - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['key'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_dynamic_value" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'key' is set - if ('key' not in params or - params['key'] is None): - raise ValueError("Missing the required parameter `key` when calling `get_dynamic_value`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'key' in params: - path_params['key'] = params['key'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/config/v1/dynamic/{key}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='DynamicConfig', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_language(self, **kwargs): # noqa: E501 - """get override strings for the current language # noqa: E501 - - Language strings # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_language(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Language - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_language_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_language_with_http_info(**kwargs) # noqa: E501 - return data - - def get_language_with_http_info(self, **kwargs): # noqa: E501 - """get override strings for the current language # noqa: E501 - - Language strings # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_language_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Language - If the method is called asynchronously, - returns the request thread. - """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_language" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/config/v1/language', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Language', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_language_defaults(self, **kwargs): # noqa: E501 - """get all inital language strings for angular # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_language_defaults(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_language_defaults_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_language_defaults_with_http_info(**kwargs) # noqa: E501 - return data - - def get_language_defaults_with_http_info(self, **kwargs): # noqa: E501 - """get all inital language strings for angular # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_language_defaults_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_language_defaults" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/config/v1/language/defaults', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_variables(self, **kwargs): # noqa: E501 - """get global config variables # noqa: E501 - - global config variables # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_variables(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Variables - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_variables_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_variables_with_http_info(**kwargs) # noqa: E501 - return data - - def get_variables_with_http_info(self, **kwargs): # noqa: E501 - """get global config variables # noqa: E501 - - global config variables # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_variables_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: Variables - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_variables" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/config/v1/variables', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Variables', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_dynamic_value(self, body, public, key, **kwargs): # noqa: E501 - """Set a config entry (admin rights required) # noqa: E501 - - the body must be a json encapsulated string # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_dynamic_value(body, public, key, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Must be a json-encapsulated string (required) - :param bool public: Is everyone allowed to read the value (required) - :param str key: Key of the config value that should be fetched (required) - :return: DynamicConfig - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_dynamic_value_with_http_info(body, public, key, **kwargs) # noqa: E501 - else: - (data) = self.set_dynamic_value_with_http_info(body, public, key, **kwargs) # noqa: E501 - return data - - def set_dynamic_value_with_http_info(self, body, public, key, **kwargs): # noqa: E501 - """Set a config entry (admin rights required) # noqa: E501 - - the body must be a json encapsulated string # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_dynamic_value_with_http_info(body, public, key, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Must be a json-encapsulated string (required) - :param bool public: Is everyone allowed to read the value (required) - :param str key: Key of the config value that should be fetched (required) - :return: DynamicConfig - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'public', 'key'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_dynamic_value" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `set_dynamic_value`") # noqa: E501 - # verify the required parameter 'public' is set - if ('public' not in params or - params['public'] is None): - raise ValueError("Missing the required parameter `public` when calling `set_dynamic_value`") # noqa: E501 - # verify the required parameter 'key' is set - if ('key' not in params or - params['key'] is None): - raise ValueError("Missing the required parameter `key` when calling `set_dynamic_value`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'key' in params: - path_params['key'] = params['key'] # noqa: E501 - - query_params = [] - if 'public' in params: - query_params.append(('public', params['public'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/config/v1/dynamic/{key}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='DynamicConfig', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/connector_v1_api.py b/edu_sharing_client/api/connector_v1_api.py deleted file mode 100644 index 57e7a84c..00000000 --- a/edu_sharing_client/api/connector_v1_api.py +++ /dev/null @@ -1,126 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class CONNECTORV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def list_connectors(self, repository, **kwargs): # noqa: E501 - """List all available connectors # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_connectors(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: ConnectorList - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_connectors_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.list_connectors_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def list_connectors_with_http_info(self, repository, **kwargs): # noqa: E501 - """List all available connectors # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_connectors_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: ConnectorList - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method list_connectors" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `list_connectors`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/connector/v1/connectors/{repository}/list', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='ConnectorList', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/iam_v1_api.py b/edu_sharing_client/api/iam_v1_api.py deleted file mode 100644 index c99977c8..00000000 --- a/edu_sharing_client/api/iam_v1_api.py +++ /dev/null @@ -1,2901 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class IAMV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_membership(self, repository, group, member, **kwargs): # noqa: E501 - """Add member to the group. # noqa: E501 - - Add member to the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_membership(repository, group, member, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :param str member: authorityName of member (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_membership_with_http_info(repository, group, member, **kwargs) # noqa: E501 - else: - (data) = self.add_membership_with_http_info(repository, group, member, **kwargs) # noqa: E501 - return data - - def add_membership_with_http_info(self, repository, group, member, **kwargs): # noqa: E501 - """Add member to the group. # noqa: E501 - - Add member to the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_membership_with_http_info(repository, group, member, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :param str member: authorityName of member (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'group', 'member'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_membership" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_membership`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `add_membership`") # noqa: E501 - # verify the required parameter 'member' is set - if ('member' not in params or - params['member'] is None): - raise ValueError("Missing the required parameter `member` when calling `add_membership`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - if 'member' in params: - path_params['member'] = params['member'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}/{group}/members/{member}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def add_node_list(self, repository, person, list, node, **kwargs): # noqa: E501 - """Add a node to node a list of a user # noqa: E501 - - For guest users, the list will be temporary stored in the current session # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_node_list(repository, person, list, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :param str list: list name. If this list does not exist, it will be created (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_node_list_with_http_info(repository, person, list, node, **kwargs) # noqa: E501 - else: - (data) = self.add_node_list_with_http_info(repository, person, list, node, **kwargs) # noqa: E501 - return data - - def add_node_list_with_http_info(self, repository, person, list, node, **kwargs): # noqa: E501 - """Add a node to node a list of a user # noqa: E501 - - For guest users, the list will be temporary stored in the current session # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_node_list_with_http_info(repository, person, list, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :param str list: list name. If this list does not exist, it will be created (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'person', 'list', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_node_list" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_node_list`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `add_node_list`") # noqa: E501 - # verify the required parameter 'list' is set - if ('list' not in params or - params['list'] is None): - raise ValueError("Missing the required parameter `list` when calling `add_node_list`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `add_node_list`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - if 'list' in params: - path_params['list'] = params['list'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/nodeList/{list}/{node}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - 
_return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_group_profile(self, body, repository, group, **kwargs): # noqa: E501 - """Set profile of the group. # noqa: E501 - - Set profile of the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_group_profile(body, repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param GroupProfile body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_group_profile_with_http_info(body, repository, group, **kwargs) # noqa: E501 - else: - (data) = self.change_group_profile_with_http_info(body, repository, group, **kwargs) # noqa: E501 - return data - - def change_group_profile_with_http_info(self, body, repository, group, **kwargs): # noqa: E501 - """Set profile of the group. # noqa: E501 - - Set profile of the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_group_profile_with_http_info(body, repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param GroupProfile body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'repository', 'group'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_group_profile" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `change_group_profile`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_group_profile`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `change_group_profile`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}/{group}/profile', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_user_avatar(self, avatar, repository, person, **kwargs): # noqa: E501 - """Set avatar of the user. # noqa: E501 - - Set avatar of the user. (To set foreign avatars, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_user_avatar(avatar, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str avatar: (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_user_avatar_with_http_info(avatar, repository, person, **kwargs) # noqa: E501 - else: - (data) = self.change_user_avatar_with_http_info(avatar, repository, person, **kwargs) # noqa: E501 - return data - - def change_user_avatar_with_http_info(self, avatar, repository, person, **kwargs): # noqa: E501 - """Set avatar of the user. # noqa: E501 - - Set avatar of the user. (To set foreign avatars, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_user_avatar_with_http_info(avatar, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str avatar: (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['avatar', 'repository', 'person'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_user_avatar" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'avatar' is set - if ('avatar' not in params or - params['avatar'] is None): - raise ValueError("Missing the required parameter `avatar` when calling `change_user_avatar`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_user_avatar`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `change_user_avatar`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'avatar' in params: - local_var_files['avatar'] = params['avatar'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/avatar', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def 
change_user_password(self, body, repository, person, **kwargs): # noqa: E501 - """Change/Set password of the user. # noqa: E501 - - Change/Set password of the user. (To change foreign passwords or set passwords, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_user_password(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param UserCredential body: credential (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_user_password_with_http_info(body, repository, person, **kwargs) # noqa: E501 - else: - (data) = self.change_user_password_with_http_info(body, repository, person, **kwargs) # noqa: E501 - return data - - def change_user_password_with_http_info(self, body, repository, person, **kwargs): # noqa: E501 - """Change/Set password of the user. # noqa: E501 - - Change/Set password of the user. (To change foreign passwords or set passwords, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_user_password_with_http_info(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param UserCredential body: credential (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'repository', 'person'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_user_password" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `change_user_password`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_user_password`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `change_user_password`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/credential', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_user_profile(self, body, repository, person, **kwargs): # noqa: E501 - """Set profile of the user. # noqa: E501 - - Set profile of the user. (To set foreign profiles, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_user_profile(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param UserProfileEdit body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_user_profile_with_http_info(body, repository, person, **kwargs) # noqa: E501 - else: - (data) = self.change_user_profile_with_http_info(body, repository, person, **kwargs) # noqa: E501 - return data - - def change_user_profile_with_http_info(self, body, repository, person, **kwargs): # noqa: E501 - """Set profile of the user. # noqa: E501 - - Set profile of the user. (To set foreign profiles, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_user_profile_with_http_info(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param UserProfileEdit body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'person'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_user_profile" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `change_user_profile`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_user_profile`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `change_user_profile`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/profile', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_group(self, body, repository, 
group, **kwargs): # noqa: E501 - """Create a new group. # noqa: E501 - - Create a new group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_group(body, repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param GroupProfile body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :param str parent: parent (will be added to this parent, also for name hashing), may be null - :return: Group - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_group_with_http_info(body, repository, group, **kwargs) # noqa: E501 - else: - (data) = self.create_group_with_http_info(body, repository, group, **kwargs) # noqa: E501 - return data - - def create_group_with_http_info(self, body, repository, group, **kwargs): # noqa: E501 - """Create a new group. # noqa: E501 - - Create a new group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_group_with_http_info(body, repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param GroupProfile body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :param str parent: parent (will be added to this parent, also for name hashing), may be null - :return: Group - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'repository', 'group', 'parent'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_group" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_group`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_group`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `create_group`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - if 'parent' in params: - query_params.append(('parent', params['parent'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}/{group}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Group', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_user(self, body, repository, person, **kwargs): # noqa: E501 - """Create a new user. # noqa: E501 - - Create a new user. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_user(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param UserProfileEdit body: profile (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (required) - :param str password: Password, leave empty if you don't want to set any - :return: User - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_user_with_http_info(body, repository, person, **kwargs) # noqa: E501 - else: - (data) = self.create_user_with_http_info(body, repository, person, **kwargs) # noqa: E501 - return data - - def create_user_with_http_info(self, body, repository, person, **kwargs): # noqa: E501 - """Create a new user. # noqa: E501 - - Create a new user. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_user_with_http_info(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param UserProfileEdit body: profile (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (required) - :param str password: Password, leave empty if you don't want to set any - :return: User - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'person', 'password'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_user" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_user`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_user`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `create_user`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - if 'password' in params: - query_params.append(('password', params['password'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='User', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - 
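# --- Editor's note (illustrative sketch, not part of the deleted file) --------------
# The docstrings above describe how the generated IAM client being removed in this
# patch was meant to be called. A minimal usage sketch, assuming a configured
# ApiClient pointing at a reachable edu-sharing instance; the GroupProfile import
# path and its field name, as well as the group/member names, are assumptions for
# illustration and are not confirmed by this patch.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.iam_v1_api import IAMV1Api
from edu_sharing_client.models.group_profile import GroupProfile  # assumed module path

api = IAMV1Api(ApiClient())
profile = GroupProfile(display_name="merlin_400")  # field name assumed

# Synchronous call: create a group in the home repository (admin rights required),
# matching create_group(body, repository, group) as documented above.
group = api.create_group(profile, "-home-", "merlin_400")

# Asynchronous variant, as every docstring above explains: pass async_req=True and
# call .get() on the returned thread.
thread = api.add_membership("-home-", "merlin_400", "some_user", async_req=True)
thread.get()
# -------------------------------------------------------------------------------------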
def delete_group(self, repository, group, **kwargs): # noqa: E501 - """Delete the group. # noqa: E501 - - Delete the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_group(repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_group_with_http_info(repository, group, **kwargs) # noqa: E501 - else: - (data) = self.delete_group_with_http_info(repository, group, **kwargs) # noqa: E501 - return data - - def delete_group_with_http_info(self, repository, group, **kwargs): # noqa: E501 - """Delete the group. # noqa: E501 - - Delete the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_group_with_http_info(repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'group'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_group" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_group`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `delete_group`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}/{group}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_membership(self, repository, group, 
member, **kwargs): # noqa: E501 - """Delete member from the group. # noqa: E501 - - Delete member from the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_membership(repository, group, member, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :param str member: authorityName of member (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_membership_with_http_info(repository, group, member, **kwargs) # noqa: E501 - else: - (data) = self.delete_membership_with_http_info(repository, group, member, **kwargs) # noqa: E501 - return data - - def delete_membership_with_http_info(self, repository, group, member, **kwargs): # noqa: E501 - """Delete member from the group. # noqa: E501 - - Delete member from the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_membership_with_http_info(repository, group, member, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :param str member: authorityName of member (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'group', 'member'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_membership" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_membership`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `delete_membership`") # noqa: E501 - # verify the required parameter 'member' is set - if ('member' not in params or - params['member'] is None): - raise ValueError("Missing the required parameter `member` when calling `delete_membership`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - if 'member' in params: - path_params['member'] = params['member'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - 
'/iam/v1/groups/{repository}/{group}/members/{member}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_group(self, repository, group, **kwargs): # noqa: E501 - """Get the group. # noqa: E501 - - Get the group. (To get foreign profiles, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_group(repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :return: GroupEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_group_with_http_info(repository, group, **kwargs) # noqa: E501 - else: - (data) = self.get_group_with_http_info(repository, group, **kwargs) # noqa: E501 - return data - - def get_group_with_http_info(self, repository, group, **kwargs): # noqa: E501 - """Get the group. # noqa: E501 - - Get the group. (To get foreign profiles, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_group_with_http_info(repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: groupname (required) - :return: GroupEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'group'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_group" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_group`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `get_group`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}/{group}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='GroupEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_membership(self, repository, group, **kwargs): # noqa: E501 - """Get all members of the group. # noqa: E501 - - Get all members of the group. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_membership(repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: authority name (begins with GROUP_) (required) - :param str pattern: pattern - :param str authority_type: authorityType either GROUP or USER, empty to show all - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_membership_with_http_info(repository, group, **kwargs) # noqa: E501 - else: - (data) = self.get_membership_with_http_info(repository, group, **kwargs) # noqa: E501 - return data - - def get_membership_with_http_info(self, repository, group, **kwargs): # noqa: E501 - """Get all members of the group. # noqa: E501 - - Get all members of the group. (admin rights are required.) 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_membership_with_http_info(repository, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: authority name (begins with GROUP_) (required) - :param str pattern: pattern - :param str authority_type: authorityType either GROUP or USER, empty to show all - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'group', 'pattern', 'authority_type', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_membership" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_membership`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `get_membership`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - if 'pattern' in params: - query_params.append(('pattern', params['pattern'])) # noqa: E501 - if 'authority_type' in params: - query_params.append(('authorityType', params['authority_type'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}/{group}/members', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='AuthorityEntries', # noqa: E501 - auth_settings=auth_settings, - 
async_req=params.get('async_req'),
- _return_http_data_only=params.get('_return_http_data_only'),
- _preload_content=params.get('_preload_content', True),
- _request_timeout=params.get('_request_timeout'),
- collection_formats=collection_formats)
-
- def get_node_list(self, repository, person, list, **kwargs): # noqa: E501
- """Get a specific node list for a user # noqa: E501
-
- For guest users, the list will be temporarily stored in the current session # noqa: E501
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
- >>> thread = api.get_node_list(repository, person, list, async_req=True)
- >>> result = thread.get()
-
- :param async_req bool
- :param str repository: ID of repository (or \"-home-\" for home repository) (required)
- :param str person: username (or \"-me-\" for current user) (required)
- :param str list: list name (required)
- :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties)
- :param list[str] sort_properties: sort properties
- :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index
- :return: NodeEntries
- If the method is called asynchronously,
- returns the request thread.
- """
- kwargs['_return_http_data_only'] = True
- if kwargs.get('async_req'):
- return self.get_node_list_with_http_info(repository, person, list, **kwargs) # noqa: E501
- else:
- (data) = self.get_node_list_with_http_info(repository, person, list, **kwargs) # noqa: E501
- return data
-
- def get_node_list_with_http_info(self, repository, person, list, **kwargs): # noqa: E501
- """Get a specific node list for a user # noqa: E501
-
- For guest users, the list will be temporarily stored in the current session # noqa: E501
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
- >>> thread = api.get_node_list_with_http_info(repository, person, list, async_req=True)
- >>> result = thread.get()
-
- :param async_req bool
- :param str repository: ID of repository (or \"-home-\" for home repository) (required)
- :param str person: username (or \"-me-\" for current user) (required)
- :param str list: list name (required)
- :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties)
- :param list[str] sort_properties: sort properties
- :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index
- :return: NodeEntries
- If the method is called asynchronously,
- returns the request thread.
- """ - - all_params = ['repository', 'person', 'list', 'property_filter', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_node_list" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_node_list`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `get_node_list`") # noqa: E501 - # verify the required parameter 'list' is set - if ('list' not in params or - params['list'] is None): - raise ValueError("Missing the required parameter `list` when calling `get_node_list`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - if 'list' in params: - path_params['list'] = params['list'] # noqa: E501 - - query_params = [] - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/nodeList/{list}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_preferences(self, repository, person, **kwargs): # noqa: E501 - """Get preferences stored for user # noqa: E501 - - Will fail for guest # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_preferences(repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: Preferences - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_preferences_with_http_info(repository, person, **kwargs) # noqa: E501 - else: - (data) = self.get_preferences_with_http_info(repository, person, **kwargs) # noqa: E501 - return data - - def get_preferences_with_http_info(self, repository, person, **kwargs): # noqa: E501 - """Get preferences stored for user # noqa: E501 - - Will fail for guest # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_preferences_with_http_info(repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: Preferences - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'person'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_preferences" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_preferences`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `get_preferences`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/preferences', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Preferences', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_recently_invited(self, repository, **kwargs): # noqa: E501 - """Get recently invited authorities. # noqa: E501 - - Get the authorities the current user has recently invited. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_recently_invited(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_recently_invited_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.get_recently_invited_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def get_recently_invited_with_http_info(self, repository, **kwargs): # noqa: E501 - """Get recently invited authorities. # noqa: E501 - - Get the authorities the current user has recently invited. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_recently_invited_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_recently_invited" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_recently_invited`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/authorities/{repository}/recent', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='AuthorityEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_subgroup_by_type(self, repository, group, type, **kwargs): # noqa: E501 - """Get a subgroup by the specified type # noqa: E501 - - Get a subgroup by the specified type # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_subgroup_by_type(repository, group, type, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: authority name of the parent/primary group (begins with GROUP_) (required) - :param str type: authorityType either GROUP or USER, empty to show all (required) - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_subgroup_by_type_with_http_info(repository, group, type, **kwargs) # noqa: E501 - else: - (data) = self.get_subgroup_by_type_with_http_info(repository, group, type, **kwargs) # noqa: E501 - return data - - def get_subgroup_by_type_with_http_info(self, repository, group, type, **kwargs): # noqa: E501 - """Get a subgroup by the specified type # noqa: E501 - - Get a subgroup by the specified type # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_subgroup_by_type_with_http_info(repository, group, type, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str group: authority name of the parent/primary group (begins with GROUP_) (required) - :param str type: authorityType either GROUP or USER, empty to show all (required) - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'group', 'type'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_subgroup_by_type" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_subgroup_by_type`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `get_subgroup_by_type`") # noqa: E501 - # verify the required parameter 'type' is set - if ('type' not in params or - params['type'] is None): - raise ValueError("Missing the required parameter `type` when calling `get_subgroup_by_type`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - if 'type' in params: - path_params['type'] = params['type'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}/{group}/type/{type}', 'GET', - 
path_params,
- query_params,
- header_params,
- body=body_params,
- post_params=form_params,
- files=local_var_files,
- response_type='AuthorityEntries', # noqa: E501
- auth_settings=auth_settings,
- async_req=params.get('async_req'),
- _return_http_data_only=params.get('_return_http_data_only'),
- _preload_content=params.get('_preload_content', True),
- _request_timeout=params.get('_request_timeout'),
- collection_formats=collection_formats)
-
- def get_user(self, repository, person, **kwargs): # noqa: E501
- """Get the user. # noqa: E501
-
- Get the user. (Not all information is fetched for foreign profiles if the current user is not an admin) # noqa: E501
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
- >>> thread = api.get_user(repository, person, async_req=True)
- >>> result = thread.get()
-
- :param async_req bool
- :param str repository: ID of repository (or \"-home-\" for home repository) (required)
- :param str person: username (or \"-me-\" for current user) (required)
- :return: UserEntry
- If the method is called asynchronously,
- returns the request thread.
- """
- kwargs['_return_http_data_only'] = True
- if kwargs.get('async_req'):
- return self.get_user_with_http_info(repository, person, **kwargs) # noqa: E501
- else:
- (data) = self.get_user_with_http_info(repository, person, **kwargs) # noqa: E501
- return data
-
- def get_user_with_http_info(self, repository, person, **kwargs): # noqa: E501
- """Get the user. # noqa: E501
-
- Get the user. (Not all information is fetched for foreign profiles if the current user is not an admin) # noqa: E501
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
- >>> thread = api.get_user_with_http_info(repository, person, async_req=True)
- >>> result = thread.get()
-
- :param async_req bool
- :param str repository: ID of repository (or \"-home-\" for home repository) (required)
- :param str person: username (or \"-me-\" for current user) (required)
- :return: UserEntry
- If the method is called asynchronously,
- returns the request thread.
- """ - - all_params = ['repository', 'person'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_user" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_user`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `get_user`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='UserEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_user_groups(self, repository, person, **kwargs): # noqa: E501 - """Get all groups the given user is member of. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_user_groups(repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: authority name (required) - :param str pattern: pattern - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: GroupEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_user_groups_with_http_info(repository, person, **kwargs) # noqa: E501 - else: - (data) = self.get_user_groups_with_http_info(repository, person, **kwargs) # noqa: E501 - return data - - def get_user_groups_with_http_info(self, repository, person, **kwargs): # noqa: E501 - """Get all groups the given user is member of. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_user_groups_with_http_info(repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: authority name (required) - :param str pattern: pattern - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: GroupEntries - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'person', 'pattern', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_user_groups" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_user_groups`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `get_user_groups`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - if 'pattern' in params: - query_params.append(('pattern', params['pattern'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/memberships', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='GroupEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_node_list(self, repository, person, list, node, **kwargs): # noqa: 
E501
- """Delete a node of a node list of a user # noqa: E501
-
- For guest users, the list will be temporarily stored in the current session # noqa: E501
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
- >>> thread = api.remove_node_list(repository, person, list, node, async_req=True)
- >>> result = thread.get()
-
- :param async_req bool
- :param str repository: ID of repository (or \"-home-\" for home repository) (required)
- :param str person: username (or \"-me-\" for current user) (required)
- :param str list: list name (required)
- :param str node: ID of node (required)
- :return: None
- If the method is called asynchronously,
- returns the request thread.
- """
- kwargs['_return_http_data_only'] = True
- if kwargs.get('async_req'):
- return self.remove_node_list_with_http_info(repository, person, list, node, **kwargs) # noqa: E501
- else:
- (data) = self.remove_node_list_with_http_info(repository, person, list, node, **kwargs) # noqa: E501
- return data
-
- def remove_node_list_with_http_info(self, repository, person, list, node, **kwargs): # noqa: E501
- """Delete a node of a node list of a user # noqa: E501
-
- For guest users, the list will be temporarily stored in the current session # noqa: E501
- This method makes a synchronous HTTP request by default. To make an
- asynchronous HTTP request, please pass async_req=True
- >>> thread = api.remove_node_list_with_http_info(repository, person, list, node, async_req=True)
- >>> result = thread.get()
-
- :param async_req bool
- :param str repository: ID of repository (or \"-home-\" for home repository) (required)
- :param str person: username (or \"-me-\" for current user) (required)
- :param str list: list name (required)
- :param str node: ID of node (required)
- :return: None
- If the method is called asynchronously,
- returns the request thread.
- """ - - all_params = ['repository', 'person', 'list', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_node_list" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `remove_node_list`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `remove_node_list`") # noqa: E501 - # verify the required parameter 'list' is set - if ('list' not in params or - params['list'] is None): - raise ValueError("Missing the required parameter `list` when calling `remove_node_list`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `remove_node_list`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - if 'list' in params: - path_params['list'] = params['list'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/nodeList/{list}/{node}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_user_avatar(self, repository, person, **kwargs): # noqa: E501 - """Remove avatar of the user. # noqa: E501 - - Remove avatar of the user. (To Remove foreign avatars, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_user_avatar(repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_user_avatar_with_http_info(repository, person, **kwargs) # noqa: E501 - else: - (data) = self.remove_user_avatar_with_http_info(repository, person, **kwargs) # noqa: E501 - return data - - def remove_user_avatar_with_http_info(self, repository, person, **kwargs): # noqa: E501 - """Remove avatar of the user. # noqa: E501 - - Remove avatar of the user. (To Remove foreign avatars, admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_user_avatar_with_http_info(repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'person'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_user_avatar" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `remove_user_avatar`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `remove_user_avatar`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/avatar', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search_authorities(self, repository, pattern, **kwargs): # noqa: E501 - """Search authorities. # noqa: E501 - - Search authorities. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_authorities(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern (required) - :param bool _global: global search context, defaults to true, otherwise just searches for users within the organizations - :param str group_type: find a specific groupType (does nothing for persons) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_authorities_with_http_info(repository, pattern, **kwargs) # noqa: E501 - else: - (data) = self.search_authorities_with_http_info(repository, pattern, **kwargs) # noqa: E501 - return data - - def search_authorities_with_http_info(self, repository, pattern, **kwargs): # noqa: E501 - """Search authorities. # noqa: E501 - - Search authorities. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_authorities_with_http_info(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern (required) - :param bool _global: global search context, defaults to true, otherwise just searches for users within the organizations - :param str group_type: find a specific groupType (does nothing for persons) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :return: AuthorityEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'pattern', '_global', 'group_type', 'max_items', 'skip_count'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_authorities" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search_authorities`") # noqa: E501 - # verify the required parameter 'pattern' is set - if ('pattern' not in params or - params['pattern'] is None): - raise ValueError("Missing the required parameter `pattern` when calling `search_authorities`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'pattern' in params: - query_params.append(('pattern', params['pattern'])) # noqa: E501 - if '_global' in params: - query_params.append(('global', params['_global'])) # noqa: E501 - if 'group_type' in params: - query_params.append(('groupType', params['group_type'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/authorities/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='AuthorityEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search_groups(self, repository, pattern, **kwargs): # noqa: E501 - """Search groups. # noqa: E501 - - Search groups. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_groups(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern (required) - :param str group_type: find a specific groupType - :param bool _global: global search context, defaults to true, otherwise just searches for groups within the organizations - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index - :return: GroupEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_groups_with_http_info(repository, pattern, **kwargs) # noqa: E501 - else: - (data) = self.search_groups_with_http_info(repository, pattern, **kwargs) # noqa: E501 - return data - - def search_groups_with_http_info(self, repository, pattern, **kwargs): # noqa: E501 - """Search groups. # noqa: E501 - - Search groups. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_groups_with_http_info(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern (required) - :param str group_type: find a specific groupType - :param bool _global: global search context, defaults to true, otherwise just searches for groups within the organizations - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: GroupEntries - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'pattern', 'group_type', '_global', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_groups" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search_groups`") # noqa: E501 - # verify the required parameter 'pattern' is set - if ('pattern' not in params or - params['pattern'] is None): - raise ValueError("Missing the required parameter `pattern` when calling `search_groups`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'pattern' in params: - query_params.append(('pattern', params['pattern'])) # noqa: E501 - if 'group_type' in params: - query_params.append(('groupType', params['group_type'])) # noqa: E501 - if '_global' in params: - query_params.append(('global', params['_global'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', 
params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/groups/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='GroupEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search_user(self, repository, pattern, **kwargs): # noqa: E501 - """Search users. # noqa: E501 - - Search users. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_user(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern (required) - :param bool _global: global search context, defaults to true, otherwise just searches for users within the organizations - :param str status: the user status (e.g. active), if not set, all users are returned - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: UserEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_user_with_http_info(repository, pattern, **kwargs) # noqa: E501 - else: - (data) = self.search_user_with_http_info(repository, pattern, **kwargs) # noqa: E501 - return data - - def search_user_with_http_info(self, repository, pattern, **kwargs): # noqa: E501 - """Search users. # noqa: E501 - - Search users. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_user_with_http_info(repository, pattern, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern (required) - :param bool _global: global search context, defaults to true, otherwise just searches for users within the organizations - :param str status: the user status (e.g. active), if not set, all users are returned - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: UserEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'pattern', '_global', 'status', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_user" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search_user`") # noqa: E501 - # verify the required parameter 'pattern' is set - if ('pattern' not in params or - params['pattern'] is None): - raise ValueError("Missing the required parameter `pattern` when calling `search_user`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'pattern' in params: - query_params.append(('pattern', params['pattern'])) # noqa: E501 - if '_global' in params: - query_params.append(('global', params['_global'])) # noqa: E501 - if 'status' in params: - query_params.append(('status', params['status'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='UserEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_preferences(self, body, repository, person, **kwargs): # noqa: E501 - """Set preferences for user # noqa: E501 - - Will fail for guest # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_preferences(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: preferences (json string) (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_preferences_with_http_info(body, repository, person, **kwargs) # noqa: E501 - else: - (data) = self.set_preferences_with_http_info(body, repository, person, **kwargs) # noqa: E501 - return data - - def set_preferences_with_http_info(self, body, repository, person, **kwargs): # noqa: E501 - """Set preferences for user # noqa: E501 - - Will fail for guest # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_preferences_with_http_info(body, repository, person, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: preferences (json string) (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (or \"-me-\" for current user) (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'person'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_preferences" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `set_preferences`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `set_preferences`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `set_preferences`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/preferences', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_user_status(self, repository, person, status, notify, **kwargs): # noqa: E501 - """update the user status. 
# noqa: E501 - - update the user status. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_user_status(repository, person, status, notify, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (required) - :param str status: the new status to set (required) - :param bool notify: notify the user via mail (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_user_status_with_http_info(repository, person, status, notify, **kwargs) # noqa: E501 - else: - (data) = self.update_user_status_with_http_info(repository, person, status, notify, **kwargs) # noqa: E501 - return data - - def update_user_status_with_http_info(self, repository, person, status, notify, **kwargs): # noqa: E501 - """update the user status. # noqa: E501 - - update the user status. (admin rights are required.) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_user_status_with_http_info(repository, person, status, notify, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str person: username (required) - :param str status: the new status to set (required) - :param bool notify: notify the user via mail (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'person', 'status', 'notify'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_user_status" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `update_user_status`") # noqa: E501 - # verify the required parameter 'person' is set - if ('person' not in params or - params['person'] is None): - raise ValueError("Missing the required parameter `person` when calling `update_user_status`") # noqa: E501 - # verify the required parameter 'status' is set - if ('status' not in params or - params['status'] is None): - raise ValueError("Missing the required parameter `status` when calling `update_user_status`") # noqa: E501 - # verify the required parameter 'notify' is set - if ('notify' not in params or - params['notify'] is None): - raise ValueError("Missing the required parameter `notify` when calling `update_user_status`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'person' in params: - path_params['person'] = params['person'] # noqa: E501 - if 'status' in params: - path_params['status'] = params['status'] # noqa: E501 - - query_params = [] - if 'notify' in params: - query_params.append(('notify', params['notify'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/iam/v1/people/{repository}/{person}/status/{status}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/mds_v1_api.py b/edu_sharing_client/api/mds_v1_api.py deleted file mode 100644 index fbef057b..00000000 --- a/edu_sharing_client/api/mds_v1_api.py +++ /dev/null @@ -1,342 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class MDSV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def get_metadata_set_v2(self, repository, metadataset, **kwargs): # noqa: E501 - """Get metadata set new. # noqa: E501 - - Get metadata set new. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_metadata_set_v2(repository, metadataset, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :return: MdsV2 - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_metadata_set_v2_with_http_info(repository, metadataset, **kwargs) # noqa: E501 - else: - (data) = self.get_metadata_set_v2_with_http_info(repository, metadataset, **kwargs) # noqa: E501 - return data - - def get_metadata_set_v2_with_http_info(self, repository, metadataset, **kwargs): # noqa: E501 - """Get metadata set new. # noqa: E501 - - Get metadata set new. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_metadata_set_v2_with_http_info(repository, metadataset, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :return: MdsV2 - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'metadataset'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_metadata_set_v2" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_metadata_set_v2`") # noqa: E501 - # verify the required parameter 'metadataset' is set - if ('metadataset' not in params or - params['metadataset'] is None): - raise ValueError("Missing the required parameter `metadataset` when calling `get_metadata_set_v2`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'metadataset' in params: - path_params['metadataset'] = params['metadataset'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mds/v1/metadatasetsV2/{repository}/{metadataset}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='MdsV2', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_metadata_sets_v2(self, repository, **kwargs): # noqa: E501 - """Get metadata sets V2 of repository. # noqa: E501 - - Get metadata sets V2 of repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_metadata_sets_v2(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: MdsEntriesV2 - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_metadata_sets_v2_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.get_metadata_sets_v2_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def get_metadata_sets_v2_with_http_info(self, repository, **kwargs): # noqa: E501 - """Get metadata sets V2 of repository. # noqa: E501 - - Get metadata sets V2 of repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_metadata_sets_v2_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: MdsEntriesV2 - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_metadata_sets_v2" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_metadata_sets_v2`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mds/v1/metadatasetsV2/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='MdsEntriesV2', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_values_v2(self, repository, metadataset, **kwargs): # noqa: E501 - """Get values. # noqa: E501 - - Get values. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_values_v2(repository, metadataset, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :param SuggestionParam body: suggestionParam - :return: MdsEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_values_v2_with_http_info(repository, metadataset, **kwargs) # noqa: E501 - else: - (data) = self.get_values_v2_with_http_info(repository, metadataset, **kwargs) # noqa: E501 - return data - - def get_values_v2_with_http_info(self, repository, metadataset, **kwargs): # noqa: E501 - """Get values. # noqa: E501 - - Get values. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_values_v2_with_http_info(repository, metadataset, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :param SuggestionParam body: suggestionParam - :return: MdsEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'metadataset', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_values_v2" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_values_v2`") # noqa: E501 - # verify the required parameter 'metadataset' is set - if ('metadataset' not in params or - params['metadataset'] is None): - raise ValueError("Missing the required parameter `metadataset` when calling `get_values_v2`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'metadataset' in params: - path_params['metadataset'] = params['metadataset'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mds/v1/metadatasetsV2/{repository}/{metadataset}/values', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='MdsEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/mediacenter_v1_api.py b/edu_sharing_client/api/mediacenter_v1_api.py deleted file mode 100644 index 70d59c07..00000000 --- a/edu_sharing_client/api/mediacenter_v1_api.py +++ /dev/null @@ -1,1207 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class MEDIACENTERV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_mediacenter_group(self, repository, mediacenter, group, **kwargs): # noqa: E501 - """add a group that is managed by the given mediacenter # noqa: E501 - - although not restricted, it is recommended that the group is an edu-sharing organization (admin rights are required) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_mediacenter_group(repository, mediacenter, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :param str group: authorityName of the group that should be managed by that mediacenter (required) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_mediacenter_group_with_http_info(repository, mediacenter, group, **kwargs) # noqa: E501 - else: - (data) = self.add_mediacenter_group_with_http_info(repository, mediacenter, group, **kwargs) # noqa: E501 - return data - - def add_mediacenter_group_with_http_info(self, repository, mediacenter, group, **kwargs): # noqa: E501 - """add a group that is managed by the given mediacenter # noqa: E501 - - although not restricted, it is recommended that the group is an edu-sharing organization (admin rights are required) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_mediacenter_group_with_http_info(repository, mediacenter, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :param str group: authorityName of the group that should be managed by that mediacenter (required) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'mediacenter', 'group'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_mediacenter_group" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_mediacenter_group`") # noqa: E501 - # verify the required parameter 'mediacenter' is set - if ('mediacenter' not in params or - params['mediacenter'] is None): - raise ValueError("Missing the required parameter `mediacenter` when calling `add_mediacenter_group`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `add_mediacenter_group`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'mediacenter' in params: - path_params['mediacenter'] = params['mediacenter'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Group]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_mediacenter(self, repository, mediacenter, **kwargs): # noqa: E501 - """create new mediacenter in repository. # noqa: E501 - - admin rights are required. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_mediacenter(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: mediacenter name (required) - :param Profile body: - :return: Mediacenter - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_mediacenter_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - else: - (data) = self.create_mediacenter_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - return data - - def create_mediacenter_with_http_info(self, repository, mediacenter, **kwargs): # noqa: E501 - """create new mediacenter in repository. # noqa: E501 - - admin rights are required. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_mediacenter_with_http_info(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: mediacenter name (required) - :param Profile body: - :return: Mediacenter - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'mediacenter', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_mediacenter" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_mediacenter`") # noqa: E501 - # verify the required parameter 'mediacenter' is set - if ('mediacenter' not in params or - params['mediacenter'] is None): - raise ValueError("Missing the required parameter `mediacenter` when calling `create_mediacenter`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'mediacenter' in params: - path_params['mediacenter'] = params['mediacenter'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}/{mediacenter}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Mediacenter', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_mediacenter(self, repository, mediacenter, **kwargs): # noqa: E501 - """delete a mediacenter group and it's admin group and proxy group # noqa: E501 - - admin rights are required. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_mediacenter(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_mediacenter_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - else: - (data) = self.delete_mediacenter_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - return data - - def delete_mediacenter_with_http_info(self, repository, mediacenter, **kwargs): # noqa: E501 - """delete a mediacenter group and it's admin group and proxy group # noqa: E501 - - admin rights are required. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_mediacenter_with_http_info(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'mediacenter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_mediacenter" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_mediacenter`") # noqa: E501 - # verify the required parameter 'mediacenter' is set - if ('mediacenter' not in params or - params['mediacenter'] is None): - raise ValueError("Missing the required parameter `mediacenter` when calling `delete_mediacenter`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'mediacenter' in params: - path_params['mediacenter'] = params['mediacenter'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}/{mediacenter}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def edit_mediacenter(self, repository, mediacenter, **kwargs): # noqa: E501 - """edit a mediacenter in repository. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.edit_mediacenter(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: mediacenter name (required) - :param Profile body: - :return: Mediacenter - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.edit_mediacenter_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - else: - (data) = self.edit_mediacenter_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - return data - - def edit_mediacenter_with_http_info(self, repository, mediacenter, **kwargs): # noqa: E501 - """edit a mediacenter in repository. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.edit_mediacenter_with_http_info(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: mediacenter name (required) - :param Profile body: - :return: Mediacenter - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'mediacenter', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method edit_mediacenter" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `edit_mediacenter`") # noqa: E501 - # verify the required parameter 'mediacenter' is set - if ('mediacenter' not in params or - params['mediacenter'] is None): - raise ValueError("Missing the required parameter `mediacenter` when calling `edit_mediacenter`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'mediacenter' in params: - path_params['mediacenter'] = params['mediacenter'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}/{mediacenter}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Mediacenter', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_mediacenter_groups(self, repository, mediacenter, **kwargs): # noqa: E501 - """get groups that are managed by the given mediacenter # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_mediacenter_groups(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_mediacenter_groups_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - else: - (data) = self.get_mediacenter_groups_with_http_info(repository, mediacenter, **kwargs) # noqa: E501 - return data - - def get_mediacenter_groups_with_http_info(self, repository, mediacenter, **kwargs): # noqa: E501 - """get groups that are managed by the given mediacenter # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_mediacenter_groups_with_http_info(repository, mediacenter, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'mediacenter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_mediacenter_groups" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_mediacenter_groups`") # noqa: E501 - # verify the required parameter 'mediacenter' is set - if ('mediacenter' not in params or - params['mediacenter'] is None): - raise ValueError("Missing the required parameter `mediacenter` when calling `get_mediacenter_groups`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'mediacenter' in params: - path_params['mediacenter'] = params['mediacenter'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Group]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_mediacenter_licensed_nodes(self, repository, mediacenter, searchword, **kwargs): # noqa: E501 - """get nodes that are licensed by the given mediacenter # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_mediacenter_licensed_nodes(repository, mediacenter, searchword, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that licenses nodes (required) - :param str searchword: searchword of licensed nodes (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_mediacenter_licensed_nodes_with_http_info(repository, mediacenter, searchword, **kwargs) # noqa: E501 - else: - (data) = self.get_mediacenter_licensed_nodes_with_http_info(repository, mediacenter, searchword, **kwargs) # noqa: E501 - return data - - def get_mediacenter_licensed_nodes_with_http_info(self, repository, mediacenter, searchword, **kwargs): # noqa: E501 - """get nodes that are licensed by the given mediacenter # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_mediacenter_licensed_nodes_with_http_info(repository, mediacenter, searchword, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that licenses nodes (required) - :param str searchword: searchword of licensed nodes (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'mediacenter', 'searchword', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_mediacenter_licensed_nodes" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_mediacenter_licensed_nodes`") # noqa: E501 - # verify the required parameter 'mediacenter' is set - if ('mediacenter' not in params or - params['mediacenter'] is None): - raise ValueError("Missing the required parameter `mediacenter` when calling `get_mediacenter_licensed_nodes`") # noqa: E501 - # verify the required parameter 'searchword' is set - if ('searchword' not in params or - params['searchword'] is None): - raise ValueError("Missing the required parameter `searchword` when calling `get_mediacenter_licensed_nodes`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'mediacenter' in params: - path_params['mediacenter'] = params['mediacenter'] # noqa: E501 - - query_params = [] - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: 
E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - if 'searchword' in params: - query_params.append(('searchword', params['searchword'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}/{mediacenter}/licenses', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Group]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_mediacenters(self, repository, **kwargs): # noqa: E501 - """get mediacenters in the repository. # noqa: E501 - - Only shows the one available/managing the current user (only admin can access all) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_mediacenters(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: list[Mediacenter] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_mediacenters_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.get_mediacenters_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def get_mediacenters_with_http_info(self, repository, **kwargs): # noqa: E501 - """get mediacenters in the repository. # noqa: E501 - - Only shows the one available/managing the current user (only admin can access all) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_mediacenters_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: list[Mediacenter] - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_mediacenters" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_mediacenters`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Mediacenter]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_mc_org_connections(self, mc_orgs, **kwargs): # noqa: E501 - """Import Mediacenter Organisation Connection # noqa: E501 - - Import Mediacenter Organisation Connection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_mc_org_connections(mc_orgs, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mc_orgs: (required) - :param bool remove_schools_from_mc: removeSchoolsFromMC - :return: McOrgConnectResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_mc_org_connections_with_http_info(mc_orgs, **kwargs) # noqa: E501 - else: - (data) = self.import_mc_org_connections_with_http_info(mc_orgs, **kwargs) # noqa: E501 - return data - - def import_mc_org_connections_with_http_info(self, mc_orgs, **kwargs): # noqa: E501 - """Import Mediacenter Organisation Connection # noqa: E501 - - Import Mediacenter Organisation Connection. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_mc_org_connections_with_http_info(mc_orgs, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mc_orgs: (required) - :param bool remove_schools_from_mc: removeSchoolsFromMC - :return: McOrgConnectResult - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['mc_orgs', 'remove_schools_from_mc'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_mc_org_connections" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mc_orgs' is set - if ('mc_orgs' not in params or - params['mc_orgs'] is None): - raise ValueError("Missing the required parameter `mc_orgs` when calling `import_mc_org_connections`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'remove_schools_from_mc' in params: - query_params.append(('removeSchoolsFromMC', params['remove_schools_from_mc'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - if 'mc_orgs' in params: - local_var_files['mcOrgs'] = params['mc_orgs'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/import/mc_org', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='McOrgConnectResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_mediacenters(self, mediacenters, **kwargs): # noqa: E501 - """Import mediacenters # noqa: E501 - - Import mediacenters. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_mediacenters(mediacenters, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mediacenters: (required) - :return: MediacentersImportResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_mediacenters_with_http_info(mediacenters, **kwargs) # noqa: E501 - else: - (data) = self.import_mediacenters_with_http_info(mediacenters, **kwargs) # noqa: E501 - return data - - def import_mediacenters_with_http_info(self, mediacenters, **kwargs): # noqa: E501 - """Import mediacenters # noqa: E501 - - Import mediacenters. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_mediacenters_with_http_info(mediacenters, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mediacenters: (required) - :return: MediacentersImportResult - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['mediacenters'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_mediacenters" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mediacenters' is set - if ('mediacenters' not in params or - params['mediacenters'] is None): - raise ValueError("Missing the required parameter `mediacenters` when calling `import_mediacenters`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'mediacenters' in params: - local_var_files['mediacenters'] = params['mediacenters'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/import/mediacenters', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='MediacentersImportResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_organisations(self, organisations, **kwargs): # noqa: E501 - """Import Organisations # noqa: E501 - - Import Organisations. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_organisations(organisations, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str organisations: (required) - :return: OrganisationsImportResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_organisations_with_http_info(organisations, **kwargs) # noqa: E501 - else: - (data) = self.import_organisations_with_http_info(organisations, **kwargs) # noqa: E501 - return data - - def import_organisations_with_http_info(self, organisations, **kwargs): # noqa: E501 - """Import Organisations # noqa: E501 - - Import Organisations. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_organisations_with_http_info(organisations, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str organisations: (required) - :return: OrganisationsImportResult - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['organisations'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_organisations" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'organisations' is set - if ('organisations' not in params or - params['organisations'] is None): - raise ValueError("Missing the required parameter `organisations` when calling `import_organisations`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'organisations' in params: - local_var_files['organisations'] = params['organisations'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/import/organisations', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='OrganisationsImportResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_mediacenter_group(self, repository, mediacenter, group, **kwargs): # noqa: E501 - """delete a group that is managed by the given mediacenter # noqa: E501 - - admin rights are required. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_mediacenter_group(repository, mediacenter, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :param str group: authorityName of the group that should not longer be managed by that mediacenter (required) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_mediacenter_group_with_http_info(repository, mediacenter, group, **kwargs) # noqa: E501 - else: - (data) = self.remove_mediacenter_group_with_http_info(repository, mediacenter, group, **kwargs) # noqa: E501 - return data - - def remove_mediacenter_group_with_http_info(self, repository, mediacenter, group, **kwargs): # noqa: E501 - """delete a group that is managed by the given mediacenter # noqa: E501 - - admin rights are required. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_mediacenter_group_with_http_info(repository, mediacenter, group, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str mediacenter: authorityName of the mediacenter that should manage the group (required) - :param str group: authorityName of the group that should not longer be managed by that mediacenter (required) - :return: list[Group] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'mediacenter', 'group'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_mediacenter_group" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `remove_mediacenter_group`") # noqa: E501 - # verify the required parameter 'mediacenter' is set - if ('mediacenter' not in params or - params['mediacenter'] is None): - raise ValueError("Missing the required parameter `mediacenter` when calling `remove_mediacenter_group`") # noqa: E501 - # verify the required parameter 'group' is set - if ('group' not in params or - params['group'] is None): - raise ValueError("Missing the required parameter `group` when calling `remove_mediacenter_group`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'mediacenter' in params: - path_params['mediacenter'] = params['mediacenter'] # noqa: E501 - if 'group' in params: - path_params['group'] = params['group'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Group]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/network_v1_api.py b/edu_sharing_client/api/network_v1_api.py deleted file mode 100644 index 56377704..00000000 --- a/edu_sharing_client/api/network_v1_api.py +++ /dev/null @@ -1,496 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class NETWORKV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_service(self, **kwargs): # noqa: E501 - """Register service. # noqa: E501 - - Register a new service. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_service(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Service body: Service data object - :return: StoredService - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_service_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.add_service_with_http_info(**kwargs) # noqa: E501 - return data - - def add_service_with_http_info(self, **kwargs): # noqa: E501 - """Register service. # noqa: E501 - - Register a new service. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_service_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Service body: Service data object - :return: StoredService - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_service" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/network/v1/services', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='StoredService', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_repositories(self, **kwargs): # noqa: E501 - """Get repositories. # noqa: E501 - - Get repositories. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_repositories(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[RepoEntries] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_repositories_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_repositories_with_http_info(**kwargs) # noqa: E501 - return data - - def get_repositories_with_http_info(self, **kwargs): # noqa: E501 - """Get repositories. # noqa: E501 - - Get repositories. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_repositories_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: list[RepoEntries] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_repositories" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/network/v1/repositories', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[RepoEntries]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_service(self, **kwargs): # noqa: E501 - """Get own service. # noqa: E501 - - Get the servic entry from the current repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_service(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: StoredService - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_service_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_service_with_http_info(**kwargs) # noqa: E501 - return data - - def get_service_with_http_info(self, **kwargs): # noqa: E501 - """Get own service. # noqa: E501 - - Get the servic entry from the current repository. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_service_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :return: StoredService - If the method is called asynchronously, - returns the request thread. - """ - - all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_service" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/network/v1/service', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='StoredService', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_services(self, **kwargs): # noqa: E501 - """Get services. # noqa: E501 - - Get registerted services. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_services(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str query: search or filter for services - :return: list[StoredService] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_services_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_services_with_http_info(**kwargs) # noqa: E501 - return data - - def get_services_with_http_info(self, **kwargs): # noqa: E501 - """Get services. # noqa: E501 - - Get registerted services. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_services_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str query: search or filter for services - :return: list[StoredService] - If the method is called asynchronously, - returns the request thread. 
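A brief sketch of listing registered services with the NETWORKV1Api defined above; the query string is an arbitrary example value for the documented optional filter:

from edu_sharing_client.api.network_v1_api import NETWORKV1Api

api = NETWORKV1Api()  # default ApiClient(); host/auth setup not shown
services = api.get_services(query='render')  # 'render' is just an example filter value
for service in services:
    print(service)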
- """ - - all_params = ['query'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_services" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'query' in params: - query_params.append(('query', params['query'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/network/v1/services', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[StoredService]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_service(self, id, **kwargs): # noqa: E501 - """Update a service. # noqa: E501 - - Update an existing service. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_service(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Service id (required) - :param Service body: Service data object - :return: StoredService - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_service_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.update_service_with_http_info(id, **kwargs) # noqa: E501 - return data - - def update_service_with_http_info(self, id, **kwargs): # noqa: E501 - """Update a service. # noqa: E501 - - Update an existing service. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_service_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Service id (required) - :param Service body: Service data object - :return: StoredService - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id', 'body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_service" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `update_service`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/network/v1/services/{id}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='StoredService', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/node_v1_api.py b/edu_sharing_client/api/node_v1_api.py deleted file mode 100644 index 58ec61a1..00000000 --- a/edu_sharing_client/api/node_v1_api.py +++ /dev/null @@ -1,4749 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class NODEV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_aspects(self, body, repository, node, **kwargs): # noqa: E501 - """Add aspect to node. # noqa: E501 - - Add aspect to node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_aspects(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] body: aspect name, e.g. ccm:lomreplication (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_aspects_with_http_info(body, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.add_aspects_with_http_info(body, repository, node, **kwargs) # noqa: E501 - return data - - def add_aspects_with_http_info(self, body, repository, node, **kwargs): # noqa: E501 - """Add aspect to node. # noqa: E501 - - Add aspect to node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_aspects_with_http_info(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[str] body: aspect name, e.g. ccm:lomreplication (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_aspects" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `add_aspects`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_aspects`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `add_aspects`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/aspects', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def add_workflow_history(self, body, repository, node, **kwargs): # noqa: E501 - """Add workflow. # noqa: E501 - - Add workflow entry to node. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_workflow_history(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param WorkflowHistory body: The history entry to put (editor and time can be null and will be filled automatically) (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_workflow_history_with_http_info(body, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.add_workflow_history_with_http_info(body, repository, node, **kwargs) # noqa: E501 - return data - - def add_workflow_history_with_http_info(self, body, repository, node, **kwargs): # noqa: E501 - """Add workflow. # noqa: E501 - - Add workflow entry to node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_workflow_history_with_http_info(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param WorkflowHistory body: The history entry to put (editor and time can be null and will be filled automatically) (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_workflow_history" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `add_workflow_history`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_workflow_history`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `add_workflow_history`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = 
[] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/workflow', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_content(self, mimetype, repository, node, **kwargs): # noqa: E501 - """Change content of node. # noqa: E501 - - Change content of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_content(mimetype, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mimetype: MIME-Type (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str file: - :param str version_comment: comment, leave empty = no new version, otherwise new version is generated - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_content_with_http_info(mimetype, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.change_content_with_http_info(mimetype, repository, node, **kwargs) # noqa: E501 - return data - - def change_content_with_http_info(self, mimetype, repository, node, **kwargs): # noqa: E501 - """Change content of node. # noqa: E501 - - Change content of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_content_with_http_info(mimetype, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mimetype: MIME-Type (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str file: - :param str version_comment: comment, leave empty = no new version, otherwise new version is generated - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
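A sketch of change_content() as documented above; the node id and file name are placeholders, and passing a path string for the multipart 'file' field is an assumption about the generated upload handling:

from edu_sharing_client.api.node_v1_api import NODEV1Api

api = NODEV1Api()  # default ApiClient(); host/auth setup not shown
node_id = '00000000-0000-0000-0000-000000000000'  # placeholder node id
entry = api.change_content('application/pdf', '-home-', node_id,
                           file='material.pdf',              # placeholder upload path
                           version_comment='replaced file')  # non-empty comment creates a new version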
- """ - - all_params = ['mimetype', 'repository', 'node', 'file', 'version_comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_content" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mimetype' is set - if ('mimetype' not in params or - params['mimetype'] is None): - raise ValueError("Missing the required parameter `mimetype` when calling `change_content`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_content`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `change_content`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'version_comment' in params: - query_params.append(('versionComment', params['version_comment'])) # noqa: E501 - if 'mimetype' in params: - query_params.append(('mimetype', params['mimetype'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - if 'file' in params: - local_var_files['file'] = params['file'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/content', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_content_as_text(self, repository, node, mimetype, **kwargs): # noqa: E501 - """Change content of node as text. # noqa: E501 - - Change content of node as text. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_content_as_text(repository, node, mimetype, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str mimetype: MIME-Type (required) - :param str version_comment: comment, leave empty = no new version, otherwise new version is generated - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_content_as_text_with_http_info(repository, node, mimetype, **kwargs) # noqa: E501 - else: - (data) = self.change_content_as_text_with_http_info(repository, node, mimetype, **kwargs) # noqa: E501 - return data - - def change_content_as_text_with_http_info(self, repository, node, mimetype, **kwargs): # noqa: E501 - """Change content of node as text. # noqa: E501 - - Change content of node as text. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_content_as_text_with_http_info(repository, node, mimetype, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str mimetype: MIME-Type (required) - :param str version_comment: comment, leave empty = no new version, otherwise new version is generated - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'mimetype', 'version_comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_content_as_text" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_content_as_text`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `change_content_as_text`") # noqa: E501 - # verify the required parameter 'mimetype' is set - if ('mimetype' not in params or - params['mimetype'] is None): - raise ValueError("Missing the required parameter `mimetype` when calling `change_content_as_text`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'version_comment' in params: - query_params.append(('versionComment', params['version_comment'])) # noqa: E501 - if 'mimetype' in params: - query_params.append(('mimetype', params['mimetype'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/textContent', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - 
async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_metadata(self, body, repository, node, **kwargs): # noqa: E501 - """Change metadata of node. # noqa: E501 - - Change metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_metadata(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_metadata_with_http_info(body, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.change_metadata_with_http_info(body, repository, node, **kwargs) # noqa: E501 - return data - - def change_metadata_with_http_info(self, body, repository, node, **kwargs): # noqa: E501 - """Change metadata of node. # noqa: E501 - - Change metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_metadata_with_http_info(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_metadata" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `change_metadata`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_metadata`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `change_metadata`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/metadata', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_metadata_with_versioning(self, body, version_comment, repository, node, **kwargs): # noqa: E501 - """Change metadata of node (new version). # noqa: E501 - - Change metadata of node (new version). # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_metadata_with_versioning(body, version_comment, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties (required) - :param str version_comment: comment (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_metadata_with_versioning_with_http_info(body, version_comment, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.change_metadata_with_versioning_with_http_info(body, version_comment, repository, node, **kwargs) # noqa: E501 - return data - - def change_metadata_with_versioning_with_http_info(self, body, version_comment, repository, node, **kwargs): # noqa: E501 - """Change metadata of node (new version). # noqa: E501 - - Change metadata of node (new version). # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_metadata_with_versioning_with_http_info(body, version_comment, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties (required) - :param str version_comment: comment (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'version_comment', 'repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_metadata_with_versioning" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `change_metadata_with_versioning`") # noqa: E501 - # verify the required parameter 'version_comment' is set - if ('version_comment' not in params or - params['version_comment'] is None): - raise ValueError("Missing the required parameter `version_comment` when calling `change_metadata_with_versioning`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_metadata_with_versioning`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `change_metadata_with_versioning`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'version_comment' in params: - query_params.append(('versionComment', params['version_comment'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return 
self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/metadata', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_preview(self, mimetype, repository, node, **kwargs): # noqa: E501 - """Change preview of node. # noqa: E501 - - Change preview of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_preview(mimetype, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mimetype: MIME-Type (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str image: - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_preview_with_http_info(mimetype, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.change_preview_with_http_info(mimetype, repository, node, **kwargs) # noqa: E501 - return data - - def change_preview_with_http_info(self, mimetype, repository, node, **kwargs): # noqa: E501 - """Change preview of node. # noqa: E501 - - Change preview of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_preview_with_http_info(mimetype, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mimetype: MIME-Type (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str image: - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
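change_preview() mirrors change_content() but uploads an image through the 'image' form field; the path handling is again an assumption about the generated multipart code:

from edu_sharing_client.api.node_v1_api import NODEV1Api

api = NODEV1Api()  # default ApiClient(); host/auth setup not shown
node_id = '00000000-0000-0000-0000-000000000000'  # placeholder node id
entry = api.change_preview('image/png', '-home-', node_id, image='preview.png')  # placeholder image path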
- """ - - all_params = ['mimetype', 'repository', 'node', 'image'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_preview" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mimetype' is set - if ('mimetype' not in params or - params['mimetype'] is None): - raise ValueError("Missing the required parameter `mimetype` when calling `change_preview`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_preview`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `change_preview`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'mimetype' in params: - query_params.append(('mimetype', params['mimetype'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - if 'image' in params: - local_var_files['image'] = params['image'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/preview', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def change_template_metadata(self, body, enable, repository, node, **kwargs): # noqa: E501 - """Set the metadata template for this folder. # noqa: E501 - - All the given metadata will be inherited to child nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_template_metadata(body, enable, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties (required) - :param bool enable: Is the inherition currently enabled (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.change_template_metadata_with_http_info(body, enable, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.change_template_metadata_with_http_info(body, enable, repository, node, **kwargs) # noqa: E501 - return data - - def change_template_metadata_with_http_info(self, body, enable, repository, node, **kwargs): # noqa: E501 - """Set the metadata template for this folder. # noqa: E501 - - All the given metadata will be inherited to child nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.change_template_metadata_with_http_info(body, enable, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties (required) - :param bool enable: Is the inherition currently enabled (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'enable', 'repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method change_template_metadata" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `change_template_metadata`") # noqa: E501 - # verify the required parameter 'enable' is set - if ('enable' not in params or - params['enable'] is None): - raise ValueError("Missing the required parameter `enable` when calling `change_template_metadata`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `change_template_metadata`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `change_template_metadata`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'enable' in params: - query_params.append(('enable', params['enable'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/metadata/template', 'PUT', - path_params, - query_params, - header_params, - 
body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_child(self, body, type, repository, node, **kwargs): # noqa: E501 - """Create a new child. # noqa: E501 - - Create a new child. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_child(body, type, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str type: type of node (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node use -userhome- for userhome or -inbox- for inbox node (required) - :param list[str] aspects: aspects of node - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :param str assoc_type: Association type, can be empty - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_child_with_http_info(body, type, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.create_child_with_http_info(body, type, repository, node, **kwargs) # noqa: E501 - return data - - def create_child_with_http_info(self, body, type, repository, node, **kwargs): # noqa: E501 - """Create a new child. # noqa: E501 - - Create a new child. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_child_with_http_info(body, type, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str type: type of node (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node use -userhome- for userhome or -inbox- for inbox node (required) - :param list[str] aspects: aspects of node - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :param str assoc_type: Association type, can be empty - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
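# Usage sketch (not generated code): creating a child node with the create_child call
# documented above. Assumes `api` is an already-configured instance of the generated
# node API class; the node type value is an assumed example, not taken from this hunk.
def create_named_child_sketch(api, parent_node_id):
    # Property map taken from the docstring example: the cm:name property keyed by its
    # full namespace URI, with a list of values.
    properties = {"{http://www.alfresco.org/model/content/1.0}name": ["test"]}
    return api.create_child(
        body=properties,
        type="ccm:io",            # assumed example node type, not from this hunk
        repository="-home-",      # "-home-" = home repository (per docstring)
        node=parent_node_id,      # or "-userhome-" / "-inbox-" (per docstring)
        rename_if_exists=True,    # rename if a node with the same name exists
        version_comment="",       # leave empty = no initial version (per docstring)
    )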
- """ - - all_params = ['body', 'type', 'repository', 'node', 'aspects', 'rename_if_exists', 'version_comment', 'assoc_type'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_child" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_child`") # noqa: E501 - # verify the required parameter 'type' is set - if ('type' not in params or - params['type'] is None): - raise ValueError("Missing the required parameter `type` when calling `create_child`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_child`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `create_child`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'type' in params: - query_params.append(('type', params['type'])) # noqa: E501 - if 'aspects' in params: - query_params.append(('aspects', params['aspects'])) # noqa: E501 - collection_formats['aspects'] = 'multi' # noqa: E501 - if 'rename_if_exists' in params: - query_params.append(('renameIfExists', params['rename_if_exists'])) # noqa: E501 - if 'version_comment' in params: - query_params.append(('versionComment', params['version_comment'])) # noqa: E501 - if 'assoc_type' in params: - query_params.append(('assocType', params['assoc_type'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/children', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_child_by_copying(self, repository, node, source, with_children, **kwargs): # noqa: E501 - """Create a new child by copying. # noqa: E501 - - Create a new child by copying. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_child_by_copying(repository, node, source, with_children, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (required) - :param str source: ID of source node (required) - :param bool with_children: flag for children (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_child_by_copying_with_http_info(repository, node, source, with_children, **kwargs) # noqa: E501 - else: - (data) = self.create_child_by_copying_with_http_info(repository, node, source, with_children, **kwargs) # noqa: E501 - return data - - def create_child_by_copying_with_http_info(self, repository, node, source, with_children, **kwargs): # noqa: E501 - """Create a new child by copying. # noqa: E501 - - Create a new child by copying. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_child_by_copying_with_http_info(repository, node, source, with_children, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (required) - :param str source: ID of source node (required) - :param bool with_children: flag for children (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'source', 'with_children'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_child_by_copying" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_child_by_copying`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `create_child_by_copying`") # noqa: E501 - # verify the required parameter 'source' is set - if ('source' not in params or - params['source'] is None): - raise ValueError("Missing the required parameter `source` when calling `create_child_by_copying`") # noqa: E501 - # verify the required parameter 'with_children' is set - if ('with_children' not in params or - params['with_children'] is None): - raise ValueError("Missing the required parameter `with_children` when calling `create_child_by_copying`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'source' in params: - query_params.append(('source', params['source'])) # noqa: E501 - if 'with_children' in params: - 
query_params.append(('withChildren', params['with_children'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/children/_copy', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_child_by_moving(self, repository, node, source, **kwargs): # noqa: E501 - """Create a new child by moving. # noqa: E501 - - Create a new child by moving. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_child_by_moving(repository, node, source, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (required) - :param str source: ID of source node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_child_by_moving_with_http_info(repository, node, source, **kwargs) # noqa: E501 - else: - (data) = self.create_child_by_moving_with_http_info(repository, node, source, **kwargs) # noqa: E501 - return data - - def create_child_by_moving_with_http_info(self, repository, node, source, **kwargs): # noqa: E501 - """Create a new child by moving. # noqa: E501 - - Create a new child by moving. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_child_by_moving_with_http_info(repository, node, source, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (required) - :param str source: ID of source node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
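# Usage sketch (not generated code): copying a node under a new parent with
# create_child_by_copying; create_child_by_moving takes the same arguments minus
# with_children. `api` is assumed to be a configured instance of the generated node API class.
def copy_node_sketch(api, parent_node_id, source_node_id):
    return api.create_child_by_copying(
        repository="-home-",
        node=parent_node_id,       # ID of the parent node (per docstring)
        source=source_node_id,     # ID of the source node to copy
        with_children=True,        # also copy child nodes
    )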
- """ - - all_params = ['repository', 'node', 'source'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_child_by_moving" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_child_by_moving`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `create_child_by_moving`") # noqa: E501 - # verify the required parameter 'source' is set - if ('source' not in params or - params['source'] is None): - raise ValueError("Missing the required parameter `source` when calling `create_child_by_moving`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'source' in params: - query_params.append(('source', params['source'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/children/_move', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_fork_of_node(self, repository, node, source, with_children, **kwargs): # noqa: E501 - """Create a copy of a node by creating a forked version (variant). # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_fork_of_node(repository, node, source, with_children, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (required) - :param str source: ID of source node (required) - :param bool with_children: flag for children (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_fork_of_node_with_http_info(repository, node, source, with_children, **kwargs) # noqa: E501 - else: - (data) = self.create_fork_of_node_with_http_info(repository, node, source, with_children, **kwargs) # noqa: E501 - return data - - def create_fork_of_node_with_http_info(self, repository, node, source, with_children, **kwargs): # noqa: E501 - """Create a copy of a node by creating a forked version (variant). # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_fork_of_node_with_http_info(repository, node, source, with_children, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (required) - :param str source: ID of source node (required) - :param bool with_children: flag for children (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'source', 'with_children'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_fork_of_node" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_fork_of_node`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `create_fork_of_node`") # noqa: E501 - # verify the required parameter 'source' is set - if ('source' not in params or - params['source'] is None): - raise ValueError("Missing the required parameter `source` when calling `create_fork_of_node`") # noqa: E501 - # verify the required parameter 'with_children' is set - if ('with_children' not in params or - params['with_children'] is None): - raise ValueError("Missing the required parameter `with_children` when calling `create_fork_of_node`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'source' in params: - query_params.append(('source', params['source'])) # noqa: E501 - if 'with_children' in params: - query_params.append(('withChildren', params['with_children'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/children/_fork', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_share(self, repository, node, **kwargs): # noqa: E501 - """Create a share for a node. # noqa: E501 - - Create a new share for a node # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_share(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param int expiry_date: expiry date for this share, leave empty or -1 for unlimited - :param str password: password for this share, use none to not use a password - :return: NodeShare - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_share_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.create_share_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def create_share_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Create a share for a node. # noqa: E501 - - Create a new share for a node # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_share_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param int expiry_date: expiry date for this share, leave empty or -1 for unlimited - :param str password: password for this share, use none to not use a password - :return: NodeShare - If the method is called asynchronously, - returns the request thread. 
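# Usage sketch (not generated code): creating a share link for a node via create_share.
# `api` is assumed to be a configured instance of the generated node API class.
def share_node_sketch(api, node_id):
    # expiry_date=-1 means unlimited and omitting `password` leaves the share
    # unprotected, per the docstring above. Returns a NodeShare object.
    return api.create_share(
        repository="-home-",
        node=node_id,
        expiry_date=-1,
    )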
- """ - - all_params = ['repository', 'node', 'expiry_date', 'password'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_share" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_share`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `create_share`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'expiry_date' in params: - query_params.append(('expiryDate', params['expiry_date'])) # noqa: E501 - if 'password' in params: - query_params.append(('password', params['password'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/shares', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeShare', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete(self, repository, node, **kwargs): # noqa: E501 - """Delete node. # noqa: E501 - - Delete node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param bool recycle: move the node to recycle - :param str protocol: protocol - :param str store: store - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.delete_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def delete_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Delete node. # noqa: E501 - - Delete node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param bool recycle: move the node to recycle - :param str protocol: protocol - :param str store: store - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'recycle', 'protocol', 'store'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `delete`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'recycle' in params: - query_params.append(('recycle', params['recycle'])) # noqa: E501 - if 'protocol' in params: - query_params.append(('protocol', params['protocol'])) # noqa: E501 - if 'store' in params: - query_params.append(('store', params['store'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_preview(self, repository, node, **kwargs): # noqa: E501 - """Delete preview of node. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_preview(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
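# Usage sketch (not generated code): removing a node with the delete call documented
# above, moving it to the recycle bin instead of deleting it permanently.
# `api` is assumed to be a configured instance of the generated node API class.
def recycle_node_sketch(api, node_id):
    api.delete(
        repository="-home-",
        node=node_id,
        recycle=True,   # move the node to recycle rather than hard-delete (per docstring)
    )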
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_preview_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.delete_preview_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def delete_preview_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Delete preview of node. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_preview_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_preview" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_preview`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `delete_preview`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/preview', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_assocs(self, repository, node, direction, **kwargs): # noqa: E501 - """Get related nodes. # noqa: E501 - - Get nodes related based on an assoc. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_assocs(repository, node, direction, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str direction: Either where the given node should be the \"SOURCE\" or the \"TARGET\" (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param str assoc_name: Association name (e.g. ccm:forkio). - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_assocs_with_http_info(repository, node, direction, **kwargs) # noqa: E501 - else: - (data) = self.get_assocs_with_http_info(repository, node, direction, **kwargs) # noqa: E501 - return data - - def get_assocs_with_http_info(self, repository, node, direction, **kwargs): # noqa: E501 - """Get related nodes. # noqa: E501 - - Get nodes related based on an assoc. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_assocs_with_http_info(repository, node, direction, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str direction: Either where the given node should be the \"SOURCE\" or the \"TARGET\" (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param str assoc_name: Association name (e.g. ccm:forkio). - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'node', 'direction', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'assoc_name', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_assocs" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_assocs`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_assocs`") # noqa: E501 - # verify the required parameter 'direction' is set - if ('direction' not in params or - params['direction'] is None): - raise ValueError("Missing the required parameter `direction` when calling `get_assocs`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'direction' in params: - query_params.append(('direction', params['direction'])) # noqa: E501 - if 'assoc_name' in params: - query_params.append(('assocName', params['assoc_name'])) # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/assocs', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_children(self, repository, node, **kwargs): # noqa: E501 - """Get children of node. # noqa: E501 - - Get children of node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_children(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user) (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] filter: filter by type files,folders - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param str assoc_name: Filter for a specific association. May be empty - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_children_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_children_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_children_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get children of node. # noqa: E501 - - Get children of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_children_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user) (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] filter: filter by type files,folders - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param str assoc_name: Filter for a specific association. May be empty - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeEntries - If the method is called asynchronously, - returns the request thread. 
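# Usage sketch (not generated code): listing the children of a folder with get_children,
# using the pagination and filter parameters documented above. `api` is assumed to be a
# configured instance of the generated node API class; the sort property is an assumed example.
def list_children_sketch(api, parent_node_id):
    # Returns a NodeEntries result; "-userhome-" could be passed instead of an explicit
    # node id to address the current user's home directory (per docstring).
    return api.get_children(
        repository="-home-",
        node=parent_node_id,
        max_items=25,                  # maximum items per page
        skip_count=0,                  # skip a number of items
        filter=["files"],              # restrict to file nodes ("files"/"folders")
        sort_properties=["cm:name"],   # assumed sortable property, not from this hunk
        sort_ascending=[True],
        property_filter=["-all-"],     # "-all-" = all properties (per docstring)
    )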
- """ - - all_params = ['repository', 'node', 'max_items', 'skip_count', 'filter', 'sort_properties', 'sort_ascending', 'assoc_name', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_children" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_children`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_children`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 - collection_formats['filter'] = 'multi' # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'assoc_name' in params: - query_params.append(('assocName', params['assoc_name'])) # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/children', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_metadata(self, repository, node, **kwargs): # noqa: E501 - """Get metadata of node. # noqa: E501 - - Get metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_metadata(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_metadata_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_metadata_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_metadata_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get metadata of node. # noqa: E501 - - Get metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_metadata_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_metadata" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_metadata`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_metadata`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/metadata', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_nodes(self, repository, query, **kwargs): # noqa: E501 - """Searching nodes. # noqa: E501 - - Searching nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_nodes(repository, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str query: lucene query (required) - :param list[str] facettes: facettes - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResult - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_nodes_with_http_info(repository, query, **kwargs) # noqa: E501 - else: - (data) = self.get_nodes_with_http_info(repository, query, **kwargs) # noqa: E501 - return data - - def get_nodes_with_http_info(self, repository, query, **kwargs): # noqa: E501 - """Searching nodes. # noqa: E501 - - Searching nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_nodes_with_http_info(repository, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str query: lucene query (required) - :param list[str] facettes: facettes - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResult - If the method is called asynchronously, - returns the request thread. 
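# Usage sketch (not generated code): running a Lucene search with get_nodes, once
# synchronously and once with async_req=True, which per the docstrings above returns the
# request thread whose get() yields the SearchResult. `api` is assumed to be a configured
# instance of the generated node API class; the query string is only an illustrative example.
def search_nodes_sketch(api):
    query = '@cm\\:name:"test*"'   # assumed Lucene example query, not from this hunk
    # Synchronous call: returns the SearchResult directly.
    result = api.get_nodes(repository="-home-", query=query, max_items=10)
    # Asynchronous call: returns the request thread.
    thread = api.get_nodes(repository="-home-", query=query, max_items=10,
                           async_req=True)
    async_result = thread.get()
    return result, async_result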
- """ - - all_params = ['repository', 'query', 'facettes', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_nodes" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_nodes`") # noqa: E501 - # verify the required parameter 'query' is set - if ('query' not in params or - params['query'] is None): - raise ValueError("Missing the required parameter `query` when calling `get_nodes`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'query' in params: - query_params.append(('query', params['query'])) # noqa: E501 - if 'facettes' in params: - query_params.append(('facettes', params['facettes'])) # noqa: E501 - collection_formats['facettes'] = 'multi' # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResult', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_notify_list(self, repository, node, **kwargs): # noqa: E501 - """Get notifys (sharing history) of the node. # noqa: E501 - - Ordered by the time of each notify # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_notify_list(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: list[NotifyEntry] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_notify_list_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_notify_list_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_notify_list_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get notifys (sharing history) of the node. # noqa: E501 - - Ordered by the time of each notify # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_notify_list_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: list[NotifyEntry] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_notify_list" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_notify_list`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_notify_list`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/notifys', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[NotifyEntry]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_parents(self, repository, node, **kwargs): # noqa: E501 - """Get parents of node. # noqa: E501 - - Get all parents metadata + own metadata of node. 
Index 0 is always the current node # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_parents(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :param bool full_path: activate to return the full alfresco path, otherwise the path for the user home is resolved - :return: ParentEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_parents_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_parents_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_parents_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get parents of node. # noqa: E501 - - Get all parents metadata + own metadata of node. Index 0 is always the current node # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_parents_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :param bool full_path: activate to return the full alfresco path, otherwise the path for the user home is resolved - :return: ParentEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'node', 'property_filter', 'full_path'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_parents" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_parents`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_parents`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - if 'full_path' in params: - query_params.append(('fullPath', params['full_path'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/parents', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='ParentEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_permission(self, repository, node, **kwargs): # noqa: E501 - """Get all permission of node. # noqa: E501 - - Get all permission of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_permission(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodePermissionEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_permission_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_permission_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_permission_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get all permission of node. # noqa: E501 - - Get all permission of node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_permission_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodePermissionEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_permission" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_permission`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_permission`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/permissions', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodePermissionEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_shares(self, repository, node, **kwargs): # noqa: E501 - """Get shares of node. # noqa: E501 - - Get list of shares (via mail/token) for a node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_shares(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str email: Filter for a specific email or use LINK for link shares (Optional) - :return: list[NodeShare] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_shares_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_shares_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_shares_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get shares of node. 
# noqa: E501 - - Get list of shares (via mail/token) for a node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_shares_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str email: Filter for a specific email or use LINK for link shares (Optional) - :return: list[NodeShare] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'email'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_shares" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_shares`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_shares`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'email' in params: - query_params.append(('email', params['email'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/shares', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[NodeShare]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_template_metadata(self, repository, node, **kwargs): # noqa: E501 - """Get the metadata template + status for this folder. # noqa: E501 - - All the given metadata will be inherited to child nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_template_metadata(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_template_metadata_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_template_metadata_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_template_metadata_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get the metadata template + status for this folder. # noqa: E501 - - All the given metadata will be inherited to child nodes. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_template_metadata_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_template_metadata" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_template_metadata`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_template_metadata`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/metadata/template', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_text_content(self, repository, node, **kwargs): # noqa: E501 - """Get the text content of a document. # noqa: E501 - - May fails with 500 if the node can not be read. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_text_content(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeText - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_text_content_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_text_content_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_text_content_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get the text content of a document. # noqa: E501 - - May fails with 500 if the node can not be read. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_text_content_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeText - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_text_content" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_text_content`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_text_content`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/textContent', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeText', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_version_metadata(self, repository, node, major, minor, **kwargs): # noqa: E501 - """Get metadata of node version. # noqa: E501 - - Get metadata of node version. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_version_metadata(repository, node, major, minor, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param int major: major version (required) - :param int minor: minor version (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeVersionEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_version_metadata_with_http_info(repository, node, major, minor, **kwargs) # noqa: E501 - else: - (data) = self.get_version_metadata_with_http_info(repository, node, major, minor, **kwargs) # noqa: E501 - return data - - def get_version_metadata_with_http_info(self, repository, node, major, minor, **kwargs): # noqa: E501 - """Get metadata of node version. # noqa: E501 - - Get metadata of node version. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_version_metadata_with_http_info(repository, node, major, minor, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param int major: major version (required) - :param int minor: minor version (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: NodeVersionEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'node', 'major', 'minor', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_version_metadata" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_version_metadata`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_version_metadata`") # noqa: E501 - # verify the required parameter 'major' is set - if ('major' not in params or - params['major'] is None): - raise ValueError("Missing the required parameter `major` when calling `get_version_metadata`") # noqa: E501 - # verify the required parameter 'minor' is set - if ('minor' not in params or - params['minor'] is None): - raise ValueError("Missing the required parameter `minor` when calling `get_version_metadata`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - if 'major' in params: - path_params['major'] = params['major'] # noqa: E501 - if 'minor' in params: - path_params['minor'] = params['minor'] # noqa: E501 - - query_params = [] - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/metadata', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeVersionEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_versions(self, repository, node, **kwargs): # noqa: E501 - """Get all versions of node. # noqa: E501 - - Get all versions of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_versions(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeVersionRefEntries - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_versions_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_versions_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_versions_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get all versions of node. # noqa: E501 - - Get all versions of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_versions_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeVersionRefEntries - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_versions" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_versions`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_versions`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/versions', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeVersionRefEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_workflow_history(self, repository, node, **kwargs): # noqa: E501 - """Get workflow history. # noqa: E501 - - Get workflow history of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_workflow_history(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: list[WorkflowHistory] - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_workflow_history_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_workflow_history_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_workflow_history_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get workflow history. # noqa: E501 - - Get workflow history of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_workflow_history_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: list[WorkflowHistory] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_workflow_history" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_workflow_history`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_workflow_history`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/workflow', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[WorkflowHistory]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def has_permission(self, repository, node, user, **kwargs): # noqa: E501 - """Which permissions has user/group for node. # noqa: E501 - - Check for actual permissions (also when user is in groups) for a specific node # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.has_permission(repository, node, user, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str user: Authority (user/group) to check (use \"-me-\" for current user (required) - :return: list[str] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.has_permission_with_http_info(repository, node, user, **kwargs) # noqa: E501 - else: - (data) = self.has_permission_with_http_info(repository, node, user, **kwargs) # noqa: E501 - return data - - def has_permission_with_http_info(self, repository, node, user, **kwargs): # noqa: E501 - """Which permissions has user/group for node. # noqa: E501 - - Check for actual permissions (also when user is in groups) for a specific node # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.has_permission_with_http_info(repository, node, user, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str user: Authority (user/group) to check (use \"-me-\" for current user (required) - :return: list[str] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'user'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method has_permission" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `has_permission`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `has_permission`") # noqa: E501 - # verify the required parameter 'user' is set - if ('user' not in params or - params['user'] is None): - raise ValueError("Missing the required parameter `user` when calling `has_permission`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - if 'user' in params: - path_params['user'] = params['user'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/permissions/{user}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, 
- response_type='list[str]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def import_node(self, repository, node, parent, **kwargs): # noqa: E501 - """Import node # noqa: E501 - - Import a node from a foreign repository to the local repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_node(repository, node, parent, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: The id of the foreign repository (required) - :param str node: ID of node (required) - :param str parent: Parent node where to store it locally, may also use -userhome- or -inbox- (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.import_node_with_http_info(repository, node, parent, **kwargs) # noqa: E501 - else: - (data) = self.import_node_with_http_info(repository, node, parent, **kwargs) # noqa: E501 - return data - - def import_node_with_http_info(self, repository, node, parent, **kwargs): # noqa: E501 - """Import node # noqa: E501 - - Import a node from a foreign repository to the local repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.import_node_with_http_info(repository, node, parent, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: The id of the foreign repository (required) - :param str node: ID of node (required) - :param str parent: Parent node where to store it locally, may also use -userhome- or -inbox- (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'node', 'parent'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method import_node" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `import_node`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `import_node`") # noqa: E501 - # verify the required parameter 'parent' is set - if ('parent' not in params or - params['parent'] is None): - raise ValueError("Missing the required parameter `parent` when calling `import_node`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'parent' in params: - query_params.append(('parent', params['parent'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/import', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def islocked(self, repository, node, **kwargs): # noqa: E501 - """locked status of a node. # noqa: E501 - - locked status of a node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.islocked(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeLocked - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.islocked_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.islocked_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def islocked_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """locked status of a node. # noqa: E501 - - locked status of a node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.islocked_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeLocked - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method islocked" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `islocked`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `islocked`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/lock/status', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeLocked', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def prepare_usage(self, repository, node, **kwargs): # noqa: E501 - """create remote object and get properties. # noqa: E501 - - create remote object and get properties. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.prepare_usage(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeRemote - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.prepare_usage_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.prepare_usage_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def prepare_usage_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """create remote object and get properties. # noqa: E501 - - create remote object and get properties. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.prepare_usage_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: NodeRemote - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method prepare_usage" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `prepare_usage`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `prepare_usage`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/prepareUsage', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeRemote', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_share(self, repository, node, share_id, **kwargs): # noqa: E501 - """Remove share of a node. # noqa: E501 - - Remove the specified share id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_share(repository, node, share_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share_id: share id (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_share_with_http_info(repository, node, share_id, **kwargs) # noqa: E501 - else: - (data) = self.remove_share_with_http_info(repository, node, share_id, **kwargs) # noqa: E501 - return data - - def remove_share_with_http_info(self, repository, node, share_id, **kwargs): # noqa: E501 - """Remove share of a node. 
# noqa: E501 - - Remove the specified share id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_share_with_http_info(repository, node, share_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share_id: share id (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'share_id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_share" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `remove_share`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `remove_share`") # noqa: E501 - # verify the required parameter 'share_id' is set - if ('share_id' not in params or - params['share_id'] is None): - raise ValueError("Missing the required parameter `share_id` when calling `remove_share`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - if 'share_id' in params: - path_params['shareId'] = params['share_id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/shares/{shareId}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def report_node(self, repository, node, reason, user_email, **kwargs): # noqa: E501 - """Report the node. # noqa: E501 - - Report a node to notify the admin about an issue) # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.report_node(repository, node, reason, user_email, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str reason: the reason for the report (required) - :param str user_email: mail of reporting user (required) - :param str user_comment: additional user comment - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.report_node_with_http_info(repository, node, reason, user_email, **kwargs) # noqa: E501 - else: - (data) = self.report_node_with_http_info(repository, node, reason, user_email, **kwargs) # noqa: E501 - return data - - def report_node_with_http_info(self, repository, node, reason, user_email, **kwargs): # noqa: E501 - """Report the node. # noqa: E501 - - Report a node to notify the admin about an issue) # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.report_node_with_http_info(repository, node, reason, user_email, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str reason: the reason for the report (required) - :param str user_email: mail of reporting user (required) - :param str user_comment: additional user comment - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'reason', 'user_email', 'user_comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method report_node" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `report_node`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `report_node`") # noqa: E501 - # verify the required parameter 'reason' is set - if ('reason' not in params or - params['reason'] is None): - raise ValueError("Missing the required parameter `reason` when calling `report_node`") # noqa: E501 - # verify the required parameter 'user_email' is set - if ('user_email' not in params or - params['user_email'] is None): - raise ValueError("Missing the required parameter `user_email` when calling `report_node`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'reason' in params: - query_params.append(('reason', params['reason'])) # noqa: E501 - if 'user_email' in params: - query_params.append(('userEmail', 
params['user_email'])) # noqa: E501 - if 'user_comment' in params: - query_params.append(('userComment', params['user_comment'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/report', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def revert_version(self, repository, node, major, minor, **kwargs): # noqa: E501 - """Revert to node version. # noqa: E501 - - Revert to node version. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.revert_version(repository, node, major, minor, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param int major: major version (required) - :param int minor: minor version (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.revert_version_with_http_info(repository, node, major, minor, **kwargs) # noqa: E501 - else: - (data) = self.revert_version_with_http_info(repository, node, major, minor, **kwargs) # noqa: E501 - return data - - def revert_version_with_http_info(self, repository, node, major, minor, **kwargs): # noqa: E501 - """Revert to node version. # noqa: E501 - - Revert to node version. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.revert_version_with_http_info(repository, node, major, minor, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param int major: major version (required) - :param int minor: minor version (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'node', 'major', 'minor'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method revert_version" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `revert_version`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `revert_version`") # noqa: E501 - # verify the required parameter 'major' is set - if ('major' not in params or - params['major'] is None): - raise ValueError("Missing the required parameter `major` when calling `revert_version`") # noqa: E501 - # verify the required parameter 'minor' is set - if ('minor' not in params or - params['minor'] is None): - raise ValueError("Missing the required parameter `minor` when calling `revert_version`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - if 'major' in params: - path_params['major'] = params['major'] # noqa: E501 - if 'minor' in params: - path_params['minor'] = params['minor'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/_revert', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_owner(self, repository, node, **kwargs): # noqa: E501 - """Set owner of node. # noqa: E501 - - Set owner of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_owner(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str username: username - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_owner_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.set_owner_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def set_owner_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Set owner of node. # noqa: E501 - - Set owner of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_owner_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str username: username - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'username'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_owner" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `set_owner`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `set_owner`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'username' in params: - query_params.append(('username', params['username'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/owner', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_permission(self, body, send_mail, send_copy, repository, node, **kwargs): # noqa: E501 - """Set local permissions of node. # noqa: E501 - - Set local permissions of node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_permission(body, send_mail, send_copy, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param ACL body: permissions (required) - :param bool send_mail: sendMail (required) - :param bool send_copy: sendCopy (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str mailtext: mailtext - :param bool create_handle: createHandle - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_permission_with_http_info(body, send_mail, send_copy, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.set_permission_with_http_info(body, send_mail, send_copy, repository, node, **kwargs) # noqa: E501 - return data - - def set_permission_with_http_info(self, body, send_mail, send_copy, repository, node, **kwargs): # noqa: E501 - """Set local permissions of node. # noqa: E501 - - Set local permissions of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_permission_with_http_info(body, send_mail, send_copy, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param ACL body: permissions (required) - :param bool send_mail: sendMail (required) - :param bool send_copy: sendCopy (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str mailtext: mailtext - :param bool create_handle: createHandle - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'send_mail', 'send_copy', 'repository', 'node', 'mailtext', 'create_handle'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_permission" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `set_permission`") # noqa: E501 - # verify the required parameter 'send_mail' is set - if ('send_mail' not in params or - params['send_mail'] is None): - raise ValueError("Missing the required parameter `send_mail` when calling `set_permission`") # noqa: E501 - # verify the required parameter 'send_copy' is set - if ('send_copy' not in params or - params['send_copy'] is None): - raise ValueError("Missing the required parameter `send_copy` when calling `set_permission`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `set_permission`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `set_permission`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'mailtext' in params: - query_params.append(('mailtext', params['mailtext'])) # noqa: E501 - if 'send_mail' in params: - query_params.append(('sendMail', params['send_mail'])) # noqa: E501 - if 'send_copy' in params: - query_params.append(('sendCopy', params['send_copy'])) # noqa: E501 - if 'create_handle' in params: - query_params.append(('createHandle', params['create_handle'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/permissions', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def set_property(self, repository, node, _property, **kwargs): # noqa: E501 - """Set single property of node. # noqa: E501 - - When the property is unset (null), it will be removed # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_property(repository, node, _property, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str _property: property (required) - :param str value: value - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_property_with_http_info(repository, node, _property, **kwargs) # noqa: E501 - else: - (data) = self.set_property_with_http_info(repository, node, _property, **kwargs) # noqa: E501 - return data - - def set_property_with_http_info(self, repository, node, _property, **kwargs): # noqa: E501 - """Set single property of node. # noqa: E501 - - When the property is unset (null), it will be removed # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.set_property_with_http_info(repository, node, _property, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str _property: property (required) - :param str value: value - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', '_property', 'value'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method set_property" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `set_property`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `set_property`") # noqa: E501 - # verify the required parameter '_property' is set - if ('_property' not in params or - params['_property'] is None): - raise ValueError("Missing the required parameter `_property` when calling `set_property`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if '_property' in params: - query_params.append(('property', params['_property'])) # noqa: E501 - if 'value' in params: - query_params.append(('value', params['value'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/property', 'POST', - path_params, - query_params, - header_params, - body=body_params, - 
post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def store_x_api_data(self, body, repository, node, **kwargs): # noqa: E501 - """Store xApi-Conform data for a given node # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.store_x_api_data(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: xApi conform json data (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: object - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.store_x_api_data_with_http_info(body, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.store_x_api_data_with_http_info(body, repository, node, **kwargs) # noqa: E501 - return data - - def store_x_api_data_with_http_info(self, body, repository, node, **kwargs): # noqa: E501 - """Store xApi-Conform data for a given node # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.store_x_api_data_with_http_info(body, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: xApi conform json data (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: object - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method store_x_api_data" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `store_x_api_data`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `store_x_api_data`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `store_x_api_data`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/xapi', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='object', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def unlock(self, repository, node, **kwargs): # noqa: E501 - """unlock node. # noqa: E501 - - unlock node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.unlock(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.unlock_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.unlock_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def unlock_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """unlock node. # noqa: E501 - - unlock node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.unlock_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method unlock" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `unlock`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `unlock`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/lock/unlock', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_share(self, repository, node, share_id, **kwargs): # noqa: E501 - """update share of a node. # noqa: E501 - - update the specified share id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_share(repository, node, share_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share_id: share id (required) - :param int expiry_date: expiry date for this share, leave empty or -1 for unlimited - :param str password: new password for share, leave empty if you don't want to change it - :return: NodeShare - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_share_with_http_info(repository, node, share_id, **kwargs) # noqa: E501 - else: - (data) = self.update_share_with_http_info(repository, node, share_id, **kwargs) # noqa: E501 - return data - - def update_share_with_http_info(self, repository, node, share_id, **kwargs): # noqa: E501 - """update share of a node. # noqa: E501 - - update the specified share id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_share_with_http_info(repository, node, share_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share_id: share id (required) - :param int expiry_date: expiry date for this share, leave empty or -1 for unlimited - :param str password: new password for share, leave empty if you don't want to change it - :return: NodeShare - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'share_id', 'expiry_date', 'password'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_share" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `update_share`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `update_share`") # noqa: E501 - # verify the required parameter 'share_id' is set - if ('share_id' not in params or - params['share_id'] is None): - raise ValueError("Missing the required parameter `share_id` when calling `update_share`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - if 'share_id' in params: - path_params['shareId'] = params['share_id'] # noqa: E501 - - query_params = [] - if 'expiry_date' in params: - query_params.append(('expiryDate', params['expiry_date'])) # noqa: E501 - if 'password' in params: - query_params.append(('password', params['password'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/node/v1/nodes/{repository}/{node}/shares/{shareId}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeShare', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/organization_v1_api.py b/edu_sharing_client/api/organization_v1_api.py deleted file mode 100644 index bf2772f0..00000000 --- a/edu_sharing_client/api/organization_v1_api.py +++ /dev/null @@ -1,578 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class ORGANIZATIONV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def create_organizations(self, repository, organization, **kwargs): # noqa: E501 - """create organization in repository. # noqa: E501 - - create organization in repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_organizations(repository, organization, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: organization name (required) - :param str eduscope: eduscope (may be null) - :return: Organization - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_organizations_with_http_info(repository, organization, **kwargs) # noqa: E501 - else: - (data) = self.create_organizations_with_http_info(repository, organization, **kwargs) # noqa: E501 - return data - - def create_organizations_with_http_info(self, repository, organization, **kwargs): # noqa: E501 - """create organization in repository. # noqa: E501 - - create organization in repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_organizations_with_http_info(repository, organization, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: organization name (required) - :param str eduscope: eduscope (may be null) - :return: Organization - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'organization', 'eduscope'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_organizations" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_organizations`") # noqa: E501 - # verify the required parameter 'organization' is set - if ('organization' not in params or - params['organization'] is None): - raise ValueError("Missing the required parameter `organization` when calling `create_organizations`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'organization' in params: - path_params['organization'] = params['organization'] # noqa: E501 - - query_params = [] - if 'eduscope' in params: - query_params.append(('eduscope', params['eduscope'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/organization/v1/organizations/{repository}/{organization}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Organization', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_organizations(self, repository, organization, **kwargs): # noqa: E501 - """Delete organization of repository. # noqa: E501 - - Delete organization of repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_organizations(repository, organization, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: groupname (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_organizations_with_http_info(repository, organization, **kwargs) # noqa: E501 - else: - (data) = self.delete_organizations_with_http_info(repository, organization, **kwargs) # noqa: E501 - return data - - def delete_organizations_with_http_info(self, repository, organization, **kwargs): # noqa: E501 - """Delete organization of repository. # noqa: E501 - - Delete organization of repository. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_organizations_with_http_info(repository, organization, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: groupname (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'organization'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_organizations" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_organizations`") # noqa: E501 - # verify the required parameter 'organization' is set - if ('organization' not in params or - params['organization'] is None): - raise ValueError("Missing the required parameter `organization` when calling `delete_organizations`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'organization' in params: - path_params['organization'] = params['organization'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/organization/v1/organizations/{repository}/{organization}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_organization(self, repository, organization, **kwargs): # noqa: E501 - """Get organization by id. # noqa: E501 - - Get organization by id. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_organization(repository, organization, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: ID of organization (required) - :return: Organization - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_organization_with_http_info(repository, organization, **kwargs) # noqa: E501 - else: - (data) = self.get_organization_with_http_info(repository, organization, **kwargs) # noqa: E501 - return data - - def get_organization_with_http_info(self, repository, organization, **kwargs): # noqa: E501 - """Get organization by id. # noqa: E501 - - Get organization by id. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_organization_with_http_info(repository, organization, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: ID of organization (required) - :return: Organization - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'organization'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_organization" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_organization`") # noqa: E501 - # verify the required parameter 'organization' is set - if ('organization' not in params or - params['organization'] is None): - raise ValueError("Missing the required parameter `organization` when calling `get_organization`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'organization' in params: - path_params['organization'] = params['organization'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/organization/v1/organizations/{repository}/{organization}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Organization', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_organizations(self, repository, **kwargs): # noqa: E501 - """Get organizations of repository. # noqa: E501 - - Get organizations of repository the current user is member. May returns an empty list. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_organizations(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param bool only_memberships: search only in memberships, false can only be done by admin - :return: OrganizationEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_organizations_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.get_organizations_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def get_organizations_with_http_info(self, repository, **kwargs): # noqa: E501 - """Get organizations of repository. # noqa: E501 - - Get organizations of repository the current user is member. May returns an empty list. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_organizations_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str pattern: pattern - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param bool only_memberships: search only in memberships, false can only be done by admin - :return: OrganizationEntries - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'pattern', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'only_memberships'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_organizations" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_organizations`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'pattern' in params: - query_params.append(('pattern', params['pattern'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'only_memberships' in params: - query_params.append(('onlyMemberships', params['only_memberships'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/organization/v1/organizations/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='OrganizationEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def remove_from_organization(self, repository, organization, member, **kwargs): # noqa: E501 - """Remove member from organization. # noqa: E501 - - Remove member from organization. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_from_organization(repository, organization, member, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: groupname (required) - :param str member: authorityName of member (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.remove_from_organization_with_http_info(repository, organization, member, **kwargs) # noqa: E501 - else: - (data) = self.remove_from_organization_with_http_info(repository, organization, member, **kwargs) # noqa: E501 - return data - - def remove_from_organization_with_http_info(self, repository, organization, member, **kwargs): # noqa: E501 - """Remove member from organization. # noqa: E501 - - Remove member from organization. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.remove_from_organization_with_http_info(repository, organization, member, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str organization: groupname (required) - :param str member: authorityName of member (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'organization', 'member'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method remove_from_organization" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `remove_from_organization`") # noqa: E501 - # verify the required parameter 'organization' is set - if ('organization' not in params or - params['organization'] is None): - raise ValueError("Missing the required parameter `organization` when calling `remove_from_organization`") # noqa: E501 - # verify the required parameter 'member' is set - if ('member' not in params or - params['member'] is None): - raise ValueError("Missing the required parameter `member` when calling `remove_from_organization`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'organization' in params: - path_params['organization'] = params['organization'] # noqa: E501 - if 'member' in params: - path_params['member'] = params['member'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/organization/v1/organizations/{repository}/{organization}/member/{member}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/rating_v1_api.py 
b/edu_sharing_client/api/rating_v1_api.py deleted file mode 100644 index 619bc152..00000000 --- a/edu_sharing_client/api/rating_v1_api.py +++ /dev/null @@ -1,259 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class RATINGV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_or_update_rating(self, body, rating, repository, node, **kwargs): # noqa: E501 - """create or update a rating # noqa: E501 - - Adds the rating. If the current user already rated that element, the rating will be altered # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_or_update_rating(body, rating, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Text content of rating (required) - :param float rating: The rating (usually in range 1-5) (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_or_update_rating_with_http_info(body, rating, repository, node, **kwargs) # noqa: E501 - else: - (data) = self.add_or_update_rating_with_http_info(body, rating, repository, node, **kwargs) # noqa: E501 - return data - - def add_or_update_rating_with_http_info(self, body, rating, repository, node, **kwargs): # noqa: E501 - """create or update a rating # noqa: E501 - - Adds the rating. If the current user already rated that element, the rating will be altered # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_or_update_rating_with_http_info(body, rating, repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str body: Text content of rating (required) - :param float rating: The rating (usually in range 1-5) (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'rating', 'repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_or_update_rating" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `add_or_update_rating`") # noqa: E501 - # verify the required parameter 'rating' is set - if ('rating' not in params or - params['rating'] is None): - raise ValueError("Missing the required parameter `rating` when calling `add_or_update_rating`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_or_update_rating`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `add_or_update_rating`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'rating' in params: - query_params.append(('rating', params['rating'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/rating/v1/ratings/{repository}/{node}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_rating(self, repository, node, **kwargs): # noqa: E501 - """delete a comment # noqa: E501 - - Delete the comment with the given id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_rating(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_rating_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.delete_rating_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def delete_rating_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """delete a comment # noqa: E501 - - Delete the comment with the given id # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_rating_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_rating" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_rating`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `delete_rating`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/rating/v1/ratings/{repository}/{node}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/register_v1_api.py b/edu_sharing_client/api/register_v1_api.py deleted file mode 100644 index 4b637c5e..00000000 --- a/edu_sharing_client/api/register_v1_api.py +++ /dev/null @@ -1,601 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class REGISTERV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def activate(self, key, **kwargs): # noqa: E501 - """Activate a new user (by using a supplied key) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.activate(key, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str key: The key for the user to activate (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.activate_with_http_info(key, **kwargs) # noqa: E501 - else: - (data) = self.activate_with_http_info(key, **kwargs) # noqa: E501 - return data - - def activate_with_http_info(self, key, **kwargs): # noqa: E501 - """Activate a new user (by using a supplied key) # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.activate_with_http_info(key, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str key: The key for the user to activate (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['key'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method activate" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'key' is set - if ('key' not in params or - params['key'] is None): - raise ValueError("Missing the required parameter `key` when calling `activate`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'key' in params: - path_params['key'] = params['key'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/register/v1/activate/{key}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def mail_exists(self, mail, **kwargs): # noqa: E501 - """Check if the given mail is already successfully registered # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.mail_exists(mail, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mail: The mail (authority) of the user to check (required) - :return: RegisterExists - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.mail_exists_with_http_info(mail, **kwargs) # noqa: E501 - else: - (data) = self.mail_exists_with_http_info(mail, **kwargs) # noqa: E501 - return data - - def mail_exists_with_http_info(self, mail, **kwargs): # noqa: E501 - """Check if the given mail is already successfully registered # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.mail_exists_with_http_info(mail, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mail: The mail (authority) of the user to check (required) - :return: RegisterExists - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['mail'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method mail_exists" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mail' is set - if ('mail' not in params or - params['mail'] is None): - raise ValueError("Missing the required parameter `mail` when calling `mail_exists`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'mail' in params: - path_params['mail'] = params['mail'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/register/v1/exists/{mail}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='RegisterExists', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def recover_password(self, mail, **kwargs): # noqa: E501 - """Send a mail to recover/reset password # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.recover_password(mail, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mail: The mail (authority) of the user to recover (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.recover_password_with_http_info(mail, **kwargs) # noqa: E501 - else: - (data) = self.recover_password_with_http_info(mail, **kwargs) # noqa: E501 - return data - - def recover_password_with_http_info(self, mail, **kwargs): # noqa: E501 - """Send a mail to recover/reset password # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.recover_password_with_http_info(mail, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mail: The mail (authority) of the user to recover (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['mail'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method recover_password" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mail' is set - if ('mail' not in params or - params['mail'] is None): - raise ValueError("Missing the required parameter `mail` when calling `recover_password`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'mail' in params: - path_params['mail'] = params['mail'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/register/v1/recover/{mail}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def register(self, **kwargs): # noqa: E501 - """Register a new user # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.register(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param RegisterInformation body: - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.register_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.register_with_http_info(**kwargs) # noqa: E501 - return data - - def register_with_http_info(self, **kwargs): # noqa: E501 - """Register a new user # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.register_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param RegisterInformation body: - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method register" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/register/v1/register', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def resend_mail(self, mail, **kwargs): # noqa: E501 - """Resend a registration mail for a given mail address # noqa: E501 - - The method will return false if there is no pending registration for the given mail # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.resend_mail(mail, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mail: The mail a registration is pending for and should be resend to (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.resend_mail_with_http_info(mail, **kwargs) # noqa: E501 - else: - (data) = self.resend_mail_with_http_info(mail, **kwargs) # noqa: E501 - return data - - def resend_mail_with_http_info(self, mail, **kwargs): # noqa: E501 - """Resend a registration mail for a given mail address # noqa: E501 - - The method will return false if there is no pending registration for the given mail # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.resend_mail_with_http_info(mail, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str mail: The mail a registration is pending for and should be resend to (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['mail'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method resend_mail" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'mail' is set - if ('mail' not in params or - params['mail'] is None): - raise ValueError("Missing the required parameter `mail` when calling `resend_mail`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'mail' in params: - path_params['mail'] = params['mail'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/register/v1/resend/{mail}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def reset_password(self, key, password, **kwargs): # noqa: E501 - """Send a mail to recover/reset password # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.reset_password(key, password, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str key: The key for the password reset request (required) - :param str password: The new password for the user (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.reset_password_with_http_info(key, password, **kwargs) # noqa: E501 - else: - (data) = self.reset_password_with_http_info(key, password, **kwargs) # noqa: E501 - return data - - def reset_password_with_http_info(self, key, password, **kwargs): # noqa: E501 - """Send a mail to recover/reset password # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.reset_password_with_http_info(key, password, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str key: The key for the password reset request (required) - :param str password: The new password for the user (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['key', 'password'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method reset_password" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'key' is set - if ('key' not in params or - params['key'] is None): - raise ValueError("Missing the required parameter `key` when calling `reset_password`") # noqa: E501 - # verify the required parameter 'password' is set - if ('password' not in params or - params['password'] is None): - raise ValueError("Missing the required parameter `password` when calling `reset_password`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'key' in params: - path_params['key'] = params['key'] # noqa: E501 - if 'password' in params: - path_params['password'] = params['password'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/register/v1/reset/{key}/{password}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/rendering_v1_api.py b/edu_sharing_client/api/rendering_v1_api.py deleted file mode 100644 index 9e5fd60b..00000000 --- a/edu_sharing_client/api/rendering_v1_api.py +++ /dev/null @@ -1,263 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class RENDERINGV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def get_details_snippet(self, repository, node, **kwargs): # noqa: E501 - """Get metadata of node. # noqa: E501 - - Get metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_details_snippet(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str version: version of node - :param str display_mode: Rendering displayMode - :return: RenderingDetailsEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_details_snippet_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_details_snippet_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_details_snippet_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get metadata of node. # noqa: E501 - - Get metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_details_snippet_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str version: version of node - :param str display_mode: Rendering displayMode - :return: RenderingDetailsEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'version', 'display_mode'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_details_snippet" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_details_snippet`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_details_snippet`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'version' in params: - query_params.append(('version', params['version'])) # noqa: E501 - if 'display_mode' in params: - query_params.append(('displayMode', params['display_mode'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/rendering/v1/details/{repository}/{node}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='RenderingDetailsEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - 
_return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_details_snippet_with_parameters(self, repository, node, **kwargs): # noqa: E501 - """Get metadata of node. # noqa: E501 - - Get metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_details_snippet_with_parameters(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param dict(str, str) body: additional parameters to send to the rendering service - :param str version: version of node - :param str display_mode: Rendering displayMode - :return: RenderingDetailsEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_details_snippet_with_parameters_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.get_details_snippet_with_parameters_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def get_details_snippet_with_parameters_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """Get metadata of node. # noqa: E501 - - Get metadata of node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_details_snippet_with_parameters_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param dict(str, str) body: additional parameters to send to the rendering service - :param str version: version of node - :param str display_mode: Rendering displayMode - :return: RenderingDetailsEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'node', 'body', 'version', 'display_mode'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_details_snippet_with_parameters" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_details_snippet_with_parameters`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_details_snippet_with_parameters`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - if 'version' in params: - query_params.append(('version', params['version'])) # noqa: E501 - if 'display_mode' in params: - query_params.append(('displayMode', params['display_mode'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/rendering/v1/details/{repository}/{node}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='RenderingDetailsEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/search_v1_api.py b/edu_sharing_client/api/search_v1_api.py deleted file mode 100644 index 047e6579..00000000 --- a/edu_sharing_client/api/search_v1_api.py +++ /dev/null @@ -1,821 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class SEARCHV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def get_relevant_nodes(self, repository, **kwargs): # noqa: E501 - """Get relevant nodes for the current user # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_relevant_nodes(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_relevant_nodes_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.get_relevant_nodes_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def get_relevant_nodes_with_http_info(self, repository, **kwargs): # noqa: E501 - """Get relevant nodes for the current user # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_relevant_nodes_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'property_filter', 'max_items', 'skip_count'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_relevant_nodes" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_relevant_nodes`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/search/v1/relevant/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResultNode', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def load_save_search(self, node_id, **kwargs): # noqa: E501 - """Load a saved search query. # noqa: E501 - - Load a saved search query. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.load_save_search(node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: Node id of the search item (required) - :param list[str] body: facettes - :param str content_type: Type of element - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: Node - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.load_save_search_with_http_info(node_id, **kwargs) # noqa: E501 - else: - (data) = self.load_save_search_with_http_info(node_id, **kwargs) # noqa: E501 - return data - - def load_save_search_with_http_info(self, node_id, **kwargs): # noqa: E501 - """Load a saved search query. # noqa: E501 - - Load a saved search query. 
# noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.load_save_search_with_http_info(node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: Node id of the search item (required) - :param list[str] body: facettes - :param str content_type: Type of element - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: Node - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['node_id', 'body', 'content_type', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method load_save_search" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'node_id' is set - if ('node_id' not in params or - params['node_id'] is None): - raise ValueError("Missing the required parameter `node_id` when calling `load_save_search`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'node_id' in params: - path_params['nodeId'] = params['node_id'] # noqa: E501 - - query_params = [] - if 'content_type' in params: - query_params.append(('contentType', params['content_type'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/search/v1/queriesV2/load/{nodeId}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Node', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def save_search(self, body, name, repository, metadataset, query, **kwargs): # noqa: E501 - """Save a search query. # noqa: E501 - - Save a search query. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.save_search(body, name, repository, metadataset, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[MdsQueryCriteria] body: search parameters (required) - :param str name: Name of the new search item (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :param str query: ID of query (required) - :param bool replace: Replace if search with the same name exists - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.save_search_with_http_info(body, name, repository, metadataset, query, **kwargs) # noqa: E501 - else: - (data) = self.save_search_with_http_info(body, name, repository, metadataset, query, **kwargs) # noqa: E501 - return data - - def save_search_with_http_info(self, body, name, repository, metadataset, query, **kwargs): # noqa: E501 - """Save a search query. # noqa: E501 - - Save a search query. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.save_search_with_http_info(body, name, repository, metadataset, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param list[MdsQueryCriteria] body: search parameters (required) - :param str name: Name of the new search item (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :param str query: ID of query (required) - :param bool replace: Replace if search with the same name exists - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'name', 'repository', 'metadataset', 'query', 'replace'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method save_search" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `save_search`") # noqa: E501 - # verify the required parameter 'name' is set - if ('name' not in params or - params['name'] is None): - raise ValueError("Missing the required parameter `name` when calling `save_search`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `save_search`") # noqa: E501 - # verify the required parameter 'metadataset' is set - if ('metadataset' not in params or - params['metadataset'] is None): - raise ValueError("Missing the required parameter `metadataset` when calling `save_search`") # noqa: E501 - # verify the required parameter 'query' is set - if ('query' not in params or - params['query'] is None): - raise ValueError("Missing the required parameter `query` when calling `save_search`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'metadataset' in params: - path_params['metadataset'] = params['metadataset'] # noqa: E501 - if 'query' in params: - path_params['query'] = params['query'] # noqa: E501 - - query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 - if 'replace' in params: - query_params.append(('replace', params['replace'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/search/v1/queriesV2/{repository}/{metadataset}/{query}/save', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search_by_property(self, repository, **kwargs): # noqa: E501 - """Search for custom properties with custom values # noqa: E501 - - e.g. property=cm:name, value:*Test* # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_by_property(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str content_type: Type of element - :param str combine_mode: Combine mode, AND or OR, defaults to AND - :param list[str] _property: One (or more) properties to search for, will be combined by specified combine mode - :param list[str] value: One (or more) values to search for, matching the properties defined before - :param list[str] comparator: (Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >= - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_by_property_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.search_by_property_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def search_by_property_with_http_info(self, repository, **kwargs): # noqa: E501 - """Search for custom properties with custom values # noqa: E501 - - e.g. property=cm:name, value:*Test* # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_by_property_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str content_type: Type of element - :param str combine_mode: Combine mode, AND or OR, defaults to AND - :param list[str] _property: One (or more) properties to search for, will be combined by specified combine mode - :param list[str] value: One (or more) values to search for, matching the properties defined before - :param list[str] comparator: (Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >= - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'content_type', 'combine_mode', '_property', 'value', 'comparator', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_by_property" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search_by_property`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'content_type' in params: - query_params.append(('contentType', params['content_type'])) # noqa: E501 - if 'combine_mode' in params: - query_params.append(('combineMode', params['combine_mode'])) # noqa: E501 - if '_property' in params: - query_params.append(('property', params['_property'])) # noqa: E501 - collection_formats['property'] = 'multi' # noqa: E501 - if 'value' in params: - query_params.append(('value', params['value'])) # noqa: E501 - collection_formats['value'] = 'multi' # noqa: E501 - if 'comparator' in params: - query_params.append(('comparator', params['comparator'])) # noqa: E501 - collection_formats['comparator'] = 'multi' # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/search/v1/custom/{repository}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResultNode', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search_fingerprint(self, repository, nodeid, **kwargs): # noqa: E501 - """Perform queries based on metadata sets. # noqa: E501 - - Perform queries based on metadata sets. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_fingerprint(repository, nodeid, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str nodeid: nodeid (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_fingerprint_with_http_info(repository, nodeid, **kwargs) # noqa: E501 - else: - (data) = self.search_fingerprint_with_http_info(repository, nodeid, **kwargs) # noqa: E501 - return data - - def search_fingerprint_with_http_info(self, repository, nodeid, **kwargs): # noqa: E501 - """Perform queries based on metadata sets. # noqa: E501 - - Perform queries based on metadata sets. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_fingerprint_with_http_info(repository, nodeid, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str nodeid: nodeid (required) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'nodeid', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_fingerprint" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search_fingerprint`") # noqa: E501 - # verify the required parameter 'nodeid' is set - if ('nodeid' not in params or - params['nodeid'] is None): - raise ValueError("Missing the required parameter `nodeid` when calling `search_fingerprint`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'nodeid' in params: - path_params['nodeid'] = params['nodeid'] # noqa: E501 - - query_params = [] - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/search/v1/queries/{repository}/fingerprint/{nodeid}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResultNode', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search_v2(self, body, repository, metadataset, query, **kwargs): # noqa: E501 - """Perform queries based on metadata sets V2. # noqa: E501 - - Perform queries based on metadata sets V2. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_v2(body, repository, metadataset, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param SearchParameters body: search parameters (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :param str query: ID of query (required) - :param str content_type: Type of element - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_v2_with_http_info(body, repository, metadataset, query, **kwargs) # noqa: E501 - else: - (data) = self.search_v2_with_http_info(body, repository, metadataset, query, **kwargs) # noqa: E501 - return data - - def search_v2_with_http_info(self, body, repository, metadataset, query, **kwargs): # noqa: E501 - """Perform queries based on metadata sets V2. # noqa: E501 - - Perform queries based on metadata sets V2. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_v2_with_http_info(body, repository, metadataset, query, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param SearchParameters body: search parameters (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) - :param str query: ID of query (required) - :param str content_type: Type of element - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :param list[str] property_filter: property filter for result nodes (or \"-all-\" for all properties) - :return: SearchResultNode - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['body', 'repository', 'metadataset', 'query', 'content_type', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending', 'property_filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search_v2" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `search_v2`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search_v2`") # noqa: E501 - # verify the required parameter 'metadataset' is set - if ('metadataset' not in params or - params['metadataset'] is None): - raise ValueError("Missing the required parameter `metadataset` when calling `search_v2`") # noqa: E501 - # verify the required parameter 'query' is set - if ('query' not in params or - params['query'] is None): - raise ValueError("Missing the required parameter `query` when calling `search_v2`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'metadataset' in params: - path_params['metadataset'] = params['metadataset'] # noqa: E501 - if 'query' in params: - path_params['query'] = params['query'] # noqa: E501 - - query_params = [] - if 'content_type' in params: - query_params.append(('contentType', params['content_type'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - if 'property_filter' in params: - query_params.append(('propertyFilter', params['property_filter'])) # noqa: E501 - collection_formats['propertyFilter'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/search/v1/queriesV2/{repository}/{metadataset}/{query}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SearchResultNode', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/sharing_v1_api.py b/edu_sharing_client/api/sharing_v1_api.py deleted file mode 100644 index 8c945353..00000000 --- a/edu_sharing_client/api/sharing_v1_api.py +++ /dev/null @@ -1,279 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class SHARINGV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def get_children(self, repository, node, share, **kwargs): # noqa: E501 - """Get all children of this share. # noqa: E501 - - Only valid for shared folders # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_children(repository, node, share, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share: Share token (required) - :param str password: Password (required if share is locked) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: NodeEntries - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_children_with_http_info(repository, node, share, **kwargs) # noqa: E501 - else: - (data) = self.get_children_with_http_info(repository, node, share, **kwargs) # noqa: E501 - return data - - def get_children_with_http_info(self, repository, node, share, **kwargs): # noqa: E501 - """Get all children of this share. # noqa: E501 - - Only valid for shared folders # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_children_with_http_info(repository, node, share, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share: Share token (required) - :param str password: Password (required if share is locked) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties - :param list[bool] sort_ascending: sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index - :return: NodeEntries - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'share', 'password', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_children" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_children`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_children`") # noqa: E501 - # verify the required parameter 'share' is set - if ('share' not in params or - params['share'] is None): - raise ValueError("Missing the required parameter `share` when calling `get_children`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - if 'share' in params: - path_params['share'] = params['share'] # noqa: E501 - - query_params = [] - if 'password' in params: - query_params.append(('password', params['password'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/sharing/v1/sharing/{repository}/{node}/{share}/children', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntries', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_info(self, repository, node, share, **kwargs): # noqa: E501 - """Get general info of a share. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_info(repository, node, share, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share: Share token (required) - :param str password: Password to validate (optional) - :return: SharingInfo - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_info_with_http_info(repository, node, share, **kwargs) # noqa: E501 - else: - (data) = self.get_info_with_http_info(repository, node, share, **kwargs) # noqa: E501 - return data - - def get_info_with_http_info(self, repository, node, share, **kwargs): # noqa: E501 - """Get general info of a share. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_info_with_http_info(repository, node, share, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: ID of node (required) - :param str share: Share token (required) - :param str password: Password to validate (optional) - :return: SharingInfo - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'node', 'share', 'password'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_info" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_info`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `get_info`") # noqa: E501 - # verify the required parameter 'share' is set - if ('share' not in params or - params['share'] is None): - raise ValueError("Missing the required parameter `share` when calling `get_info`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - if 'share' in params: - path_params['share'] = params['share'] # noqa: E501 - - query_params = [] - if 'password' in params: - query_params.append(('password', params['password'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/sharing/v1/sharing/{repository}/{node}/{share}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='SharingInfo', # 
noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/statistic_v1_api.py b/edu_sharing_client/api/statistic_v1_api.py deleted file mode 100644 index b7be38e5..00000000 --- a/edu_sharing_client/api/statistic_v1_api.py +++ /dev/null @@ -1,507 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class STATISTICV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def get(self, body, context, **kwargs): # noqa: E501 - """Get statistics of repository. # noqa: E501 - - Statistics. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get(body, context, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Filter body: filter (required) - :param str context: context, the node where to start (required) - :param list[str] properties: properties - :return: Statistics - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_with_http_info(body, context, **kwargs) # noqa: E501 - else: - (data) = self.get_with_http_info(body, context, **kwargs) # noqa: E501 - return data - - def get_with_http_info(self, body, context, **kwargs): # noqa: E501 - """Get statistics of repository. # noqa: E501 - - Statistics. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_with_http_info(body, context, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param Filter body: filter (required) - :param str context: context, the node where to start (required) - :param list[str] properties: properties - :return: Statistics - If the method is called asynchronously, - returns the request thread. 
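# A short sketch of the statistics facettes call documented above. STATISTICV1Api comes
# from the statistic_v1_api module this patch deletes; the Filter model's fields are not
# shown in this hunk, so an empty placeholder is passed for `body`, and the context node
# id is illustrative only.
from edu_sharing_client.api.statistic_v1_api import STATISTICV1Api

statistic_api = STATISTICV1Api()           # the generated __init__ creates a default ApiClient()
body = {}                                  # placeholder for the required Filter payload
context = "example-node-id"                # the node where the statistics query starts
stats = statistic_api.get(body, context)   # returns a Statistics object, per the docstring above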
- """ - - all_params = ['body', 'context', 'properties'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `get`") # noqa: E501 - # verify the required parameter 'context' is set - if ('context' not in params or - params['context'] is None): - raise ValueError("Missing the required parameter `context` when calling `get`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'context' in params: - path_params['context'] = params['context'] # noqa: E501 - - query_params = [] - if 'properties' in params: - query_params.append(('properties', params['properties'])) # noqa: E501 - collection_formats['properties'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/statistic/v1/facettes/{context}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Statistics', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_global_statistics(self, **kwargs): # noqa: E501 - """Get stats. # noqa: E501 - - Get global statistics for this repository. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_global_statistics(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str group: primary property to build facettes and count+group values - :param list[str] sub_group: additional properties to build facettes and count+sub-group values - :return: StatisticsGlobal - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_global_statistics_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.get_global_statistics_with_http_info(**kwargs) # noqa: E501 - return data - - def get_global_statistics_with_http_info(self, **kwargs): # noqa: E501 - """Get stats. # noqa: E501 - - Get global statistics for this repository. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_global_statistics_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str group: primary property to build facettes and count+group values - :param list[str] sub_group: additional properties to build facettes and count+sub-group values - :return: StatisticsGlobal - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['group', 'sub_group'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_global_statistics" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'group' in params: - query_params.append(('group', params['group'])) # noqa: E501 - if 'sub_group' in params: - query_params.append(('subGroup', params['sub_group'])) # noqa: E501 - collection_formats['subGroup'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/statistic/v1/public', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='StatisticsGlobal', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_statistics_node(self, grouping, date_from, date_to, **kwargs): # noqa: E501 - """get statistics for node actions # noqa: E501 - - requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_NODES for global stats or to be admin of the requested mediacenter # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_statistics_node(grouping, date_from, date_to, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str grouping: Grouping type (by date) (required) - :param int date_from: date range from (required) - :param int date_to: date range to (required) - :param dict(str, str) body: filters for the custom json object stored in each entry - :param str mediacenter: the mediacenter to filter for statistics - :param list[str] additional_fields: additionals fields of the custom json object stored in each query that should be returned - :param list[str] group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) - :return: list[TrackingNode] - If the method is called asynchronously, - returns the request thread. 
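# Sketch of the node-statistics query described above. The accepted grouping constants and
# the exact epoch format of date_from/date_to are not spelled out in this hunk, so the
# values below are assumed examples; the TOOLPERMISSION_GLOBAL_STATISTICS_NODES /
# mediacenter-admin requirement from the docstring still applies on the server side.
from edu_sharing_client.api.statistic_v1_api import STATISTICV1Api

statistic_api = STATISTICV1Api()
tracking_nodes = statistic_api.get_statistics_node(
    grouping="Daily",                      # assumed example; valid grouping values are not shown here
    date_from=1577836800000,               # assumed millisecond timestamps for the date range
    date_to=1609459199000,
    mediacenter="example-mediacenter",     # optional filter, value illustrative
)                                          # returns list[TrackingNode], per the docstring above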
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_statistics_node_with_http_info(grouping, date_from, date_to, **kwargs) # noqa: E501 - else: - (data) = self.get_statistics_node_with_http_info(grouping, date_from, date_to, **kwargs) # noqa: E501 - return data - - def get_statistics_node_with_http_info(self, grouping, date_from, date_to, **kwargs): # noqa: E501 - """get statistics for node actions # noqa: E501 - - requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_NODES for global stats or to be admin of the requested mediacenter # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_statistics_node_with_http_info(grouping, date_from, date_to, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str grouping: Grouping type (by date) (required) - :param int date_from: date range from (required) - :param int date_to: date range to (required) - :param dict(str, str) body: filters for the custom json object stored in each entry - :param str mediacenter: the mediacenter to filter for statistics - :param list[str] additional_fields: additionals fields of the custom json object stored in each query that should be returned - :param list[str] group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) - :return: list[TrackingNode] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['grouping', 'date_from', 'date_to', 'body', 'mediacenter', 'additional_fields', 'group_field'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_statistics_node" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'grouping' is set - if ('grouping' not in params or - params['grouping'] is None): - raise ValueError("Missing the required parameter `grouping` when calling `get_statistics_node`") # noqa: E501 - # verify the required parameter 'date_from' is set - if ('date_from' not in params or - params['date_from'] is None): - raise ValueError("Missing the required parameter `date_from` when calling `get_statistics_node`") # noqa: E501 - # verify the required parameter 'date_to' is set - if ('date_to' not in params or - params['date_to'] is None): - raise ValueError("Missing the required parameter `date_to` when calling `get_statistics_node`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'grouping' in params: - query_params.append(('grouping', params['grouping'])) # noqa: E501 - if 'date_from' in params: - query_params.append(('dateFrom', params['date_from'])) # noqa: E501 - if 'date_to' in params: - query_params.append(('dateTo', params['date_to'])) # noqa: E501 - if 'mediacenter' in params: - query_params.append(('mediacenter', params['mediacenter'])) # noqa: E501 - if 'additional_fields' in params: - query_params.append(('additionalFields', params['additional_fields'])) # noqa: E501 - collection_formats['additionalFields'] = 'multi' # noqa: E501 - if 'group_field' in params: - query_params.append(('groupField', params['group_field'])) # 
noqa: E501 - collection_formats['groupField'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/statistic/v1/statistics/nodes', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[TrackingNode]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_statistics_user(self, grouping, date_from, date_to, **kwargs): # noqa: E501 - """get statistics for user actions (login, logout) # noqa: E501 - - requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_USER for global stats or to be admin of the requested mediacenter # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_statistics_user(grouping, date_from, date_to, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str grouping: Grouping type (by date) (required) - :param int date_from: date range from (required) - :param int date_to: date range to (required) - :param dict(str, str) body: filters for the custom json object stored in each entry - :param str mediacenter: the mediacenter to filter for statistics - :param list[str] additional_fields: additionals fields of the custom json object stored in each query that should be returned - :param list[str] group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) - :return: list[Tracking] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_statistics_user_with_http_info(grouping, date_from, date_to, **kwargs) # noqa: E501 - else: - (data) = self.get_statistics_user_with_http_info(grouping, date_from, date_to, **kwargs) # noqa: E501 - return data - - def get_statistics_user_with_http_info(self, grouping, date_from, date_to, **kwargs): # noqa: E501 - """get statistics for user actions (login, logout) # noqa: E501 - - requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_USER for global stats or to be admin of the requested mediacenter # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_statistics_user_with_http_info(grouping, date_from, date_to, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str grouping: Grouping type (by date) (required) - :param int date_from: date range from (required) - :param int date_to: date range to (required) - :param dict(str, str) body: filters for the custom json object stored in each entry - :param str mediacenter: the mediacenter to filter for statistics - :param list[str] additional_fields: additionals fields of the custom json object stored in each query that should be returned - :param list[str] group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) - :return: list[Tracking] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['grouping', 'date_from', 'date_to', 'body', 'mediacenter', 'additional_fields', 'group_field'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_statistics_user" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'grouping' is set - if ('grouping' not in params or - params['grouping'] is None): - raise ValueError("Missing the required parameter `grouping` when calling `get_statistics_user`") # noqa: E501 - # verify the required parameter 'date_from' is set - if ('date_from' not in params or - params['date_from'] is None): - raise ValueError("Missing the required parameter `date_from` when calling `get_statistics_user`") # noqa: E501 - # verify the required parameter 'date_to' is set - if ('date_to' not in params or - params['date_to'] is None): - raise ValueError("Missing the required parameter `date_to` when calling `get_statistics_user`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'grouping' in params: - query_params.append(('grouping', params['grouping'])) # noqa: E501 - if 'date_from' in params: - query_params.append(('dateFrom', params['date_from'])) # noqa: E501 - if 'date_to' in params: - query_params.append(('dateTo', params['date_to'])) # noqa: E501 - if 'mediacenter' in params: - query_params.append(('mediacenter', params['mediacenter'])) # noqa: E501 - if 'additional_fields' in params: - query_params.append(('additionalFields', params['additional_fields'])) # noqa: E501 - collection_formats['additionalFields'] = 'multi' # noqa: E501 - if 'group_field' in params: - query_params.append(('groupField', params['group_field'])) # noqa: E501 - collection_formats['groupField'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/statistic/v1/statistics/users', 'POST', - path_params, - query_params, - header_params, - 
body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Tracking]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/stream_v1_api.py b/edu_sharing_client/api/stream_v1_api.py deleted file mode 100644 index a1b73e8e..00000000 --- a/edu_sharing_client/api/stream_v1_api.py +++ /dev/null @@ -1,689 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class STREAMV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def add_entry(self, body, repository, **kwargs): # noqa: E501 - """add a new stream object. # noqa: E501 - - will return the object and add the id to the object if creation succeeded # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_entry(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param StreamEntryInput body: Stream object to add (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: StreamEntryInput - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.add_entry_with_http_info(body, repository, **kwargs) # noqa: E501 - else: - (data) = self.add_entry_with_http_info(body, repository, **kwargs) # noqa: E501 - return data - - def add_entry_with_http_info(self, body, repository, **kwargs): # noqa: E501 - """add a new stream object. # noqa: E501 - - will return the object and add the id to the object if creation succeeded # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.add_entry_with_http_info(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param StreamEntryInput body: Stream object to add (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: StreamEntryInput - If the method is called asynchronously, - returns the request thread. 
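# Sketch of adding a stream entry via the wrapper documented above. The StreamEntryInput
# model is not shown in this hunk, so a bare placeholder is used for `body`; on success
# the wrapper echoes the object back with its new id filled in.
from edu_sharing_client.api.stream_v1_api import STREAMV1Api

stream_api = STREAMV1Api()                 # default ApiClient() is created by the generated __init__
entry = {}                                 # placeholder for a StreamEntryInput payload
created = stream_api.add_entry(entry, "-home-")   # "-home-" addresses the home repository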
- """ - - all_params = ['body', 'repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method add_entry" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `add_entry`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `add_entry`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/stream/v1/add/{repository}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='StreamEntryInput', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def can_access(self, repository, node, **kwargs): # noqa: E501 - """test # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.can_access(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: The property to aggregate (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.can_access_with_http_info(repository, node, **kwargs) # noqa: E501 - else: - (data) = self.can_access_with_http_info(repository, node, **kwargs) # noqa: E501 - return data - - def can_access_with_http_info(self, repository, node, **kwargs): # noqa: E501 - """test # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.can_access_with_http_info(repository, node, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str node: The property to aggregate (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['repository', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method can_access" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `can_access`") # noqa: E501 - # verify the required parameter 'node' is set - if ('node' not in params or - params['node'] is None): - raise ValueError("Missing the required parameter `node` when calling `can_access`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'node' in params: - path_params['node'] = params['node'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/stream/v1/access/{repository}/{node}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_entry(self, repository, entry, **kwargs): # noqa: E501 - """delete a stream object # noqa: E501 - - the current user must be author of the given stream object # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_entry(repository, entry, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str entry: entry id to delete (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_entry_with_http_info(repository, entry, **kwargs) # noqa: E501 - else: - (data) = self.delete_entry_with_http_info(repository, entry, **kwargs) # noqa: E501 - return data - - def delete_entry_with_http_info(self, repository, entry, **kwargs): # noqa: E501 - """delete a stream object # noqa: E501 - - the current user must be author of the given stream object # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_entry_with_http_info(repository, entry, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str entry: entry id to delete (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'entry'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_entry" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `delete_entry`") # noqa: E501 - # verify the required parameter 'entry' is set - if ('entry' not in params or - params['entry'] is None): - raise ValueError("Missing the required parameter `entry` when calling `delete_entry`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'entry' in params: - path_params['entry'] = params['entry'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/stream/v1/delete/{repository}/{entry}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_property_values(self, repository, _property, **kwargs): # noqa: E501 - """Get top values for a property # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_property_values(repository, _property, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str _property: The property to aggregate (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_property_values_with_http_info(repository, _property, **kwargs) # noqa: E501 - else: - (data) = self.get_property_values_with_http_info(repository, _property, **kwargs) # noqa: E501 - return data - - def get_property_values_with_http_info(self, repository, _property, **kwargs): # noqa: E501 - """Get top values for a property # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_property_values_with_http_info(repository, _property, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str _property: The property to aggregate (required) - :return: dict(str, object) - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', '_property'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_property_values" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_property_values`") # noqa: E501 - # verify the required parameter '_property' is set - if ('_property' not in params or - params['_property'] is None): - raise ValueError("Missing the required parameter `_property` when calling `get_property_values`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if '_property' in params: - path_params['property'] = params['_property'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/stream/v1/properties/{repository}/{property}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='dict(str, object)', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def search(self, repository, **kwargs): # noqa: E501 - """Get the stream content for the current user with the given status. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param dict(str, str) body: map with property + value to search - :param str status: Stream object status to search for - :param str query: generic text to search for (in title or description) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties, currently supported: created, priority, default: priority desc, created desc - :param list[bool] sort_ascending: sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index - :return: StreamList - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.search_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.search_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def search_with_http_info(self, repository, **kwargs): # noqa: E501 - """Get the stream content for the current user with the given status. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.search_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param dict(str, str) body: map with property + value to search - :param str status: Stream object status to search for - :param str query: generic text to search for (in title or description) - :param int max_items: maximum items per page - :param int skip_count: skip a number of items - :param list[str] sort_properties: sort properties, currently supported: created, priority, default: priority desc, created desc - :param list[bool] sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index - :return: StreamList - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'body', 'status', 'query', 'max_items', 'skip_count', 'sort_properties', 'sort_ascending'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method search" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `search`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'status' in params: - query_params.append(('status', params['status'])) # noqa: E501 - if 'query' in params: - query_params.append(('query', params['query'])) # noqa: E501 - if 'max_items' in params: - query_params.append(('maxItems', params['max_items'])) # noqa: E501 - if 'skip_count' in params: - query_params.append(('skipCount', params['skip_count'])) # noqa: E501 - if 'sort_properties' in params: - query_params.append(('sortProperties', params['sort_properties'])) # noqa: E501 - collection_formats['sortProperties'] = 'multi' # noqa: E501 - if 'sort_ascending' in params: - query_params.append(('sortAscending', params['sort_ascending'])) # noqa: E501 - collection_formats['sortAscending'] = 'multi' # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` 
- header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/stream/v1/search/{repository}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='StreamList', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_entry(self, repository, entry, authority, status, **kwargs): # noqa: E501 - """update status for a stream object and authority # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_entry(repository, entry, authority, status, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str entry: entry id to update (required) - :param str authority: authority to set/change status (required) - :param str status: New status for this authority (required) - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_entry_with_http_info(repository, entry, authority, status, **kwargs) # noqa: E501 - else: - (data) = self.update_entry_with_http_info(repository, entry, authority, status, **kwargs) # noqa: E501 - return data - - def update_entry_with_http_info(self, repository, entry, authority, status, **kwargs): # noqa: E501 - """update status for a stream object and authority # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_entry_with_http_info(repository, entry, authority, status, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str entry: entry id to update (required) - :param str authority: authority to set/change status (required) - :param str status: New status for this authority (required) - :return: None - If the method is called asynchronously, - returns the request thread. 
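# Sketch of the stream status update documented above: all four arguments are required and
# the wrapper returns None. The set of valid status strings is not visible in this hunk,
# so the value below is only an assumed example.
from edu_sharing_client.api.stream_v1_api import STREAMV1Api

stream_api = STREAMV1Api()
stream_api.update_entry(
    repository="-home-",
    entry="example-entry-id",              # placeholder entry id
    authority="example-user",              # placeholder authority name
    status="READ",                         # assumed example status value
)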
- """ - - all_params = ['repository', 'entry', 'authority', 'status'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_entry" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `update_entry`") # noqa: E501 - # verify the required parameter 'entry' is set - if ('entry' not in params or - params['entry'] is None): - raise ValueError("Missing the required parameter `entry` when calling `update_entry`") # noqa: E501 - # verify the required parameter 'authority' is set - if ('authority' not in params or - params['authority'] is None): - raise ValueError("Missing the required parameter `authority` when calling `update_entry`") # noqa: E501 - # verify the required parameter 'status' is set - if ('status' not in params or - params['status'] is None): - raise ValueError("Missing the required parameter `status` when calling `update_entry`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'entry' in params: - path_params['entry'] = params['entry'] # noqa: E501 - - query_params = [] - if 'authority' in params: - query_params.append(('authority', params['authority'])) # noqa: E501 - if 'status' in params: - query_params.append(('status', params['status'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/stream/v1/status/{repository}/{entry}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/tool_v1_api.py b/edu_sharing_client/api/tool_v1_api.py deleted file mode 100644 index 5e85ee80..00000000 --- a/edu_sharing_client/api/tool_v1_api.py +++ /dev/null @@ -1,695 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class TOOLV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def create_tool_defintition(self, body, repository, **kwargs): # noqa: E501 - """Create a new tool definition object. # noqa: E501 - - Create a new tool definition object. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_tool_defintition(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_tool_defintition_with_http_info(body, repository, **kwargs) # noqa: E501 - else: - (data) = self.create_tool_defintition_with_http_info(body, repository, **kwargs) # noqa: E501 - return data - - def create_tool_defintition_with_http_info(self, body, repository, **kwargs): # noqa: E501 - """Create a new tool definition object. # noqa: E501 - - Create a new tool definition object. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_tool_defintition_with_http_info(body, repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
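# Sketch of creating a tool definition with the wrapper documented above, reusing the
# properties example given verbatim in its docstring. Note the call keeps the generated
# (misspelled) method name create_tool_defintition as-is.
from edu_sharing_client.api.tool_v1_api import TOOLV1Api

tool_api = TOOLV1Api()
properties = {"{http://www.alfresco.org/model/content/1.0}name": ["test"]}  # example from the docstring
node_entry = tool_api.create_tool_defintition(
    properties,
    "-home-",                              # repository id ("-home-" = home repository)
    rename_if_exists=True,                 # optional: rename if the same node name already exists
)                                          # returns a NodeEntry, per the docstring above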
- """ - - all_params = ['body', 'repository', 'rename_if_exists', 'version_comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_tool_defintition" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_tool_defintition`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_tool_defintition`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - if 'rename_if_exists' in params: - query_params.append(('renameIfExists', params['rename_if_exists'])) # noqa: E501 - if 'version_comment' in params: - query_params.append(('versionComment', params['version_comment'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/tool/v1/tools/{repository}/tooldefinitions', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_tool_instance(self, body, repository, tool_definition, **kwargs): # noqa: E501 - """Create a new tool Instance object. # noqa: E501 - - Create a new tool Instance object. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_tool_instance(body, repository, tool_definition, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str tool_definition: ID of parent node must have tool_definition aspect (required) - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_tool_instance_with_http_info(body, repository, tool_definition, **kwargs) # noqa: E501 - else: - (data) = self.create_tool_instance_with_http_info(body, repository, tool_definition, **kwargs) # noqa: E501 - return data - - def create_tool_instance_with_http_info(self, body, repository, tool_definition, **kwargs): # noqa: E501 - """Create a new tool Instance object. # noqa: E501 - - Create a new tool Instance object. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_tool_instance_with_http_info(body, repository, tool_definition, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str tool_definition: ID of parent node must have tool_definition aspect (required) - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['body', 'repository', 'tool_definition', 'rename_if_exists', 'version_comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_tool_instance" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_tool_instance`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_tool_instance`") # noqa: E501 - # verify the required parameter 'tool_definition' is set - if ('tool_definition' not in params or - params['tool_definition'] is None): - raise ValueError("Missing the required parameter `tool_definition` when calling `create_tool_instance`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'tool_definition' in params: - path_params['toolDefinition'] = params['tool_definition'] # noqa: E501 - - query_params = [] - if 'rename_if_exists' in params: - query_params.append(('renameIfExists', params['rename_if_exists'])) # noqa: E501 - if 'version_comment' in params: - query_params.append(('versionComment', params['version_comment'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - 
auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/tool/v1/tools/{repository}/{toolDefinition}/toolinstances', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_tool_object(self, body, repository, toolinstance, **kwargs): # noqa: E501 - """Create a new tool object for a given tool instance. # noqa: E501 - - Create a new tool object for a given tool instance. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_tool_object(body, repository, toolinstance, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str toolinstance: ID of parent node (a tool instance object) (required) - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_tool_object_with_http_info(body, repository, toolinstance, **kwargs) # noqa: E501 - else: - (data) = self.create_tool_object_with_http_info(body, repository, toolinstance, **kwargs) # noqa: E501 - return data - - def create_tool_object_with_http_info(self, body, repository, toolinstance, **kwargs): # noqa: E501 - """Create a new tool object for a given tool instance. # noqa: E501 - - Create a new tool object for a given tool instance. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_tool_object_with_http_info(body, repository, toolinstance, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param dict(str, list[str]) body: properties, example: {"{http://www.alfresco.org/model/content/1.0}name": ["test"]} (required) - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str toolinstance: ID of parent node (a tool instance object) (required) - :param bool rename_if_exists: rename if the same node name exists - :param str version_comment: comment, leave empty = no inital version - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
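The instance and object creators follow the same calling convention but take the parent node id as an extra path parameter. A sketch with placeholder ids (the NodeEntry layout is not visible in this diff, so the ids below are not derived from a previous call):

    from edu_sharing_client.api_client import ApiClient
    from edu_sharing_client.api.tool_v1_api import TOOLV1Api

    tool_api = TOOLV1Api(ApiClient())
    properties = {"{http://www.alfresco.org/model/content/1.0}name": ["example"]}

    # A tool instance is created under an existing tool definition node,
    # and a tool object under an existing tool instance node (placeholder ids).
    instance = tool_api.create_tool_instance(properties, "-home-", "TOOL_DEFINITION_NODE_ID")
    tool_obj = tool_api.create_tool_object(properties, "-home-", "TOOL_INSTANCE_NODE_ID")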
- """ - - all_params = ['body', 'repository', 'toolinstance', 'rename_if_exists', 'version_comment'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_tool_object" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_tool_object`") # noqa: E501 - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `create_tool_object`") # noqa: E501 - # verify the required parameter 'toolinstance' is set - if ('toolinstance' not in params or - params['toolinstance'] is None): - raise ValueError("Missing the required parameter `toolinstance` when calling `create_tool_object`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'toolinstance' in params: - path_params['toolinstance'] = params['toolinstance'] # noqa: E501 - - query_params = [] - if 'rename_if_exists' in params: - query_params.append(('renameIfExists', params['rename_if_exists'])) # noqa: E501 - if 'version_comment' in params: - query_params.append(('versionComment', params['version_comment'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'body' in params: - body_params = params['body'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/tool/v1/tools/{repository}/{toolinstance}/toolobject', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_all_tool_definitions(self, repository, **kwargs): # noqa: E501 - """Get all ToolDefinitions. # noqa: E501 - - Get all ToolDefinitions. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_all_tool_definitions(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_all_tool_definitions_with_http_info(repository, **kwargs) # noqa: E501 - else: - (data) = self.get_all_tool_definitions_with_http_info(repository, **kwargs) # noqa: E501 - return data - - def get_all_tool_definitions_with_http_info(self, repository, **kwargs): # noqa: E501 - """Get all ToolDefinitions. # noqa: E501 - - Get all ToolDefinitions. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_all_tool_definitions_with_http_info(repository, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_all_tool_definitions" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_all_tool_definitions`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/tool/v1/tools/{repository}/tooldefinitions', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_instance(self, repository, nodeid, **kwargs): # noqa: E501 - """Get Instances of a ToolDefinition. # noqa: E501 - - Get Instances of a ToolDefinition. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_instance(repository, nodeid, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str nodeid: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_instance_with_http_info(repository, nodeid, **kwargs) # noqa: E501 - else: - (data) = self.get_instance_with_http_info(repository, nodeid, **kwargs) # noqa: E501 - return data - - def get_instance_with_http_info(self, repository, nodeid, **kwargs): # noqa: E501 - """Get Instances of a ToolDefinition. # noqa: E501 - - Get Instances of a ToolDefinition. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_instance_with_http_info(repository, nodeid, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str nodeid: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'nodeid'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_instance" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_instance`") # noqa: E501 - # verify the required parameter 'nodeid' is set - if ('nodeid' not in params or - params['nodeid'] is None): - raise ValueError("Missing the required parameter `nodeid` when calling `get_instance`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'nodeid' in params: - path_params['nodeid'] = params['nodeid'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/tool/v1/tools/{repository}/{nodeid}/toolinstance', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_instances(self, repository, tool_definition, **kwargs): # noqa: E501 - """Get Instances of a ToolDefinition. # noqa: E501 - - Get Instances of a ToolDefinition. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_instances(repository, tool_definition, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str tool_definition: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_instances_with_http_info(repository, tool_definition, **kwargs) # noqa: E501 - else: - (data) = self.get_instances_with_http_info(repository, tool_definition, **kwargs) # noqa: E501 - return data - - def get_instances_with_http_info(self, repository, tool_definition, **kwargs): # noqa: E501 - """Get Instances of a ToolDefinition. # noqa: E501 - - Get Instances of a ToolDefinition. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_instances_with_http_info(repository, tool_definition, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str tool_definition: ID of node (required) - :return: NodeEntry - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository', 'tool_definition'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_instances" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `get_instances`") # noqa: E501 - # verify the required parameter 'tool_definition' is set - if ('tool_definition' not in params or - params['tool_definition'] is None): - raise ValueError("Missing the required parameter `tool_definition` when calling `get_instances`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'tool_definition' in params: - path_params['toolDefinition'] = params['tool_definition'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/tool/v1/tools/{repository}/{toolDefinition}/toolinstances', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='NodeEntry', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/tracking_v1_api.py 
b/edu_sharing_client/api/tracking_v1_api.py deleted file mode 100644 index 9523ff5a..00000000 --- a/edu_sharing_client/api/tracking_v1_api.py +++ /dev/null @@ -1,140 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class TRACKINGV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def track_event(self, repository, event, **kwargs): # noqa: E501 - """Track a user interaction # noqa: E501 - - Currently limited to video / audio play interactions # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.track_event(repository, event, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str event: type of event to track (required) - :param str node: node id for which the event is tracked. For some event, this can be null - :return: None - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.track_event_with_http_info(repository, event, **kwargs) # noqa: E501 - else: - (data) = self.track_event_with_http_info(repository, event, **kwargs) # noqa: E501 - return data - - def track_event_with_http_info(self, repository, event, **kwargs): # noqa: E501 - """Track a user interaction # noqa: E501 - - Currently limited to video / audio play interactions # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.track_event_with_http_info(repository, event, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository: ID of repository (or \"-home-\" for home repository) (required) - :param str event: type of event to track (required) - :param str node: node id for which the event is tracked. For some event, this can be null - :return: None - If the method is called asynchronously, - returns the request thread. 
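A sketch of the tracking call described above; the event type is repository-specific, so the value here is only a placeholder:

    from edu_sharing_client.api_client import ApiClient
    from edu_sharing_client.api.tracking_v1_api import TRACKINGV1Api

    tracking_api = TRACKINGV1Api(ApiClient())

    # Track a (placeholder) event for a node; the endpoint returns None on success.
    tracking_api.track_event("-home-", "EVENT_TYPE", node="NODE_ID")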
- """ - - all_params = ['repository', 'event', 'node'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method track_event" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository' is set - if ('repository' not in params or - params['repository'] is None): - raise ValueError("Missing the required parameter `repository` when calling `track_event`") # noqa: E501 - # verify the required parameter 'event' is set - if ('event' not in params or - params['event'] is None): - raise ValueError("Missing the required parameter `event` when calling `track_event`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository' in params: - path_params['repository'] = params['repository'] # noqa: E501 - if 'event' in params: - path_params['event'] = params['event'] # noqa: E501 - - query_params = [] - if 'node' in params: - query_params.append(('node', params['node'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/tracking/v1/tracking/{repository}/{event}', 'PUT', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api/usage_v1_api.py b/edu_sharing_client/api/usage_v1_api.py deleted file mode 100644 index cdc7d82c..00000000 --- a/edu_sharing_client/api/usage_v1_api.py +++ /dev/null @@ -1,625 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from edu_sharing_client.api_client import ApiClient - - -class USAGEV1Api(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def delete_usage(self, node_id, usage_id, **kwargs): # noqa: E501 - """Delete an usage of a node. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_usage(node_id, usage_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: ID of node (required) - :param str usage_id: ID of usage (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_usage_with_http_info(node_id, usage_id, **kwargs) # noqa: E501 - else: - (data) = self.delete_usage_with_http_info(node_id, usage_id, **kwargs) # noqa: E501 - return data - - def delete_usage_with_http_info(self, node_id, usage_id, **kwargs): # noqa: E501 - """Delete an usage of a node. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_usage_with_http_info(node_id, usage_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: ID of node (required) - :param str usage_id: ID of usage (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['node_id', 'usage_id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_usage" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'node_id' is set - if ('node_id' not in params or - params['node_id'] is None): - raise ValueError("Missing the required parameter `node_id` when calling `delete_usage`") # noqa: E501 - # verify the required parameter 'usage_id' is set - if ('usage_id' not in params or - params['usage_id'] is None): - raise ValueError("Missing the required parameter `usage_id` when calling `delete_usage`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'node_id' in params: - path_params['nodeId'] = params['node_id'] # noqa: E501 - if 'usage_id' in params: - path_params['usageId'] = params['usage_id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/usage/v1/usages/node/{nodeId}/{usageId}', 'DELETE', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Usages', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_usages(self, repository_id, node_id, **kwargs): # noqa: E501 - """get_usages # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages(repository_id, node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository_id: ID of repository (required) - :param str node_id: ID of node. Use -all- for getting usages of all nodes (required) - :param int _from: from date - :param int to: to date - :return: None - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_usages_with_http_info(repository_id, node_id, **kwargs) # noqa: E501 - else: - (data) = self.get_usages_with_http_info(repository_id, node_id, **kwargs) # noqa: E501 - return data - - def get_usages_with_http_info(self, repository_id, node_id, **kwargs): # noqa: E501 - """get_usages # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_with_http_info(repository_id, node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str repository_id: ID of repository (required) - :param str node_id: ID of node. Use -all- for getting usages of all nodes (required) - :param int _from: from date - :param int to: to date - :return: None - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['repository_id', 'node_id', '_from', 'to'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_usages" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'repository_id' is set - if ('repository_id' not in params or - params['repository_id'] is None): - raise ValueError("Missing the required parameter `repository_id` when calling `get_usages`") # noqa: E501 - # verify the required parameter 'node_id' is set - if ('node_id' not in params or - params['node_id'] is None): - raise ValueError("Missing the required parameter `node_id` when calling `get_usages`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'repository_id' in params: - path_params['repositoryId'] = params['repository_id'] # noqa: E501 - if 'node_id' in params: - path_params['nodeId'] = params['node_id'] # noqa: E501 - - query_params = [] - if '_from' in params: - query_params.append(('from', params['_from'])) # noqa: E501 - if 'to' in params: - query_params.append(('to', params['to'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/usage/v1/usages/repository/{repositoryId}/{nodeid}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type=None, # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_usages_0(self, app_id, **kwargs): # noqa: E501 - """Get all usages of an application. # noqa: E501 - - Get all usages of an application. # noqa: E501 - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_0(app_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str app_id: ID of application (or \"-home-\" for home repository) (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_usages_0_with_http_info(app_id, **kwargs) # noqa: E501 - else: - (data) = self.get_usages_0_with_http_info(app_id, **kwargs) # noqa: E501 - return data - - def get_usages_0_with_http_info(self, app_id, **kwargs): # noqa: E501 - """Get all usages of an application. # noqa: E501 - - Get all usages of an application. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_0_with_http_info(app_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str app_id: ID of application (or \"-home-\" for home repository) (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['app_id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_usages_0" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'app_id' is set - if ('app_id' not in params or - params['app_id'] is None): - raise ValueError("Missing the required parameter `app_id` when calling `get_usages_0`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'app_id' in params: - path_params['appId'] = params['app_id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/usage/v1/usages/{appId}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Usages', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_usages_by_course(self, app_id, course_id, **kwargs): # noqa: E501 - """Get all usages of an course. # noqa: E501 - - Get all usages of an course. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_by_course(app_id, course_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str app_id: ID of application (or \"-home-\" for home repository) (required) - :param str course_id: ID of course (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_usages_by_course_with_http_info(app_id, course_id, **kwargs) # noqa: E501 - else: - (data) = self.get_usages_by_course_with_http_info(app_id, course_id, **kwargs) # noqa: E501 - return data - - def get_usages_by_course_with_http_info(self, app_id, course_id, **kwargs): # noqa: E501 - """Get all usages of an course. # noqa: E501 - - Get all usages of an course. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_by_course_with_http_info(app_id, course_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str app_id: ID of application (or \"-home-\" for home repository) (required) - :param str course_id: ID of course (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['app_id', 'course_id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_usages_by_course" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'app_id' is set - if ('app_id' not in params or - params['app_id'] is None): - raise ValueError("Missing the required parameter `app_id` when calling `get_usages_by_course`") # noqa: E501 - # verify the required parameter 'course_id' is set - if ('course_id' not in params or - params['course_id'] is None): - raise ValueError("Missing the required parameter `course_id` when calling `get_usages_by_course`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'app_id' in params: - path_params['appId'] = params['app_id'] # noqa: E501 - if 'course_id' in params: - path_params['courseId'] = params['course_id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/usage/v1/usages/course/{appId}/{courseId}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Usages', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_usages_by_node(self, node_id, **kwargs): # noqa: E501 - """Get all usages of an node. # noqa: E501 - - Get all usages of an node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_by_node(node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: ID of node (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_usages_by_node_with_http_info(node_id, **kwargs) # noqa: E501 - else: - (data) = self.get_usages_by_node_with_http_info(node_id, **kwargs) # noqa: E501 - return data - - def get_usages_by_node_with_http_info(self, node_id, **kwargs): # noqa: E501 - """Get all usages of an node. # noqa: E501 - - Get all usages of an node. # noqa: E501 - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_by_node_with_http_info(node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: ID of node (required) - :return: Usages - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['node_id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_usages_by_node" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'node_id' is set - if ('node_id' not in params or - params['node_id'] is None): - raise ValueError("Missing the required parameter `node_id` when calling `get_usages_by_node`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'node_id' in params: - path_params['nodeId'] = params['node_id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/usage/v1/usages/node/{nodeId}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Usages', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_usages_by_node_collections(self, node_id, **kwargs): # noqa: E501 - """Get all collections where this node is used. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_by_node_collections(node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: ID of node (required) - :return: list[Collection] - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_usages_by_node_collections_with_http_info(node_id, **kwargs) # noqa: E501 - else: - (data) = self.get_usages_by_node_collections_with_http_info(node_id, **kwargs) # noqa: E501 - return data - - def get_usages_by_node_collections_with_http_info(self, node_id, **kwargs): # noqa: E501 - """Get all collections where this node is used. # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_usages_by_node_collections_with_http_info(node_id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str node_id: ID of node (required) - :return: list[Collection] - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['node_id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_usages_by_node_collections" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'node_id' is set - if ('node_id' not in params or - params['node_id'] is None): - raise ValueError("Missing the required parameter `node_id` when calling `get_usages_by_node_collections`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'node_id' in params: - path_params['nodeId'] = params['node_id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['*/*']) # noqa: E501 - - # Authentication setting - auth_settings = [] # noqa: E501 - - return self.api_client.call_api( - '/usage/v1/usages/node/{nodeId}/collections', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='list[Collection]', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/edu_sharing_client/api_client.py b/edu_sharing_client/api_client.py deleted file mode 100644 index 46a61d63..00000000 --- a/edu_sharing_client/api_client.py +++ /dev/null @@ -1,628 +0,0 @@ -# coding: utf-8 -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" -from __future__ import absolute_import - -import datetime -import json -import mimetypes -from multiprocessing.pool import ThreadPool -import os -import re -import tempfile - -# python 2 and python 3 compatibility library -import six -from six.moves.urllib.parse import quote - -from edu_sharing_client.configuration import Configuration -import edu_sharing_client.models -from edu_sharing_client import rest - - -class ApiClient(object): - """Generic API client for Swagger client library builds. - - Swagger generic API client. This client handles the client- - server communication, and is invariant across implementations. Specifics of - the methods and models for each application are generated from the Swagger - templates. - - NOTE: This class is auto generated by the swagger code generator program. - Ref: https://github.com/swagger-api/swagger-codegen - Do not edit the class manually. - - :param configuration: .Configuration object for this client - :param header_name: a header to pass when making calls to the API. 
- :param header_value: a header value to pass when making calls to - the API. - :param cookie: a cookie to include in the header when making calls - to the API - """ - - PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types - NATIVE_TYPES_MAPPING = { - 'int': int, - 'long': int if six.PY3 else long, # noqa: F821 - 'float': float, - 'str': str, - 'bool': bool, - 'date': datetime.date, - 'datetime': datetime.datetime, - 'object': object, - } - - def __init__(self, configuration=None, header_name=None, header_value=None, - cookie=None): - if configuration is None: - configuration = Configuration() - self.configuration = configuration - - self.pool = ThreadPool() - self.rest_client = rest.RESTClientObject(configuration) - self.default_headers = {} - if header_name is not None: - self.default_headers[header_name] = header_value - self.cookie = cookie - # Set default User-Agent. - self.user_agent = 'Swagger-Codegen/6.0-DEV/python' - - def __del__(self): - self.pool.close() - self.pool.join() - - @property - def user_agent(self): - """User agent for this API client""" - return self.default_headers['User-Agent'] - - @user_agent.setter - def user_agent(self, value): - self.default_headers['User-Agent'] = value - - def set_default_header(self, header_name, header_value): - self.default_headers[header_name] = header_value - - def __call_api( - self, resource_path, method, path_params=None, - query_params=None, header_params=None, body=None, post_params=None, - files=None, response_type=None, auth_settings=None, - _return_http_data_only=None, collection_formats=None, - _preload_content=True, _request_timeout=None): - - config = self.configuration - - # header parameters - header_params = header_params or {} - header_params.update(self.default_headers) - if self.cookie: - header_params['Cookie'] = self.cookie - if header_params: - header_params = self.sanitize_for_serialization(header_params) - header_params = dict(self.parameters_to_tuples(header_params, - collection_formats)) - - # path parameters - if path_params: - path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples(path_params, - collection_formats) - for k, v in path_params: - # specified safe chars, encode everything - resource_path = resource_path.replace( - '{%s}' % k, - quote(str(v), safe=config.safe_chars_for_path_param) - ) - - # query parameters - if query_params: - query_params = self.sanitize_for_serialization(query_params) - query_params = self.parameters_to_tuples(query_params, - collection_formats) - - # post parameters - if post_params or files: - post_params = self.prepare_post_parameters(post_params, files) - post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples(post_params, - collection_formats) - - # auth setting - self.update_params_for_auth(header_params, query_params, auth_settings) - - # body - if body: - body = self.sanitize_for_serialization(body) - - # request url - url = self.configuration.host + resource_path - - # perform request and return response - response_data = self.request( - method, url, query_params=query_params, headers=header_params, - post_params=post_params, body=body, - _preload_content=_preload_content, - _request_timeout=_request_timeout) - - self.last_response = response_data - - return_data = response_data - if _preload_content: - # deserialize response data - if response_type: - return_data = self.deserialize(response_data, response_type) - else: - return_data = None - - if 
_return_http_data_only: - return (return_data) - else: - return (return_data, response_data.status, - response_data.getheaders()) - - def sanitize_for_serialization(self, obj): - """Builds a JSON POST object. - - If obj is None, return None. - If obj is str, int, long, float, bool, return directly. - If obj is datetime.datetime, datetime.date - convert to string in iso8601 format. - If obj is list, sanitize each element in the list. - If obj is dict, return the dict. - If obj is swagger model, return the properties dict. - - :param obj: The data to serialize. - :return: The serialized form of data. - """ - if obj is None: - return None - elif isinstance(obj, self.PRIMITIVE_TYPES): - return obj - elif isinstance(obj, list): - return [self.sanitize_for_serialization(sub_obj) - for sub_obj in obj] - elif isinstance(obj, tuple): - return tuple(self.sanitize_for_serialization(sub_obj) - for sub_obj in obj) - elif isinstance(obj, (datetime.datetime, datetime.date)): - return obj.isoformat() - - if isinstance(obj, dict): - obj_dict = obj - else: - # Convert model obj to dict except - # attributes `swagger_types`, `attribute_map` - # and attributes which value is not None. - # Convert attribute name to json key in - # model definition for request. - obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) - for attr, _ in six.iteritems(obj.swagger_types) - if getattr(obj, attr) is not None} - - return {key: self.sanitize_for_serialization(val) - for key, val in six.iteritems(obj_dict)} - - def deserialize(self, response, response_type): - """Deserializes response into an object. - - :param response: RESTResponse object to be deserialized. - :param response_type: class literal for - deserialized object, or string of class name. - - :return: deserialized object. - """ - # handle file downloading - # save response body into a tmp file and return the instance - if response_type == "file": - return self.__deserialize_file(response) - - # fetch data from response object - try: - data = json.loads(response.data) - except ValueError: - data = response.data - - return self.__deserialize(data, response_type) - - def __deserialize(self, data, klass): - """Deserializes dict, list, str into an object. - - :param data: dict, list or str. - :param klass: class literal, or string of class name. - - :return: object. 
- """ - if data is None: - return None - - if type(klass) == str: - if klass.startswith('list['): - sub_kls = re.match(r'list\[(.*)\]', klass).group(1) - return [self.__deserialize(sub_data, sub_kls) - for sub_data in data] - - if klass.startswith('dict('): - sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2) - return {k: self.__deserialize(v, sub_kls) - for k, v in six.iteritems(data)} - - # convert str to class - if klass in self.NATIVE_TYPES_MAPPING: - klass = self.NATIVE_TYPES_MAPPING[klass] - else: - klass = getattr(edu_sharing_client.models, klass) - - if klass in self.PRIMITIVE_TYPES: - return self.__deserialize_primitive(data, klass) - elif klass == object: - return self.__deserialize_object(data) - elif klass == datetime.date: - return self.__deserialize_date(data) - elif klass == datetime.datetime: - return self.__deserialize_datatime(data) - else: - return self.__deserialize_model(data, klass) - - def call_api(self, resource_path, method, - path_params=None, query_params=None, header_params=None, - body=None, post_params=None, files=None, - response_type=None, auth_settings=None, async_req=None, - _return_http_data_only=None, collection_formats=None, - _preload_content=True, _request_timeout=None): - """Makes the HTTP request (synchronous) and returns deserialized data. - - To make an async request, set the async_req parameter. - - :param resource_path: Path to method endpoint. - :param method: Method to call. - :param path_params: Path parameters in the url. - :param query_params: Query parameters in the url. - :param header_params: Header parameters to be - placed in the request header. - :param body: Request body. - :param post_params dict: Request post form parameters, - for `application/x-www-form-urlencoded`, `multipart/form-data`. - :param auth_settings list: Auth Settings names for the request. - :param response: Response data type. - :param files dict: key -> filename, value -> filepath, - for `multipart/form-data`. - :param async_req bool: execute request asynchronously - :param _return_http_data_only: response data without head status code - and headers - :param collection_formats: dict of collection formats for path, query, - header, and post parameters. - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: - If async_req parameter is True, - the request will be called asynchronously. - The method will return the request thread. - If parameter async_req is False or missing, - then the method will return the response directly. 
- """ - if not async_req: - return self.__call_api(resource_path, method, - path_params, query_params, header_params, - body, post_params, files, - response_type, auth_settings, - _return_http_data_only, collection_formats, - _preload_content, _request_timeout) - else: - thread = self.pool.apply_async(self.__call_api, (resource_path, - method, path_params, query_params, - header_params, body, - post_params, files, - response_type, auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, _request_timeout)) - return thread - - def request(self, method, url, query_params=None, headers=None, - post_params=None, body=None, _preload_content=True, - _request_timeout=None): - """Makes the HTTP request using RESTClient.""" - if method == "GET": - return self.rest_client.GET(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "HEAD": - return self.rest_client.HEAD(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "OPTIONS": - return self.rest_client.OPTIONS(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "POST": - return self.rest_client.POST(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PUT": - return self.rest_client.PUT(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PATCH": - return self.rest_client.PATCH(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "DELETE": - return self.rest_client.DELETE(url, - query_params=query_params, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - else: - raise ValueError( - "http method must be `GET`, `HEAD`, `OPTIONS`," - " `POST`, `PATCH`, `PUT` or `DELETE`." - ) - - def parameters_to_tuples(self, params, collection_formats): - """Get parameters as list of tuples, formatting collections. - - :param params: Parameters as dict or list of two-tuples - :param dict collection_formats: Parameter collection formats - :return: Parameters as list of tuples, collections formatted - """ - new_params = [] - if collection_formats is None: - collection_formats = {} - for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501 - if k in collection_formats: - collection_format = collection_formats[k] - if collection_format == 'multi': - new_params.extend((k, value) for value in v) - else: - if collection_format == 'ssv': - delimiter = ' ' - elif collection_format == 'tsv': - delimiter = '\t' - elif collection_format == 'pipes': - delimiter = '|' - else: # csv is the default - delimiter = ',' - new_params.append( - (k, delimiter.join(str(value) for value in v))) - else: - new_params.append((k, v)) - return new_params - - def prepare_post_parameters(self, post_params=None, files=None): - """Builds form parameters. - - :param post_params: Normal form parameters. - :param files: File parameters. - :return: Form parameters with files. 
- """ - params = [] - - if post_params: - params = post_params - - if files: - for k, v in six.iteritems(files): - if not v: - continue - file_names = v if type(v) is list else [v] - for n in file_names: - with open(n, 'rb') as f: - filename = os.path.basename(f.name) - filedata = f.read() - mimetype = (mimetypes.guess_type(filename)[0] or - 'application/octet-stream') - params.append( - tuple([k, tuple([filename, filedata, mimetype])])) - - return params - - def select_header_accept(self, accepts): - """Returns `Accept` based on an array of accepts provided. - - :param accepts: List of headers. - :return: Accept (e.g. application/json). - """ - if not accepts: - return - - accepts = [x.lower() for x in accepts] - - if 'application/json' in accepts: - return 'application/json' - else: - return ', '.join(accepts) - - def select_header_content_type(self, content_types): - """Returns `Content-Type` based on an array of content_types provided. - - :param content_types: List of content-types. - :return: Content-Type (e.g. application/json). - """ - if not content_types: - return 'application/json' - - content_types = [x.lower() for x in content_types] - - if 'application/json' in content_types or '*/*' in content_types: - return 'application/json' - else: - return content_types[0] - - def update_params_for_auth(self, headers, querys, auth_settings): - """Updates header and query params based on authentication setting. - - :param headers: Header parameters dict to be updated. - :param querys: Query parameters tuple list to be updated. - :param auth_settings: Authentication setting identifiers list. - """ - if not auth_settings: - return - - for auth in auth_settings: - auth_setting = self.configuration.auth_settings().get(auth) - if auth_setting: - if not auth_setting['value']: - continue - elif auth_setting['in'] == 'header': - headers[auth_setting['key']] = auth_setting['value'] - elif auth_setting['in'] == 'query': - querys.append((auth_setting['key'], auth_setting['value'])) - else: - raise ValueError( - 'Authentication token must be in `query` or `header`' - ) - - def __deserialize_file(self, response): - """Deserializes body to file - - Saves response body into a file in a temporary folder, - using the filename from the `Content-Disposition` header if provided. - - :param response: RESTResponse. - :return: file path. - """ - fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) - os.close(fd) - os.remove(path) - - content_disposition = response.getheader("Content-Disposition") - if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', - content_disposition).group(1) - path = os.path.join(os.path.dirname(path), filename) - - with open(path, "wb") as f: - f.write(response.data) - - return path - - def __deserialize_primitive(self, data, klass): - """Deserializes string to primitive type. - - :param data: str. - :param klass: class literal. - - :return: int, long, float, str, bool. - """ - try: - return klass(data) - except UnicodeEncodeError: - return six.text_type(data) - except TypeError: - return data - - def __deserialize_object(self, value): - """Return a original value. - - :return: object. - """ - return value - - def __deserialize_date(self, string): - """Deserializes string to date. - - :param string: str. - :return: date. 
- """ - try: - from dateutil.parser import parse - return parse(string).date() - except ImportError: - return string - except ValueError: - raise rest.ApiException( - status=0, - reason="Failed to parse `{0}` as date object".format(string) - ) - - def __deserialize_datatime(self, string): - """Deserializes string to datetime. - - The string should be in iso8601 datetime format. - - :param string: str. - :return: datetime. - """ - try: - from dateutil.parser import parse - return parse(string) - except ImportError: - return string - except ValueError: - raise rest.ApiException( - status=0, - reason=( - "Failed to parse `{0}` as datetime object" - .format(string) - ) - ) - - def __hasattr(self, object, name): - return name in object.__class__.__dict__ - - def __deserialize_model(self, data, klass): - """Deserializes list or dict to model. - - :param data: dict, list. - :param klass: class literal. - :return: model object. - """ - - if not klass.swagger_types and not self.__hasattr(klass, 'get_real_child_model'): - return data - - kwargs = {} - if klass.swagger_types is not None: - for attr, attr_type in six.iteritems(klass.swagger_types): - if (data is not None and - klass.attribute_map[attr] in data and - isinstance(data, (list, dict))): - value = data[klass.attribute_map[attr]] - kwargs[attr] = self.__deserialize(value, attr_type) - - instance = klass(**kwargs) - - if (isinstance(instance, dict) and - klass.swagger_types is not None and - isinstance(data, dict)): - for key, value in data.items(): - if key not in klass.swagger_types: - instance[key] = value - if self.__hasattr(instance, 'get_real_child_model'): - klass_name = instance.get_real_child_model(data) - if klass_name: - instance = self.__deserialize(data, klass_name) - return instance diff --git a/edu_sharing_client/configuration.py b/edu_sharing_client/configuration.py deleted file mode 100644 index 0d63b560..00000000 --- a/edu_sharing_client/configuration.py +++ /dev/null @@ -1,244 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import copy -import logging -import multiprocessing -import sys -import urllib3 - -import six -from six.moves import http_client as httplib - - -class TypeWithDefault(type): - def __init__(cls, name, bases, dct): - super(TypeWithDefault, cls).__init__(name, bases, dct) - cls._default = None - - def __call__(cls): - if cls._default is None: - cls._default = type.__call__(cls) - return copy.copy(cls._default) - - def set_default(cls, default): - cls._default = copy.copy(default) - - -class Configuration(six.with_metaclass(TypeWithDefault, object)): - """NOTE: This class is auto generated by the swagger code generator program. - - Ref: https://github.com/swagger-api/swagger-codegen - Do not edit the class manually. - """ - - def __init__(self): - """Constructor""" - # Default Base url - self.host = "/edu-sharing/rest" - # Temp file folder for downloading files - self.temp_folder_path = None - - # Authentication Settings - # dict to store API key(s) - self.api_key = {} - # dict to store API prefix (e.g. 
Bearer) - self.api_key_prefix = {} - # function to refresh API key if expired - self.refresh_api_key_hook = None - # Username for HTTP basic authentication - self.username = "" - # Password for HTTP basic authentication - self.password = "" - # Logging Settings - self.logger = {} - self.logger["package_logger"] = logging.getLogger("edu_sharing_client") - self.logger["urllib3_logger"] = logging.getLogger("urllib3") - # Log format - self.logger_format = '%(asctime)s %(levelname)s %(message)s' - # Log stream handler - self.logger_stream_handler = None - # Log file handler - self.logger_file_handler = None - # Debug file location - self.logger_file = None - # Debug switch - self.debug = False - - # SSL/TLS verification - # Set this to false to skip verifying SSL certificate when calling API - # from https server. - self.verify_ssl = True - # Set this to customize the certificate file to verify the peer. - self.ssl_ca_cert = None - # client certificate file - self.cert_file = None - # client key file - self.key_file = None - # Set this to True/False to enable/disable SSL hostname verification. - self.assert_hostname = None - - # urllib3 connection pool's maximum number of connections saved - # per pool. urllib3 uses 1 connection as default value, but this is - # not the best value when you are making a lot of possibly parallel - # requests to the same host, which is often the case here. - # cpu_count * 5 is used as default value to increase performance. - self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 - - # Proxy URL - self.proxy = None - # Safe chars for path_param - self.safe_chars_for_path_param = '' - - @property - def logger_file(self): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - return self.__logger_file - - @logger_file.setter - def logger_file(self, value): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - self.__logger_file = value - if self.__logger_file: - # If set logging file, - # then add file handler and remove stream handler. - self.logger_file_handler = logging.FileHandler(self.__logger_file) - self.logger_file_handler.setFormatter(self.logger_formatter) - for _, logger in six.iteritems(self.logger): - logger.addHandler(self.logger_file_handler) - if self.logger_stream_handler: - logger.removeHandler(self.logger_stream_handler) - else: - # If not set logging file, - # then add stream handler and remove file handler. - self.logger_stream_handler = logging.StreamHandler() - self.logger_stream_handler.setFormatter(self.logger_formatter) - for _, logger in six.iteritems(self.logger): - logger.addHandler(self.logger_stream_handler) - if self.logger_file_handler: - logger.removeHandler(self.logger_file_handler) - - @property - def debug(self): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - return self.__debug - - @debug.setter - def debug(self, value): - """Debug status - - :param value: The debug status, True or False. 
- :type: bool - """ - self.__debug = value - if self.__debug: - # if debug status is True, turn on debug logging - for _, logger in six.iteritems(self.logger): - logger.setLevel(logging.DEBUG) - # turn on httplib debug - httplib.HTTPConnection.debuglevel = 1 - else: - # if debug status is False, turn off debug logging, - # setting log level to default `logging.WARNING` - for _, logger in six.iteritems(self.logger): - logger.setLevel(logging.WARNING) - # turn off httplib debug - httplib.HTTPConnection.debuglevel = 0 - - @property - def logger_format(self): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - return self.__logger_format - - @logger_format.setter - def logger_format(self, value): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - self.__logger_format = value - self.logger_formatter = logging.Formatter(self.__logger_format) - - def get_api_key_with_prefix(self, identifier): - """Gets API key (with prefix if set). - - :param identifier: The identifier of apiKey. - :return: The token for api key authentication. - """ - if self.refresh_api_key_hook: - self.refresh_api_key_hook(self) - - key = self.api_key.get(identifier) - if key: - prefix = self.api_key_prefix.get(identifier) - if prefix: - return "%s %s" % (prefix, key) - else: - return key - - def get_basic_auth_token(self): - """Gets HTTP basic authentication header (string). - - :return: The token for basic HTTP authentication. - """ - return urllib3.util.make_headers( - basic_auth=self.username + ':' + self.password - ).get('authorization') - - def auth_settings(self): - """Gets Auth Settings dict for api client. - - :return: The Auth Settings information dict. - """ - return { - } - - def to_debug_report(self): - """Gets the essential information for debugging. - - :return: The report for debugging. - """ - return "Python SDK Debug Report:\n"\ - "OS: {env}\n"\ - "Python Version: {pyversion}\n"\ - "Version of the API: 1.1\n"\ - "SDK Package Version: 6.0-DEV".\ - format(env=sys.platform, pyversion=sys.version) diff --git a/edu_sharing_client/models/__init__.py b/edu_sharing_client/models/__init__.py deleted file mode 100644 index b7c2b993..00000000 --- a/edu_sharing_client/models/__init__.py +++ /dev/null @@ -1,235 +0,0 @@ -# coding: utf-8 - -# flake8: noqa -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -# import models into model package -from edu_sharing_client.models.ace import ACE -from edu_sharing_client.models.acl import ACL -from edu_sharing_client.models.about import About -from edu_sharing_client.models.accumulated_ratings import AccumulatedRatings -from edu_sharing_client.models.admin import Admin -from edu_sharing_client.models.admin_statistics import AdminStatistics -from edu_sharing_client.models.application import Application -from edu_sharing_client.models.audience import Audience -from edu_sharing_client.models.authority import Authority -from edu_sharing_client.models.authority_entries import AuthorityEntries -from edu_sharing_client.models.available_mds import AvailableMds -from edu_sharing_client.models.banner import Banner -from edu_sharing_client.models.body import Body -from edu_sharing_client.models.body1 import Body1 -from edu_sharing_client.models.body10 import Body10 -from edu_sharing_client.models.body11 import Body11 -from edu_sharing_client.models.body2 import Body2 -from edu_sharing_client.models.body3 import Body3 -from edu_sharing_client.models.body4 import Body4 -from edu_sharing_client.models.body5 import Body5 -from edu_sharing_client.models.body6 import Body6 -from edu_sharing_client.models.body7 import Body7 -from edu_sharing_client.models.body8 import Body8 -from edu_sharing_client.models.body9 import Body9 -from edu_sharing_client.models.cache_cluster import CacheCluster -from edu_sharing_client.models.cache_info import CacheInfo -from edu_sharing_client.models.cache_member import CacheMember -from edu_sharing_client.models.catalog import Catalog -from edu_sharing_client.models.collection import Collection -from edu_sharing_client.models.collection_counts import CollectionCounts -from edu_sharing_client.models.collection_entries import CollectionEntries -from edu_sharing_client.models.collection_entry import CollectionEntry -from edu_sharing_client.models.collection_feedback import CollectionFeedback -from edu_sharing_client.models.collection_options import CollectionOptions -from edu_sharing_client.models.collection_reference import CollectionReference -from edu_sharing_client.models.collections import Collections -from edu_sharing_client.models.collections_result import CollectionsResult -from edu_sharing_client.models.column_v2 import ColumnV2 -from edu_sharing_client.models.comment import Comment -from edu_sharing_client.models.comments import Comments -from edu_sharing_client.models.condition import Condition -from edu_sharing_client.models.config import Config -from edu_sharing_client.models.connector import Connector -from edu_sharing_client.models.connector_file_type import ConnectorFileType -from edu_sharing_client.models.connector_list import ConnectorList -from edu_sharing_client.models.content import Content -from edu_sharing_client.models.context_menu_entry import ContextMenuEntry -from edu_sharing_client.models.counts import Counts -from edu_sharing_client.models.create import Create -from edu_sharing_client.models.delete_option import DeleteOption -from edu_sharing_client.models.dynamic_config import DynamicConfig -from edu_sharing_client.models.element import Element -from edu_sharing_client.models.error_response import ErrorResponse -from edu_sharing_client.models.excel_result import ExcelResult -from edu_sharing_client.models.facette import Facette -from 
edu_sharing_client.models.filter import Filter -from edu_sharing_client.models.filter_entry import FilterEntry -from edu_sharing_client.models.frontpage import Frontpage -from edu_sharing_client.models.general import General -from edu_sharing_client.models.geo import Geo -from edu_sharing_client.models.group import Group -from edu_sharing_client.models.group_entries import GroupEntries -from edu_sharing_client.models.group_entry import GroupEntry -from edu_sharing_client.models.group_profile import GroupProfile -from edu_sharing_client.models.group_v2 import GroupV2 -from edu_sharing_client.models.guest import Guest -from edu_sharing_client.models.help_menu_options import HelpMenuOptions -from edu_sharing_client.models.home_folder_options import HomeFolderOptions -from edu_sharing_client.models.icon import Icon -from edu_sharing_client.models.image import Image -from edu_sharing_client.models.interface import Interface -from edu_sharing_client.models.job_detail import JobDetail -from edu_sharing_client.models.job_info import JobInfo -from edu_sharing_client.models.key import Key -from edu_sharing_client.models.key_value_pair import KeyValuePair -from edu_sharing_client.models.language import Language -from edu_sharing_client.models.level import Level -from edu_sharing_client.models.license import License -from edu_sharing_client.models.license_agreement import LicenseAgreement -from edu_sharing_client.models.license_agreement_node import LicenseAgreementNode -from edu_sharing_client.models.list_v2 import ListV2 -from edu_sharing_client.models.location import Location -from edu_sharing_client.models.log_entry import LogEntry -from edu_sharing_client.models.login import Login -from edu_sharing_client.models.login_credentials import LoginCredentials -from edu_sharing_client.models.logout_info import LogoutInfo -from edu_sharing_client.models.mainnav import Mainnav -from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult -from edu_sharing_client.models.mds import Mds -from edu_sharing_client.models.mds_entries_v2 import MdsEntriesV2 -from edu_sharing_client.models.mds_entry import MdsEntry -from edu_sharing_client.models.mds_form import MdsForm -from edu_sharing_client.models.mds_form_panel import MdsFormPanel -from edu_sharing_client.models.mds_form_property import MdsFormProperty -from edu_sharing_client.models.mds_form_property_parameter import MdsFormPropertyParameter -from edu_sharing_client.models.mds_form_property_value import MdsFormPropertyValue -from edu_sharing_client.models.mds_list import MdsList -from edu_sharing_client.models.mds_list_property import MdsListProperty -from edu_sharing_client.models.mds_list_property_parameter import MdsListPropertyParameter -from edu_sharing_client.models.mds_list_property_value import MdsListPropertyValue -from edu_sharing_client.models.mds_property import MdsProperty -from edu_sharing_client.models.mds_queries import MdsQueries -from edu_sharing_client.models.mds_query import MdsQuery -from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria -from edu_sharing_client.models.mds_query_property import MdsQueryProperty -from edu_sharing_client.models.mds_query_property_parameter import MdsQueryPropertyParameter -from edu_sharing_client.models.mds_query_property_value import MdsQueryPropertyValue -from edu_sharing_client.models.mds_ref import MdsRef -from edu_sharing_client.models.mds_type import MdsType -from edu_sharing_client.models.mds_v2 import MdsV2 -from edu_sharing_client.models.mds_view import MdsView 
-from edu_sharing_client.models.mds_view_property import MdsViewProperty -from edu_sharing_client.models.mds_view_property_parameter import MdsViewPropertyParameter -from edu_sharing_client.models.mds_view_property_value import MdsViewPropertyValue -from edu_sharing_client.models.mediacenter import Mediacenter -from edu_sharing_client.models.mediacenter_profile_extension import MediacenterProfileExtension -from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult -from edu_sharing_client.models.menu_entry import MenuEntry -from edu_sharing_client.models.metadata_set_info import MetadataSetInfo -from edu_sharing_client.models.node import Node -from edu_sharing_client.models.node_entries import NodeEntries -from edu_sharing_client.models.node_entry import NodeEntry -from edu_sharing_client.models.node_locked import NodeLocked -from edu_sharing_client.models.node_permission_entry import NodePermissionEntry -from edu_sharing_client.models.node_permissions import NodePermissions -from edu_sharing_client.models.node_ref import NodeRef -from edu_sharing_client.models.node_remote import NodeRemote -from edu_sharing_client.models.node_share import NodeShare -from edu_sharing_client.models.node_text import NodeText -from edu_sharing_client.models.node_version import NodeVersion -from edu_sharing_client.models.node_version_entry import NodeVersionEntry -from edu_sharing_client.models.node_version_ref import NodeVersionRef -from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries -from edu_sharing_client.models.notify_entry import NotifyEntry -from edu_sharing_client.models.organisations_import_result import OrganisationsImportResult -from edu_sharing_client.models.organization import Organization -from edu_sharing_client.models.organization_entries import OrganizationEntries -from edu_sharing_client.models.pagination import Pagination -from edu_sharing_client.models.parameters import Parameters -from edu_sharing_client.models.parent_entries import ParentEntries -from edu_sharing_client.models.person import Person -from edu_sharing_client.models.person_delete_options import PersonDeleteOptions -from edu_sharing_client.models.person_delete_result import PersonDeleteResult -from edu_sharing_client.models.person_report import PersonReport -from edu_sharing_client.models.preferences import Preferences -from edu_sharing_client.models.preview import Preview -from edu_sharing_client.models.profile import Profile -from edu_sharing_client.models.provider import Provider -from edu_sharing_client.models.query import Query -from edu_sharing_client.models.rating_data import RatingData -from edu_sharing_client.models.reference_entries import ReferenceEntries -from edu_sharing_client.models.register import Register -from edu_sharing_client.models.register_exists import RegisterExists -from edu_sharing_client.models.register_information import RegisterInformation -from edu_sharing_client.models.remote import Remote -from edu_sharing_client.models.remote_auth_description import RemoteAuthDescription -from edu_sharing_client.models.rendering import Rendering -from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry -from edu_sharing_client.models.repo import Repo -from edu_sharing_client.models.repo_entries import RepoEntries -from edu_sharing_client.models.repository_config import RepositoryConfig -from edu_sharing_client.models.restore_result import RestoreResult -from edu_sharing_client.models.restore_results import RestoreResults 
-from edu_sharing_client.models.search_parameters import SearchParameters -from edu_sharing_client.models.search_result import SearchResult -from edu_sharing_client.models.search_result_node import SearchResultNode -from edu_sharing_client.models.serializable import Serializable -from edu_sharing_client.models.server_update_info import ServerUpdateInfo -from edu_sharing_client.models.service import Service -from edu_sharing_client.models.service_instance import ServiceInstance -from edu_sharing_client.models.service_version import ServiceVersion -from edu_sharing_client.models.services import Services -from edu_sharing_client.models.session_expired_dialog import SessionExpiredDialog -from edu_sharing_client.models.shared_folder_options import SharedFolderOptions -from edu_sharing_client.models.sharing_info import SharingInfo -from edu_sharing_client.models.simple_edit import SimpleEdit -from edu_sharing_client.models.sort_column_v2 import SortColumnV2 -from edu_sharing_client.models.sort_v2 import SortV2 -from edu_sharing_client.models.sort_v2_default import SortV2Default -from edu_sharing_client.models.statistic_entity import StatisticEntity -from edu_sharing_client.models.statistic_entry import StatisticEntry -from edu_sharing_client.models.statistics import Statistics -from edu_sharing_client.models.statistics_global import StatisticsGlobal -from edu_sharing_client.models.statistics_group import StatisticsGroup -from edu_sharing_client.models.statistics_key_group import StatisticsKeyGroup -from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup -from edu_sharing_client.models.stored_service import StoredService -from edu_sharing_client.models.stream import Stream -from edu_sharing_client.models.stream_entry import StreamEntry -from edu_sharing_client.models.stream_entry_input import StreamEntryInput -from edu_sharing_client.models.stream_list import StreamList -from edu_sharing_client.models.sub_group_item import SubGroupItem -from edu_sharing_client.models.subwidget import Subwidget -from edu_sharing_client.models.suggestion_param import SuggestionParam -from edu_sharing_client.models.tracking import Tracking -from edu_sharing_client.models.tracking_node import TrackingNode -from edu_sharing_client.models.upload_result import UploadResult -from edu_sharing_client.models.usage import Usage -from edu_sharing_client.models.usages import Usages -from edu_sharing_client.models.user import User -from edu_sharing_client.models.user_credential import UserCredential -from edu_sharing_client.models.user_entries import UserEntries -from edu_sharing_client.models.user_entry import UserEntry -from edu_sharing_client.models.user_profile import UserProfile -from edu_sharing_client.models.user_profile_edit import UserProfileEdit -from edu_sharing_client.models.user_quota import UserQuota -from edu_sharing_client.models.user_simple import UserSimple -from edu_sharing_client.models.user_stats import UserStats -from edu_sharing_client.models.user_status import UserStatus -from edu_sharing_client.models.value import Value -from edu_sharing_client.models.value_parameters import ValueParameters -from edu_sharing_client.models.value_v2 import ValueV2 -from edu_sharing_client.models.values import Values -from edu_sharing_client.models.variables import Variables -from edu_sharing_client.models.view_v2 import ViewV2 -from edu_sharing_client.models.website_information import WebsiteInformation -from edu_sharing_client.models.widget_v2 import WidgetV2 -from edu_sharing_client.models.workflow 
import Workflow -from edu_sharing_client.models.workflow_history import WorkflowHistory diff --git a/edu_sharing_client/models/about.py b/edu_sharing_client/models/about.py deleted file mode 100644 index d7bdc1da..00000000 --- a/edu_sharing_client/models/about.py +++ /dev/null @@ -1,191 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class About(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'themes_url': 'str', - 'last_cache_update': 'int', - 'version': 'ServiceVersion', - 'services': 'list[Service]' - } - - attribute_map = { - 'themes_url': 'themesUrl', - 'last_cache_update': 'lastCacheUpdate', - 'version': 'version', - 'services': 'services' - } - - def __init__(self, themes_url=None, last_cache_update=None, version=None, services=None): # noqa: E501 - """About - a model defined in Swagger""" # noqa: E501 - self._themes_url = None - self._last_cache_update = None - self._version = None - self._services = None - self.discriminator = None - if themes_url is not None: - self.themes_url = themes_url - if last_cache_update is not None: - self.last_cache_update = last_cache_update - self.version = version - self.services = services - - @property - def themes_url(self): - """Gets the themes_url of this About. # noqa: E501 - - - :return: The themes_url of this About. # noqa: E501 - :rtype: str - """ - return self._themes_url - - @themes_url.setter - def themes_url(self, themes_url): - """Sets the themes_url of this About. - - - :param themes_url: The themes_url of this About. # noqa: E501 - :type: str - """ - - self._themes_url = themes_url - - @property - def last_cache_update(self): - """Gets the last_cache_update of this About. # noqa: E501 - - - :return: The last_cache_update of this About. # noqa: E501 - :rtype: int - """ - return self._last_cache_update - - @last_cache_update.setter - def last_cache_update(self, last_cache_update): - """Sets the last_cache_update of this About. - - - :param last_cache_update: The last_cache_update of this About. # noqa: E501 - :type: int - """ - - self._last_cache_update = last_cache_update - - @property - def version(self): - """Gets the version of this About. # noqa: E501 - - - :return: The version of this About. # noqa: E501 - :rtype: ServiceVersion - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this About. - - - :param version: The version of this About. # noqa: E501 - :type: ServiceVersion - """ - if version is None: - raise ValueError("Invalid value for `version`, must not be `None`") # noqa: E501 - - self._version = version - - @property - def services(self): - """Gets the services of this About. # noqa: E501 - - - :return: The services of this About. # noqa: E501 - :rtype: list[Service] - """ - return self._services - - @services.setter - def services(self, services): - """Sets the services of this About. - - - :param services: The services of this About. 
# noqa: E501 - :type: list[Service] - """ - if services is None: - raise ValueError("Invalid value for `services`, must not be `None`") # noqa: E501 - - self._services = services - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(About, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, About): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/accumulated_ratings.py b/edu_sharing_client/models/accumulated_ratings.py deleted file mode 100644 index 8e6f9da0..00000000 --- a/edu_sharing_client/models/accumulated_ratings.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class AccumulatedRatings(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'overall': 'RatingData', - 'user': 'float', - 'affiliation': 'dict(str, RatingData)' - } - - attribute_map = { - 'overall': 'overall', - 'user': 'user', - 'affiliation': 'affiliation' - } - - def __init__(self, overall=None, user=None, affiliation=None): # noqa: E501 - """AccumulatedRatings - a model defined in Swagger""" # noqa: E501 - self._overall = None - self._user = None - self._affiliation = None - self.discriminator = None - if overall is not None: - self.overall = overall - if user is not None: - self.user = user - if affiliation is not None: - self.affiliation = affiliation - - @property - def overall(self): - """Gets the overall of this AccumulatedRatings. # noqa: E501 - - - :return: The overall of this AccumulatedRatings. # noqa: E501 - :rtype: RatingData - """ - return self._overall - - @overall.setter - def overall(self, overall): - """Sets the overall of this AccumulatedRatings. - - - :param overall: The overall of this AccumulatedRatings. # noqa: E501 - :type: RatingData - """ - - self._overall = overall - - @property - def user(self): - """Gets the user of this AccumulatedRatings. # noqa: E501 - - - :return: The user of this AccumulatedRatings. 
# noqa: E501 - :rtype: float - """ - return self._user - - @user.setter - def user(self, user): - """Sets the user of this AccumulatedRatings. - - - :param user: The user of this AccumulatedRatings. # noqa: E501 - :type: float - """ - - self._user = user - - @property - def affiliation(self): - """Gets the affiliation of this AccumulatedRatings. # noqa: E501 - - - :return: The affiliation of this AccumulatedRatings. # noqa: E501 - :rtype: dict(str, RatingData) - """ - return self._affiliation - - @affiliation.setter - def affiliation(self, affiliation): - """Sets the affiliation of this AccumulatedRatings. - - - :param affiliation: The affiliation of this AccumulatedRatings. # noqa: E501 - :type: dict(str, RatingData) - """ - - self._affiliation = affiliation - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(AccumulatedRatings, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, AccumulatedRatings): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/ace.py b/edu_sharing_client/models/ace.py deleted file mode 100644 index 01cc49fe..00000000 --- a/edu_sharing_client/models/ace.py +++ /dev/null @@ -1,217 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ACE(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'editable': 'bool', - 'authority': 'Authority', - 'user': 'UserProfile', - 'group': 'GroupProfile', - 'permissions': 'list[str]' - } - - attribute_map = { - 'editable': 'editable', - 'authority': 'authority', - 'user': 'user', - 'group': 'group', - 'permissions': 'permissions' - } - - def __init__(self, editable=False, authority=None, user=None, group=None, permissions=None): # noqa: E501 - """ACE - a model defined in Swagger""" # noqa: E501 - self._editable = None - self._authority = None - self._user = None - self._group = None - self._permissions = None - self.discriminator = None - if editable is not None: - self.editable = editable - self.authority = authority - if user is not None: - self.user = user - if group is not None: - self.group = group - self.permissions = permissions - - @property - def editable(self): - """Gets the editable of this ACE. # noqa: E501 - - - :return: The editable of this ACE. # noqa: E501 - :rtype: bool - """ - return self._editable - - @editable.setter - def editable(self, editable): - """Sets the editable of this ACE. - - - :param editable: The editable of this ACE. # noqa: E501 - :type: bool - """ - - self._editable = editable - - @property - def authority(self): - """Gets the authority of this ACE. # noqa: E501 - - - :return: The authority of this ACE. # noqa: E501 - :rtype: Authority - """ - return self._authority - - @authority.setter - def authority(self, authority): - """Sets the authority of this ACE. - - - :param authority: The authority of this ACE. # noqa: E501 - :type: Authority - """ - if authority is None: - raise ValueError("Invalid value for `authority`, must not be `None`") # noqa: E501 - - self._authority = authority - - @property - def user(self): - """Gets the user of this ACE. # noqa: E501 - - - :return: The user of this ACE. # noqa: E501 - :rtype: UserProfile - """ - return self._user - - @user.setter - def user(self, user): - """Sets the user of this ACE. - - - :param user: The user of this ACE. # noqa: E501 - :type: UserProfile - """ - - self._user = user - - @property - def group(self): - """Gets the group of this ACE. # noqa: E501 - - - :return: The group of this ACE. # noqa: E501 - :rtype: GroupProfile - """ - return self._group - - @group.setter - def group(self, group): - """Sets the group of this ACE. - - - :param group: The group of this ACE. # noqa: E501 - :type: GroupProfile - """ - - self._group = group - - @property - def permissions(self): - """Gets the permissions of this ACE. # noqa: E501 - - - :return: The permissions of this ACE. # noqa: E501 - :rtype: list[str] - """ - return self._permissions - - @permissions.setter - def permissions(self, permissions): - """Sets the permissions of this ACE. - - - :param permissions: The permissions of this ACE. 
# noqa: E501 - :type: list[str] - """ - if permissions is None: - raise ValueError("Invalid value for `permissions`, must not be `None`") # noqa: E501 - - self._permissions = permissions - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ACE, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ACE): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/acl.py b/edu_sharing_client/models/acl.py deleted file mode 100644 index e6aa0a7c..00000000 --- a/edu_sharing_client/models/acl.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ACL(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'inherited': 'bool', - 'permissions': 'list[ACE]' - } - - attribute_map = { - 'inherited': 'inherited', - 'permissions': 'permissions' - } - - def __init__(self, inherited=False, permissions=None): # noqa: E501 - """ACL - a model defined in Swagger""" # noqa: E501 - self._inherited = None - self._permissions = None - self.discriminator = None - self.inherited = inherited - self.permissions = permissions - - @property - def inherited(self): - """Gets the inherited of this ACL. # noqa: E501 - - - :return: The inherited of this ACL. # noqa: E501 - :rtype: bool - """ - return self._inherited - - @inherited.setter - def inherited(self, inherited): - """Sets the inherited of this ACL. - - - :param inherited: The inherited of this ACL. # noqa: E501 - :type: bool - """ - if inherited is None: - raise ValueError("Invalid value for `inherited`, must not be `None`") # noqa: E501 - - self._inherited = inherited - - @property - def permissions(self): - """Gets the permissions of this ACL. # noqa: E501 - - - :return: The permissions of this ACL. # noqa: E501 - :rtype: list[ACE] - """ - return self._permissions - - @permissions.setter - def permissions(self, permissions): - """Sets the permissions of this ACL. - - - :param permissions: The permissions of this ACL. 
# noqa: E501 - :type: list[ACE] - """ - if permissions is None: - raise ValueError("Invalid value for `permissions`, must not be `None`") # noqa: E501 - - self._permissions = permissions - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ACL, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ACL): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/admin.py b/edu_sharing_client/models/admin.py deleted file mode 100644 index ee8977d9..00000000 --- a/edu_sharing_client/models/admin.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Admin(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'statistics': 'Statistics' - } - - attribute_map = { - 'statistics': 'statistics' - } - - def __init__(self, statistics=None): # noqa: E501 - """Admin - a model defined in Swagger""" # noqa: E501 - self._statistics = None - self.discriminator = None - if statistics is not None: - self.statistics = statistics - - @property - def statistics(self): - """Gets the statistics of this Admin. # noqa: E501 - - - :return: The statistics of this Admin. # noqa: E501 - :rtype: Statistics - """ - return self._statistics - - @statistics.setter - def statistics(self, statistics): - """Sets the statistics of this Admin. - - - :param statistics: The statistics of this Admin. 
# noqa: E501 - :type: Statistics - """ - - self._statistics = statistics - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Admin, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Admin): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/admin_statistics.py b/edu_sharing_client/models/admin_statistics.py deleted file mode 100644 index cd97e749..00000000 --- a/edu_sharing_client/models/admin_statistics.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class AdminStatistics(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'active_sessions': 'int', - 'number_of_previews': 'int', - 'max_memory': 'int', - 'allocated_memory': 'int', - 'preview_cache_size': 'int', - 'active_locks': 'list[Node]' - } - - attribute_map = { - 'active_sessions': 'activeSessions', - 'number_of_previews': 'numberOfPreviews', - 'max_memory': 'maxMemory', - 'allocated_memory': 'allocatedMemory', - 'preview_cache_size': 'previewCacheSize', - 'active_locks': 'activeLocks' - } - - def __init__(self, active_sessions=None, number_of_previews=None, max_memory=None, allocated_memory=None, preview_cache_size=None, active_locks=None): # noqa: E501 - """AdminStatistics - a model defined in Swagger""" # noqa: E501 - self._active_sessions = None - self._number_of_previews = None - self._max_memory = None - self._allocated_memory = None - self._preview_cache_size = None - self._active_locks = None - self.discriminator = None - if active_sessions is not None: - self.active_sessions = active_sessions - if number_of_previews is not None: - self.number_of_previews = number_of_previews - if max_memory is not None: - self.max_memory = max_memory - if allocated_memory is not None: - self.allocated_memory = allocated_memory - if preview_cache_size is not None: - self.preview_cache_size = preview_cache_size - if active_locks is not None: - self.active_locks = active_locks - - @property - def active_sessions(self): - """Gets the active_sessions of this AdminStatistics. 
# noqa: E501 - - - :return: The active_sessions of this AdminStatistics. # noqa: E501 - :rtype: int - """ - return self._active_sessions - - @active_sessions.setter - def active_sessions(self, active_sessions): - """Sets the active_sessions of this AdminStatistics. - - - :param active_sessions: The active_sessions of this AdminStatistics. # noqa: E501 - :type: int - """ - - self._active_sessions = active_sessions - - @property - def number_of_previews(self): - """Gets the number_of_previews of this AdminStatistics. # noqa: E501 - - - :return: The number_of_previews of this AdminStatistics. # noqa: E501 - :rtype: int - """ - return self._number_of_previews - - @number_of_previews.setter - def number_of_previews(self, number_of_previews): - """Sets the number_of_previews of this AdminStatistics. - - - :param number_of_previews: The number_of_previews of this AdminStatistics. # noqa: E501 - :type: int - """ - - self._number_of_previews = number_of_previews - - @property - def max_memory(self): - """Gets the max_memory of this AdminStatistics. # noqa: E501 - - - :return: The max_memory of this AdminStatistics. # noqa: E501 - :rtype: int - """ - return self._max_memory - - @max_memory.setter - def max_memory(self, max_memory): - """Sets the max_memory of this AdminStatistics. - - - :param max_memory: The max_memory of this AdminStatistics. # noqa: E501 - :type: int - """ - - self._max_memory = max_memory - - @property - def allocated_memory(self): - """Gets the allocated_memory of this AdminStatistics. # noqa: E501 - - - :return: The allocated_memory of this AdminStatistics. # noqa: E501 - :rtype: int - """ - return self._allocated_memory - - @allocated_memory.setter - def allocated_memory(self, allocated_memory): - """Sets the allocated_memory of this AdminStatistics. - - - :param allocated_memory: The allocated_memory of this AdminStatistics. # noqa: E501 - :type: int - """ - - self._allocated_memory = allocated_memory - - @property - def preview_cache_size(self): - """Gets the preview_cache_size of this AdminStatistics. # noqa: E501 - - - :return: The preview_cache_size of this AdminStatistics. # noqa: E501 - :rtype: int - """ - return self._preview_cache_size - - @preview_cache_size.setter - def preview_cache_size(self, preview_cache_size): - """Sets the preview_cache_size of this AdminStatistics. - - - :param preview_cache_size: The preview_cache_size of this AdminStatistics. # noqa: E501 - :type: int - """ - - self._preview_cache_size = preview_cache_size - - @property - def active_locks(self): - """Gets the active_locks of this AdminStatistics. # noqa: E501 - - - :return: The active_locks of this AdminStatistics. # noqa: E501 - :rtype: list[Node] - """ - return self._active_locks - - @active_locks.setter - def active_locks(self, active_locks): - """Sets the active_locks of this AdminStatistics. - - - :param active_locks: The active_locks of this AdminStatistics. 
# noqa: E501 - :type: list[Node] - """ - - self._active_locks = active_locks - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(AdminStatistics, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, AdminStatistics): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/application.py b/edu_sharing_client/models/application.py deleted file mode 100644 index 2f05fab4..00000000 --- a/edu_sharing_client/models/application.py +++ /dev/null @@ -1,371 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Application(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'str', - 'title': 'str', - 'webserver_url': 'str', - 'client_base_url': 'str', - 'type': 'str', - 'subtype': 'str', - 'repository_type': 'str', - 'xml': 'str', - 'file': 'str', - 'content_url': 'str', - 'config_url': 'str' - } - - attribute_map = { - 'id': 'id', - 'title': 'title', - 'webserver_url': 'webserverUrl', - 'client_base_url': 'clientBaseUrl', - 'type': 'type', - 'subtype': 'subtype', - 'repository_type': 'repositoryType', - 'xml': 'xml', - 'file': 'file', - 'content_url': 'contentUrl', - 'config_url': 'configUrl' - } - - def __init__(self, id=None, title=None, webserver_url=None, client_base_url=None, type=None, subtype=None, repository_type=None, xml=None, file=None, content_url=None, config_url=None): # noqa: E501 - """Application - a model defined in Swagger""" # noqa: E501 - self._id = None - self._title = None - self._webserver_url = None - self._client_base_url = None - self._type = None - self._subtype = None - self._repository_type = None - self._xml = None - self._file = None - self._content_url = None - self._config_url = None - self.discriminator = None - if id is not None: - self.id = id - if title is not None: - self.title = title - if webserver_url is not None: - self.webserver_url = webserver_url - if client_base_url is not None: - self.client_base_url = client_base_url - if type is not None: - self.type = type - if subtype is not None: - self.subtype = subtype - if repository_type is not None: - self.repository_type = repository_type - if xml is not None: - self.xml = xml - if file is not None: - self.file = file - if content_url is not None: - self.content_url = content_url - if config_url is not None: - self.config_url = config_url - - @property - def id(self): - """Gets the id of this Application. # noqa: E501 - - - :return: The id of this Application. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this Application. - - - :param id: The id of this Application. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def title(self): - """Gets the title of this Application. # noqa: E501 - - - :return: The title of this Application. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this Application. - - - :param title: The title of this Application. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def webserver_url(self): - """Gets the webserver_url of this Application. # noqa: E501 - - - :return: The webserver_url of this Application. # noqa: E501 - :rtype: str - """ - return self._webserver_url - - @webserver_url.setter - def webserver_url(self, webserver_url): - """Sets the webserver_url of this Application. - - - :param webserver_url: The webserver_url of this Application. # noqa: E501 - :type: str - """ - - self._webserver_url = webserver_url - - @property - def client_base_url(self): - """Gets the client_base_url of this Application. # noqa: E501 - - - :return: The client_base_url of this Application. # noqa: E501 - :rtype: str - """ - return self._client_base_url - - @client_base_url.setter - def client_base_url(self, client_base_url): - """Sets the client_base_url of this Application. - - - :param client_base_url: The client_base_url of this Application. # noqa: E501 - :type: str - """ - - self._client_base_url = client_base_url - - @property - def type(self): - """Gets the type of this Application. # noqa: E501 - - - :return: The type of this Application. 
# noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Application. - - - :param type: The type of this Application. # noqa: E501 - :type: str - """ - - self._type = type - - @property - def subtype(self): - """Gets the subtype of this Application. # noqa: E501 - - - :return: The subtype of this Application. # noqa: E501 - :rtype: str - """ - return self._subtype - - @subtype.setter - def subtype(self, subtype): - """Sets the subtype of this Application. - - - :param subtype: The subtype of this Application. # noqa: E501 - :type: str - """ - - self._subtype = subtype - - @property - def repository_type(self): - """Gets the repository_type of this Application. # noqa: E501 - - - :return: The repository_type of this Application. # noqa: E501 - :rtype: str - """ - return self._repository_type - - @repository_type.setter - def repository_type(self, repository_type): - """Sets the repository_type of this Application. - - - :param repository_type: The repository_type of this Application. # noqa: E501 - :type: str - """ - - self._repository_type = repository_type - - @property - def xml(self): - """Gets the xml of this Application. # noqa: E501 - - - :return: The xml of this Application. # noqa: E501 - :rtype: str - """ - return self._xml - - @xml.setter - def xml(self, xml): - """Sets the xml of this Application. - - - :param xml: The xml of this Application. # noqa: E501 - :type: str - """ - - self._xml = xml - - @property - def file(self): - """Gets the file of this Application. # noqa: E501 - - - :return: The file of this Application. # noqa: E501 - :rtype: str - """ - return self._file - - @file.setter - def file(self, file): - """Sets the file of this Application. - - - :param file: The file of this Application. # noqa: E501 - :type: str - """ - - self._file = file - - @property - def content_url(self): - """Gets the content_url of this Application. # noqa: E501 - - - :return: The content_url of this Application. # noqa: E501 - :rtype: str - """ - return self._content_url - - @content_url.setter - def content_url(self, content_url): - """Sets the content_url of this Application. - - - :param content_url: The content_url of this Application. # noqa: E501 - :type: str - """ - - self._content_url = content_url - - @property - def config_url(self): - """Gets the config_url of this Application. # noqa: E501 - - - :return: The config_url of this Application. # noqa: E501 - :rtype: str - """ - return self._config_url - - @config_url.setter - def config_url(self, config_url): - """Sets the config_url of this Application. - - - :param config_url: The config_url of this Application. 
# noqa: E501 - :type: str - """ - - self._config_url = config_url - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Application, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Application): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/audience.py b/edu_sharing_client/models/audience.py deleted file mode 100644 index 2b433b7f..00000000 --- a/edu_sharing_client/models/audience.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Audience(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str' - } - - attribute_map = { - 'name': 'name' - } - - def __init__(self, name=None): # noqa: E501 - """Audience - a model defined in Swagger""" # noqa: E501 - self._name = None - self.discriminator = None - if name is not None: - self.name = name - - @property - def name(self): - """Gets the name of this Audience. # noqa: E501 - - - :return: The name of this Audience. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this Audience. - - - :param name: The name of this Audience. 
# noqa: E501 - :type: str - """ - - self._name = name - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Audience, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Audience): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/authority.py b/edu_sharing_client/models/authority.py deleted file mode 100644 index a272be15..00000000 --- a/edu_sharing_client/models/authority.py +++ /dev/null @@ -1,144 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Authority(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'authority_name': 'str', - 'authority_type': 'str' - } - - attribute_map = { - 'authority_name': 'authorityName', - 'authority_type': 'authorityType' - } - - def __init__(self, authority_name=None, authority_type=None): # noqa: E501 - """Authority - a model defined in Swagger""" # noqa: E501 - self._authority_name = None - self._authority_type = None - self.discriminator = None - self.authority_name = authority_name - if authority_type is not None: - self.authority_type = authority_type - - @property - def authority_name(self): - """Gets the authority_name of this Authority. # noqa: E501 - - - :return: The authority_name of this Authority. # noqa: E501 - :rtype: str - """ - return self._authority_name - - @authority_name.setter - def authority_name(self, authority_name): - """Sets the authority_name of this Authority. - - - :param authority_name: The authority_name of this Authority. # noqa: E501 - :type: str - """ - if authority_name is None: - raise ValueError("Invalid value for `authority_name`, must not be `None`") # noqa: E501 - - self._authority_name = authority_name - - @property - def authority_type(self): - """Gets the authority_type of this Authority. # noqa: E501 - - - :return: The authority_type of this Authority. # noqa: E501 - :rtype: str - """ - return self._authority_type - - @authority_type.setter - def authority_type(self, authority_type): - """Sets the authority_type of this Authority. 
- - - :param authority_type: The authority_type of this Authority. # noqa: E501 - :type: str - """ - allowed_values = ["USER", "GROUP", "OWNER", "EVERYONE", "GUEST"] # noqa: E501 - if authority_type not in allowed_values: - raise ValueError( - "Invalid value for `authority_type` ({0}), must be one of {1}" # noqa: E501 - .format(authority_type, allowed_values) - ) - - self._authority_type = authority_type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Authority, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Authority): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/authority_entries.py b/edu_sharing_client/models/authority_entries.py deleted file mode 100644 index 9bfb725a..00000000 --- a/edu_sharing_client/models/authority_entries.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class AuthorityEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'authorities': 'list[Authority]', - 'pagination': 'Pagination' - } - - attribute_map = { - 'authorities': 'authorities', - 'pagination': 'pagination' - } - - def __init__(self, authorities=None, pagination=None): # noqa: E501 - """AuthorityEntries - a model defined in Swagger""" # noqa: E501 - self._authorities = None - self._pagination = None - self.discriminator = None - self.authorities = authorities - self.pagination = pagination - - @property - def authorities(self): - """Gets the authorities of this AuthorityEntries. # noqa: E501 - - - :return: The authorities of this AuthorityEntries. # noqa: E501 - :rtype: list[Authority] - """ - return self._authorities - - @authorities.setter - def authorities(self, authorities): - """Sets the authorities of this AuthorityEntries. - - - :param authorities: The authorities of this AuthorityEntries. 
# noqa: E501 - :type: list[Authority] - """ - if authorities is None: - raise ValueError("Invalid value for `authorities`, must not be `None`") # noqa: E501 - - self._authorities = authorities - - @property - def pagination(self): - """Gets the pagination of this AuthorityEntries. # noqa: E501 - - - :return: The pagination of this AuthorityEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this AuthorityEntries. - - - :param pagination: The pagination of this AuthorityEntries. # noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(AuthorityEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, AuthorityEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/available_mds.py b/edu_sharing_client/models/available_mds.py deleted file mode 100644 index 99a7e487..00000000 --- a/edu_sharing_client/models/available_mds.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class AvailableMds(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'repository': 'str', - 'mds': 'list[str]' - } - - attribute_map = { - 'repository': 'repository', - 'mds': 'mds' - } - - def __init__(self, repository=None, mds=None): # noqa: E501 - """AvailableMds - a model defined in Swagger""" # noqa: E501 - self._repository = None - self._mds = None - self.discriminator = None - if repository is not None: - self.repository = repository - if mds is not None: - self.mds = mds - - @property - def repository(self): - """Gets the repository of this AvailableMds. # noqa: E501 - - - :return: The repository of this AvailableMds. 
# noqa: E501 - :rtype: str - """ - return self._repository - - @repository.setter - def repository(self, repository): - """Sets the repository of this AvailableMds. - - - :param repository: The repository of this AvailableMds. # noqa: E501 - :type: str - """ - - self._repository = repository - - @property - def mds(self): - """Gets the mds of this AvailableMds. # noqa: E501 - - - :return: The mds of this AvailableMds. # noqa: E501 - :rtype: list[str] - """ - return self._mds - - @mds.setter - def mds(self, mds): - """Sets the mds of this AvailableMds. - - - :param mds: The mds of this AvailableMds. # noqa: E501 - :type: list[str] - """ - - self._mds = mds - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(AvailableMds, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, AvailableMds): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/banner.py b/edu_sharing_client/models/banner.py deleted file mode 100644 index 517d344b..00000000 --- a/edu_sharing_client/models/banner.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Banner(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'url': 'str', - 'href': 'str', - 'components': 'list[str]' - } - - attribute_map = { - 'url': 'url', - 'href': 'href', - 'components': 'components' - } - - def __init__(self, url=None, href=None, components=None): # noqa: E501 - """Banner - a model defined in Swagger""" # noqa: E501 - self._url = None - self._href = None - self._components = None - self.discriminator = None - if url is not None: - self.url = url - if href is not None: - self.href = href - if components is not None: - self.components = components - - @property - def url(self): - """Gets the url of this Banner. # noqa: E501 - - - :return: The url of this Banner. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Banner. - - - :param url: The url of this Banner. 
# noqa: E501 - :type: str - """ - - self._url = url - - @property - def href(self): - """Gets the href of this Banner. # noqa: E501 - - - :return: The href of this Banner. # noqa: E501 - :rtype: str - """ - return self._href - - @href.setter - def href(self, href): - """Sets the href of this Banner. - - - :param href: The href of this Banner. # noqa: E501 - :type: str - """ - - self._href = href - - @property - def components(self): - """Gets the components of this Banner. # noqa: E501 - - - :return: The components of this Banner. # noqa: E501 - :rtype: list[str] - """ - return self._components - - @components.setter - def components(self, components): - """Sets the components of this Banner. - - - :param components: The components of this Banner. # noqa: E501 - :type: list[str] - """ - - self._components = components - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Banner, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Banner): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body.py b/edu_sharing_client/models/body.py deleted file mode 100644 index 8652c760..00000000 --- a/edu_sharing_client/models/body.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'xml': 'str' - } - - attribute_map = { - 'xml': 'xml' - } - - def __init__(self, xml=None): # noqa: E501 - """Body - a model defined in Swagger""" # noqa: E501 - self._xml = None - self.discriminator = None - self.xml = xml - - @property - def xml(self): - """Gets the xml of this Body. # noqa: E501 - - XML file for app to register # noqa: E501 - - :return: The xml of this Body. # noqa: E501 - :rtype: str - """ - return self._xml - - @xml.setter - def xml(self, xml): - """Sets the xml of this Body. - - XML file for app to register # noqa: E501 - - :param xml: The xml of this Body. 
# noqa: E501 - :type: str - """ - if xml is None: - raise ValueError("Invalid value for `xml`, must not be `None`") # noqa: E501 - - self._xml = xml - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body1.py b/edu_sharing_client/models/body1.py deleted file mode 100644 index 3bf7c631..00000000 --- a/edu_sharing_client/models/body1.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body1(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'xml': 'str' - } - - attribute_map = { - 'xml': 'xml' - } - - def __init__(self, xml=None): # noqa: E501 - """Body1 - a model defined in Swagger""" # noqa: E501 - self._xml = None - self.discriminator = None - self.xml = xml - - @property - def xml(self): - """Gets the xml of this Body1. # noqa: E501 - - XML file to parse (or zip file containing exactly 1 xml file to parse) # noqa: E501 - - :return: The xml of this Body1. # noqa: E501 - :rtype: str - """ - return self._xml - - @xml.setter - def xml(self, xml): - """Sets the xml of this Body1. - - XML file to parse (or zip file containing exactly 1 xml file to parse) # noqa: E501 - - :param xml: The xml of this Body1. 
# noqa: E501 - :type: str - """ - if xml is None: - raise ValueError("Invalid value for `xml`, must not be `None`") # noqa: E501 - - self._xml = xml - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body1, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body1): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body10.py b/edu_sharing_client/models/body10.py deleted file mode 100644 index ff2ebf92..00000000 --- a/edu_sharing_client/models/body10.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body10(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'file': 'str' - } - - attribute_map = { - 'file': 'file' - } - - def __init__(self, file=None): # noqa: E501 - """Body10 - a model defined in Swagger""" # noqa: E501 - self._file = None - self.discriminator = None - if file is not None: - self.file = file - - @property - def file(self): - """Gets the file of this Body10. # noqa: E501 - - - :return: The file of this Body10. # noqa: E501 - :rtype: str - """ - return self._file - - @file.setter - def file(self, file): - """Sets the file of this Body10. - - - :param file: The file of this Body10. 
# noqa: E501 - :type: str - """ - - self._file = file - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body10, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body10): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body11.py b/edu_sharing_client/models/body11.py deleted file mode 100644 index 9a27067a..00000000 --- a/edu_sharing_client/models/body11.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body11(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'image': 'str' - } - - attribute_map = { - 'image': 'image' - } - - def __init__(self, image=None): # noqa: E501 - """Body11 - a model defined in Swagger""" # noqa: E501 - self._image = None - self.discriminator = None - if image is not None: - self.image = image - - @property - def image(self): - """Gets the image of this Body11. # noqa: E501 - - - :return: The image of this Body11. # noqa: E501 - :rtype: str - """ - return self._image - - @image.setter - def image(self, image): - """Sets the image of this Body11. - - - :param image: The image of this Body11. 
# noqa: E501 - :type: str - """ - - self._image = image - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body11, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body11): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body2.py b/edu_sharing_client/models/body2.py deleted file mode 100644 index 131e9f08..00000000 --- a/edu_sharing_client/models/body2.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'excel': 'str' - } - - attribute_map = { - 'excel': 'excel' - } - - def __init__(self, excel=None): # noqa: E501 - """Body2 - a model defined in Swagger""" # noqa: E501 - self._excel = None - self.discriminator = None - self.excel = excel - - @property - def excel(self): - """Gets the excel of this Body2. # noqa: E501 - - Excel file to import # noqa: E501 - - :return: The excel of this Body2. # noqa: E501 - :rtype: str - """ - return self._excel - - @excel.setter - def excel(self, excel): - """Sets the excel of this Body2. - - Excel file to import # noqa: E501 - - :param excel: The excel of this Body2. 
# noqa: E501 - :type: str - """ - if excel is None: - raise ValueError("Invalid value for `excel`, must not be `None`") # noqa: E501 - - self._excel = excel - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body3.py b/edu_sharing_client/models/body3.py deleted file mode 100644 index 52aeabfd..00000000 --- a/edu_sharing_client/models/body3.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body3(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'xml': 'str' - } - - attribute_map = { - 'xml': 'xml' - } - - def __init__(self, xml=None): # noqa: E501 - """Body3 - a model defined in Swagger""" # noqa: E501 - self._xml = None - self.discriminator = None - if xml is not None: - self.xml = xml - - @property - def xml(self): - """Gets the xml of this Body3. # noqa: E501 - - - :return: The xml of this Body3. # noqa: E501 - :rtype: str - """ - return self._xml - - @xml.setter - def xml(self, xml): - """Sets the xml of this Body3. - - - :param xml: The xml of this Body3. 
# noqa: E501 - :type: str - """ - - self._xml = xml - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body3, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body3): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body4.py b/edu_sharing_client/models/body4.py deleted file mode 100644 index e97310f9..00000000 --- a/edu_sharing_client/models/body4.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body4(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'file': 'str' - } - - attribute_map = { - 'file': 'file' - } - - def __init__(self, file=None): # noqa: E501 - """Body4 - a model defined in Swagger""" # noqa: E501 - self._file = None - self.discriminator = None - self.file = file - - @property - def file(self): - """Gets the file of this Body4. # noqa: E501 - - file to upload # noqa: E501 - - :return: The file of this Body4. # noqa: E501 - :rtype: str - """ - return self._file - - @file.setter - def file(self, file): - """Sets the file of this Body4. - - file to upload # noqa: E501 - - :param file: The file of this Body4. 
# noqa: E501 - :type: str - """ - if file is None: - raise ValueError("Invalid value for `file`, must not be `None`") # noqa: E501 - - self._file = file - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body4, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body4): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body5.py b/edu_sharing_client/models/body5.py deleted file mode 100644 index 3b41be70..00000000 --- a/edu_sharing_client/models/body5.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body5(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'file': 'str' - } - - attribute_map = { - 'file': 'file' - } - - def __init__(self, file=None): # noqa: E501 - """Body5 - a model defined in Swagger""" # noqa: E501 - self._file = None - self.discriminator = None - if file is not None: - self.file = file - - @property - def file(self): - """Gets the file of this Body5. # noqa: E501 - - - :return: The file of this Body5. # noqa: E501 - :rtype: str - """ - return self._file - - @file.setter - def file(self, file): - """Sets the file of this Body5. - - - :param file: The file of this Body5. 
# noqa: E501 - :type: str - """ - - self._file = file - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body5, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body5): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body6.py b/edu_sharing_client/models/body6.py deleted file mode 100644 index 5889debd..00000000 --- a/edu_sharing_client/models/body6.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body6(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'avatar': 'str' - } - - attribute_map = { - 'avatar': 'avatar' - } - - def __init__(self, avatar=None): # noqa: E501 - """Body6 - a model defined in Swagger""" # noqa: E501 - self._avatar = None - self.discriminator = None - self.avatar = avatar - - @property - def avatar(self): - """Gets the avatar of this Body6. # noqa: E501 - - avatar image # noqa: E501 - - :return: The avatar of this Body6. # noqa: E501 - :rtype: str - """ - return self._avatar - - @avatar.setter - def avatar(self, avatar): - """Sets the avatar of this Body6. - - avatar image # noqa: E501 - - :param avatar: The avatar of this Body6. 
# noqa: E501 - :type: str - """ - if avatar is None: - raise ValueError("Invalid value for `avatar`, must not be `None`") # noqa: E501 - - self._avatar = avatar - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body6, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body6): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body7.py b/edu_sharing_client/models/body7.py deleted file mode 100644 index baca61a0..00000000 --- a/edu_sharing_client/models/body7.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body7(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'mc_orgs': 'str' - } - - attribute_map = { - 'mc_orgs': 'mcOrgs' - } - - def __init__(self, mc_orgs=None): # noqa: E501 - """Body7 - a model defined in Swagger""" # noqa: E501 - self._mc_orgs = None - self.discriminator = None - self.mc_orgs = mc_orgs - - @property - def mc_orgs(self): - """Gets the mc_orgs of this Body7. # noqa: E501 - - Mediacenter Organisation Connection csv to import # noqa: E501 - - :return: The mc_orgs of this Body7. # noqa: E501 - :rtype: str - """ - return self._mc_orgs - - @mc_orgs.setter - def mc_orgs(self, mc_orgs): - """Sets the mc_orgs of this Body7. - - Mediacenter Organisation Connection csv to import # noqa: E501 - - :param mc_orgs: The mc_orgs of this Body7. 
# noqa: E501 - :type: str - """ - if mc_orgs is None: - raise ValueError("Invalid value for `mc_orgs`, must not be `None`") # noqa: E501 - - self._mc_orgs = mc_orgs - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body7, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body7): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body8.py b/edu_sharing_client/models/body8.py deleted file mode 100644 index f5bfd10f..00000000 --- a/edu_sharing_client/models/body8.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body8(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'mediacenters': 'str' - } - - attribute_map = { - 'mediacenters': 'mediacenters' - } - - def __init__(self, mediacenters=None): # noqa: E501 - """Body8 - a model defined in Swagger""" # noqa: E501 - self._mediacenters = None - self.discriminator = None - self.mediacenters = mediacenters - - @property - def mediacenters(self): - """Gets the mediacenters of this Body8. # noqa: E501 - - Mediacenters csv to import # noqa: E501 - - :return: The mediacenters of this Body8. # noqa: E501 - :rtype: str - """ - return self._mediacenters - - @mediacenters.setter - def mediacenters(self, mediacenters): - """Sets the mediacenters of this Body8. - - Mediacenters csv to import # noqa: E501 - - :param mediacenters: The mediacenters of this Body8. 
# noqa: E501 - :type: str - """ - if mediacenters is None: - raise ValueError("Invalid value for `mediacenters`, must not be `None`") # noqa: E501 - - self._mediacenters = mediacenters - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body8, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body8): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/body9.py b/edu_sharing_client/models/body9.py deleted file mode 100644 index 06253821..00000000 --- a/edu_sharing_client/models/body9.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Body9(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'organisations': 'str' - } - - attribute_map = { - 'organisations': 'organisations' - } - - def __init__(self, organisations=None): # noqa: E501 - """Body9 - a model defined in Swagger""" # noqa: E501 - self._organisations = None - self.discriminator = None - self.organisations = organisations - - @property - def organisations(self): - """Gets the organisations of this Body9. # noqa: E501 - - Organisations csv to import # noqa: E501 - - :return: The organisations of this Body9. # noqa: E501 - :rtype: str - """ - return self._organisations - - @organisations.setter - def organisations(self, organisations): - """Sets the organisations of this Body9. - - Organisations csv to import # noqa: E501 - - :param organisations: The organisations of this Body9. 
# noqa: E501 - :type: str - """ - if organisations is None: - raise ValueError("Invalid value for `organisations`, must not be `None`") # noqa: E501 - - self._organisations = organisations - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Body9, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Body9): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/cache_cluster.py b/edu_sharing_client/models/cache_cluster.py deleted file mode 100644 index c16688d8..00000000 --- a/edu_sharing_client/models/cache_cluster.py +++ /dev/null @@ -1,319 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CacheCluster(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'instances': 'list[CacheMember]', - 'cache_infos': 'list[CacheInfo]', - 'local_member': 'str', - 'free_memory': 'int', - 'total_memory': 'int', - 'max_memory': 'int', - 'available_processors': 'int', - 'time_stamp': 'datetime', - 'group_name': 'str' - } - - attribute_map = { - 'instances': 'instances', - 'cache_infos': 'cacheInfos', - 'local_member': 'localMember', - 'free_memory': 'freeMemory', - 'total_memory': 'totalMemory', - 'max_memory': 'maxMemory', - 'available_processors': 'availableProcessors', - 'time_stamp': 'timeStamp', - 'group_name': 'groupName' - } - - def __init__(self, instances=None, cache_infos=None, local_member=None, free_memory=None, total_memory=None, max_memory=None, available_processors=None, time_stamp=None, group_name=None): # noqa: E501 - """CacheCluster - a model defined in Swagger""" # noqa: E501 - self._instances = None - self._cache_infos = None - self._local_member = None - self._free_memory = None - self._total_memory = None - self._max_memory = None - self._available_processors = None - self._time_stamp = None - self._group_name = None - self.discriminator = None - if instances is not None: - self.instances = instances - if cache_infos is not None: - self.cache_infos = cache_infos - if local_member is not None: - self.local_member = local_member - if free_memory is not None: - self.free_memory = free_memory - if total_memory is not None: - self.total_memory = total_memory - if max_memory is not None: - self.max_memory = max_memory - if available_processors is not None: - self.available_processors = available_processors - if time_stamp is not None: - self.time_stamp = time_stamp - if group_name is not None: - self.group_name = group_name - - @property - def instances(self): - """Gets the instances of this CacheCluster. # noqa: E501 - - - :return: The instances of this CacheCluster. # noqa: E501 - :rtype: list[CacheMember] - """ - return self._instances - - @instances.setter - def instances(self, instances): - """Sets the instances of this CacheCluster. - - - :param instances: The instances of this CacheCluster. # noqa: E501 - :type: list[CacheMember] - """ - - self._instances = instances - - @property - def cache_infos(self): - """Gets the cache_infos of this CacheCluster. # noqa: E501 - - - :return: The cache_infos of this CacheCluster. # noqa: E501 - :rtype: list[CacheInfo] - """ - return self._cache_infos - - @cache_infos.setter - def cache_infos(self, cache_infos): - """Sets the cache_infos of this CacheCluster. - - - :param cache_infos: The cache_infos of this CacheCluster. # noqa: E501 - :type: list[CacheInfo] - """ - - self._cache_infos = cache_infos - - @property - def local_member(self): - """Gets the local_member of this CacheCluster. # noqa: E501 - - - :return: The local_member of this CacheCluster. # noqa: E501 - :rtype: str - """ - return self._local_member - - @local_member.setter - def local_member(self, local_member): - """Sets the local_member of this CacheCluster. - - - :param local_member: The local_member of this CacheCluster. # noqa: E501 - :type: str - """ - - self._local_member = local_member - - @property - def free_memory(self): - """Gets the free_memory of this CacheCluster. # noqa: E501 - - - :return: The free_memory of this CacheCluster. # noqa: E501 - :rtype: int - """ - return self._free_memory - - @free_memory.setter - def free_memory(self, free_memory): - """Sets the free_memory of this CacheCluster. - - - :param free_memory: The free_memory of this CacheCluster. 
# noqa: E501 - :type: int - """ - - self._free_memory = free_memory - - @property - def total_memory(self): - """Gets the total_memory of this CacheCluster. # noqa: E501 - - - :return: The total_memory of this CacheCluster. # noqa: E501 - :rtype: int - """ - return self._total_memory - - @total_memory.setter - def total_memory(self, total_memory): - """Sets the total_memory of this CacheCluster. - - - :param total_memory: The total_memory of this CacheCluster. # noqa: E501 - :type: int - """ - - self._total_memory = total_memory - - @property - def max_memory(self): - """Gets the max_memory of this CacheCluster. # noqa: E501 - - - :return: The max_memory of this CacheCluster. # noqa: E501 - :rtype: int - """ - return self._max_memory - - @max_memory.setter - def max_memory(self, max_memory): - """Sets the max_memory of this CacheCluster. - - - :param max_memory: The max_memory of this CacheCluster. # noqa: E501 - :type: int - """ - - self._max_memory = max_memory - - @property - def available_processors(self): - """Gets the available_processors of this CacheCluster. # noqa: E501 - - - :return: The available_processors of this CacheCluster. # noqa: E501 - :rtype: int - """ - return self._available_processors - - @available_processors.setter - def available_processors(self, available_processors): - """Sets the available_processors of this CacheCluster. - - - :param available_processors: The available_processors of this CacheCluster. # noqa: E501 - :type: int - """ - - self._available_processors = available_processors - - @property - def time_stamp(self): - """Gets the time_stamp of this CacheCluster. # noqa: E501 - - - :return: The time_stamp of this CacheCluster. # noqa: E501 - :rtype: datetime - """ - return self._time_stamp - - @time_stamp.setter - def time_stamp(self, time_stamp): - """Sets the time_stamp of this CacheCluster. - - - :param time_stamp: The time_stamp of this CacheCluster. # noqa: E501 - :type: datetime - """ - - self._time_stamp = time_stamp - - @property - def group_name(self): - """Gets the group_name of this CacheCluster. # noqa: E501 - - - :return: The group_name of this CacheCluster. # noqa: E501 - :rtype: str - """ - return self._group_name - - @group_name.setter - def group_name(self, group_name): - """Sets the group_name of this CacheCluster. - - - :param group_name: The group_name of this CacheCluster. 
# noqa: E501 - :type: str - """ - - self._group_name = group_name - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CacheCluster, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CacheCluster): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/cache_info.py b/edu_sharing_client/models/cache_info.py deleted file mode 100644 index 529e7289..00000000 --- a/edu_sharing_client/models/cache_info.py +++ /dev/null @@ -1,423 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CacheInfo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'size': 'int', - 'statistic_hits': 'int', - 'name': 'str', - 'backup_count': 'int', - 'backup_entry_count': 'int', - 'backup_entry_memory_cost': 'int', - 'heap_cost': 'int', - 'owned_entry_count': 'int', - 'get_owned_entry_memory_cost': 'int', - 'size_in_memory': 'int', - 'member': 'str', - 'group_name': 'str', - 'max_size': 'int' - } - - attribute_map = { - 'size': 'size', - 'statistic_hits': 'statisticHits', - 'name': 'name', - 'backup_count': 'backupCount', - 'backup_entry_count': 'backupEntryCount', - 'backup_entry_memory_cost': 'backupEntryMemoryCost', - 'heap_cost': 'heapCost', - 'owned_entry_count': 'ownedEntryCount', - 'get_owned_entry_memory_cost': 'getOwnedEntryMemoryCost', - 'size_in_memory': 'sizeInMemory', - 'member': 'member', - 'group_name': 'groupName', - 'max_size': 'maxSize' - } - - def __init__(self, size=None, statistic_hits=None, name=None, backup_count=None, backup_entry_count=None, backup_entry_memory_cost=None, heap_cost=None, owned_entry_count=None, get_owned_entry_memory_cost=None, size_in_memory=None, member=None, group_name=None, max_size=None): # noqa: E501 - """CacheInfo - a model defined in Swagger""" # noqa: E501 - self._size = None - self._statistic_hits = None - self._name = None - self._backup_count = None - self._backup_entry_count = None - self._backup_entry_memory_cost = None - self._heap_cost = None - self._owned_entry_count = None - self._get_owned_entry_memory_cost = None - self._size_in_memory = None - self._member = None - self._group_name = None - self._max_size = None - self.discriminator = None - if size is not None: - self.size = size - if statistic_hits is not None: - self.statistic_hits = statistic_hits - if name is not None: - self.name = name - if backup_count is not None: - self.backup_count = backup_count - if backup_entry_count is not None: - self.backup_entry_count = backup_entry_count - if backup_entry_memory_cost is not None: - self.backup_entry_memory_cost = backup_entry_memory_cost - if heap_cost is not None: - self.heap_cost = heap_cost - if owned_entry_count is not None: - self.owned_entry_count = owned_entry_count - if get_owned_entry_memory_cost is not None: - self.get_owned_entry_memory_cost = get_owned_entry_memory_cost - if size_in_memory is not None: - self.size_in_memory = size_in_memory - if member is not None: - self.member = member - if group_name is not None: - self.group_name = group_name - if max_size is not None: - self.max_size = max_size - - @property - def size(self): - """Gets the size of this CacheInfo. # noqa: E501 - - - :return: The size of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this CacheInfo. - - - :param size: The size of this CacheInfo. # noqa: E501 - :type: int - """ - - self._size = size - - @property - def statistic_hits(self): - """Gets the statistic_hits of this CacheInfo. # noqa: E501 - - - :return: The statistic_hits of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._statistic_hits - - @statistic_hits.setter - def statistic_hits(self, statistic_hits): - """Sets the statistic_hits of this CacheInfo. - - - :param statistic_hits: The statistic_hits of this CacheInfo. # noqa: E501 - :type: int - """ - - self._statistic_hits = statistic_hits - - @property - def name(self): - """Gets the name of this CacheInfo. # noqa: E501 - - - :return: The name of this CacheInfo. 
# noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this CacheInfo. - - - :param name: The name of this CacheInfo. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def backup_count(self): - """Gets the backup_count of this CacheInfo. # noqa: E501 - - - :return: The backup_count of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._backup_count - - @backup_count.setter - def backup_count(self, backup_count): - """Sets the backup_count of this CacheInfo. - - - :param backup_count: The backup_count of this CacheInfo. # noqa: E501 - :type: int - """ - - self._backup_count = backup_count - - @property - def backup_entry_count(self): - """Gets the backup_entry_count of this CacheInfo. # noqa: E501 - - - :return: The backup_entry_count of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._backup_entry_count - - @backup_entry_count.setter - def backup_entry_count(self, backup_entry_count): - """Sets the backup_entry_count of this CacheInfo. - - - :param backup_entry_count: The backup_entry_count of this CacheInfo. # noqa: E501 - :type: int - """ - - self._backup_entry_count = backup_entry_count - - @property - def backup_entry_memory_cost(self): - """Gets the backup_entry_memory_cost of this CacheInfo. # noqa: E501 - - - :return: The backup_entry_memory_cost of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._backup_entry_memory_cost - - @backup_entry_memory_cost.setter - def backup_entry_memory_cost(self, backup_entry_memory_cost): - """Sets the backup_entry_memory_cost of this CacheInfo. - - - :param backup_entry_memory_cost: The backup_entry_memory_cost of this CacheInfo. # noqa: E501 - :type: int - """ - - self._backup_entry_memory_cost = backup_entry_memory_cost - - @property - def heap_cost(self): - """Gets the heap_cost of this CacheInfo. # noqa: E501 - - - :return: The heap_cost of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._heap_cost - - @heap_cost.setter - def heap_cost(self, heap_cost): - """Sets the heap_cost of this CacheInfo. - - - :param heap_cost: The heap_cost of this CacheInfo. # noqa: E501 - :type: int - """ - - self._heap_cost = heap_cost - - @property - def owned_entry_count(self): - """Gets the owned_entry_count of this CacheInfo. # noqa: E501 - - - :return: The owned_entry_count of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._owned_entry_count - - @owned_entry_count.setter - def owned_entry_count(self, owned_entry_count): - """Sets the owned_entry_count of this CacheInfo. - - - :param owned_entry_count: The owned_entry_count of this CacheInfo. # noqa: E501 - :type: int - """ - - self._owned_entry_count = owned_entry_count - - @property - def get_owned_entry_memory_cost(self): - """Gets the get_owned_entry_memory_cost of this CacheInfo. # noqa: E501 - - - :return: The get_owned_entry_memory_cost of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._get_owned_entry_memory_cost - - @get_owned_entry_memory_cost.setter - def get_owned_entry_memory_cost(self, get_owned_entry_memory_cost): - """Sets the get_owned_entry_memory_cost of this CacheInfo. - - - :param get_owned_entry_memory_cost: The get_owned_entry_memory_cost of this CacheInfo. # noqa: E501 - :type: int - """ - - self._get_owned_entry_memory_cost = get_owned_entry_memory_cost - - @property - def size_in_memory(self): - """Gets the size_in_memory of this CacheInfo. # noqa: E501 - - - :return: The size_in_memory of this CacheInfo. 
# noqa: E501 - :rtype: int - """ - return self._size_in_memory - - @size_in_memory.setter - def size_in_memory(self, size_in_memory): - """Sets the size_in_memory of this CacheInfo. - - - :param size_in_memory: The size_in_memory of this CacheInfo. # noqa: E501 - :type: int - """ - - self._size_in_memory = size_in_memory - - @property - def member(self): - """Gets the member of this CacheInfo. # noqa: E501 - - - :return: The member of this CacheInfo. # noqa: E501 - :rtype: str - """ - return self._member - - @member.setter - def member(self, member): - """Sets the member of this CacheInfo. - - - :param member: The member of this CacheInfo. # noqa: E501 - :type: str - """ - - self._member = member - - @property - def group_name(self): - """Gets the group_name of this CacheInfo. # noqa: E501 - - - :return: The group_name of this CacheInfo. # noqa: E501 - :rtype: str - """ - return self._group_name - - @group_name.setter - def group_name(self, group_name): - """Sets the group_name of this CacheInfo. - - - :param group_name: The group_name of this CacheInfo. # noqa: E501 - :type: str - """ - - self._group_name = group_name - - @property - def max_size(self): - """Gets the max_size of this CacheInfo. # noqa: E501 - - - :return: The max_size of this CacheInfo. # noqa: E501 - :rtype: int - """ - return self._max_size - - @max_size.setter - def max_size(self, max_size): - """Sets the max_size of this CacheInfo. - - - :param max_size: The max_size of this CacheInfo. # noqa: E501 - :type: int - """ - - self._max_size = max_size - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CacheInfo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CacheInfo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/cache_member.py b/edu_sharing_client/models/cache_member.py deleted file mode 100644 index bddd9f53..00000000 --- a/edu_sharing_client/models/cache_member.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CacheMember(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str' - } - - attribute_map = { - 'name': 'name' - } - - def __init__(self, name=None): # noqa: E501 - """CacheMember - a model defined in Swagger""" # noqa: E501 - self._name = None - self.discriminator = None - if name is not None: - self.name = name - - @property - def name(self): - """Gets the name of this CacheMember. # noqa: E501 - - - :return: The name of this CacheMember. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this CacheMember. - - - :param name: The name of this CacheMember. # noqa: E501 - :type: str - """ - - self._name = name - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CacheMember, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CacheMember): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/catalog.py b/edu_sharing_client/models/catalog.py deleted file mode 100644 index 53f5a8f7..00000000 --- a/edu_sharing_client/models/catalog.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Catalog(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str', - 'url': 'str' - } - - attribute_map = { - 'name': 'name', - 'url': 'url' - } - - def __init__(self, name=None, url=None): # noqa: E501 - """Catalog - a model defined in Swagger""" # noqa: E501 - self._name = None - self._url = None - self.discriminator = None - if name is not None: - self.name = name - if url is not None: - self.url = url - - @property - def name(self): - """Gets the name of this Catalog. # noqa: E501 - - - :return: The name of this Catalog. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this Catalog. - - - :param name: The name of this Catalog. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def url(self): - """Gets the url of this Catalog. 
# noqa: E501 - - - :return: The url of this Catalog. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Catalog. - - - :param url: The url of this Catalog. # noqa: E501 - :type: str - """ - - self._url = url - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Catalog, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Catalog): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collection.py b/edu_sharing_client/models/collection.py deleted file mode 100644 index 7815d8b7..00000000 --- a/edu_sharing_client/models/collection.py +++ /dev/null @@ -1,510 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Collection(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'scope': 'str', - 'author_freetext': 'str', - 'level0': 'bool', - 'title': 'str', - 'description': 'str', - 'type': 'str', - 'viewtype': 'str', - 'order_mode': 'str', - 'x': 'int', - 'y': 'int', - 'z': 'int', - 'color': 'str', - 'from_user': 'bool', - 'pinned': 'bool', - 'child_collections_count': 'int', - 'child_references_count': 'int' - } - - attribute_map = { - 'scope': 'scope', - 'author_freetext': 'authorFreetext', - 'level0': 'level0', - 'title': 'title', - 'description': 'description', - 'type': 'type', - 'viewtype': 'viewtype', - 'order_mode': 'orderMode', - 'x': 'x', - 'y': 'y', - 'z': 'z', - 'color': 'color', - 'from_user': 'fromUser', - 'pinned': 'pinned', - 'child_collections_count': 'childCollectionsCount', - 'child_references_count': 'childReferencesCount' - } - - def __init__(self, scope=None, author_freetext=None, level0=False, title=None, description=None, type=None, viewtype=None, order_mode=None, x=None, y=None, z=None, color=None, from_user=False, pinned=False, child_collections_count=None, child_references_count=None): # noqa: E501 - """Collection - a model defined in Swagger""" # noqa: E501 - self._scope = None - self._author_freetext = None - self._level0 = None - self._title = None - self._description = None - self._type = None - self._viewtype = None - self._order_mode = None - self._x = None - self._y = None - self._z = None - self._color = None - self._from_user = None - self._pinned = None - self._child_collections_count = None - self._child_references_count = None - self.discriminator = None - if scope is not None: - self.scope = scope - if author_freetext is not None: - self.author_freetext = author_freetext - self.level0 = level0 - self.title = title - if description is not None: - self.description = description - self.type = type - self.viewtype = viewtype - if order_mode is not None: - self.order_mode = order_mode - if x is not None: - self.x = x - if y is not None: - self.y = y - if z is not None: - self.z = z - if color is not None: - self.color = color - self.from_user = from_user - if pinned is not None: - self.pinned = pinned - if child_collections_count is not None: - self.child_collections_count = child_collections_count - if child_references_count is not None: - self.child_references_count = child_references_count - - @property - def scope(self): - """Gets the scope of this Collection. # noqa: E501 - - - :return: The scope of this Collection. # noqa: E501 - :rtype: str - """ - return self._scope - - @scope.setter - def scope(self, scope): - """Sets the scope of this Collection. - - - :param scope: The scope of this Collection. # noqa: E501 - :type: str - """ - - self._scope = scope - - @property - def author_freetext(self): - """Gets the author_freetext of this Collection. # noqa: E501 - - - :return: The author_freetext of this Collection. # noqa: E501 - :rtype: str - """ - return self._author_freetext - - @author_freetext.setter - def author_freetext(self, author_freetext): - """Sets the author_freetext of this Collection. - - - :param author_freetext: The author_freetext of this Collection. # noqa: E501 - :type: str - """ - - self._author_freetext = author_freetext - - @property - def level0(self): - """Gets the level0 of this Collection. # noqa: E501 - - false # noqa: E501 - - :return: The level0 of this Collection. # noqa: E501 - :rtype: bool - """ - return self._level0 - - @level0.setter - def level0(self, level0): - """Sets the level0 of this Collection. 
- - false # noqa: E501 - - :param level0: The level0 of this Collection. # noqa: E501 - :type: bool - """ - if level0 is None: - raise ValueError("Invalid value for `level0`, must not be `None`") # noqa: E501 - - self._level0 = level0 - - @property - def title(self): - """Gets the title of this Collection. # noqa: E501 - - - :return: The title of this Collection. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this Collection. - - - :param title: The title of this Collection. # noqa: E501 - :type: str - """ - if title is None: - raise ValueError("Invalid value for `title`, must not be `None`") # noqa: E501 - - self._title = title - - @property - def description(self): - """Gets the description of this Collection. # noqa: E501 - - - :return: The description of this Collection. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this Collection. - - - :param description: The description of this Collection. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def type(self): - """Gets the type of this Collection. # noqa: E501 - - - :return: The type of this Collection. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Collection. - - - :param type: The type of this Collection. # noqa: E501 - :type: str - """ - if type is None: - raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 - - self._type = type - - @property - def viewtype(self): - """Gets the viewtype of this Collection. # noqa: E501 - - - :return: The viewtype of this Collection. # noqa: E501 - :rtype: str - """ - return self._viewtype - - @viewtype.setter - def viewtype(self, viewtype): - """Sets the viewtype of this Collection. - - - :param viewtype: The viewtype of this Collection. # noqa: E501 - :type: str - """ - if viewtype is None: - raise ValueError("Invalid value for `viewtype`, must not be `None`") # noqa: E501 - - self._viewtype = viewtype - - @property - def order_mode(self): - """Gets the order_mode of this Collection. # noqa: E501 - - - :return: The order_mode of this Collection. # noqa: E501 - :rtype: str - """ - return self._order_mode - - @order_mode.setter - def order_mode(self, order_mode): - """Sets the order_mode of this Collection. - - - :param order_mode: The order_mode of this Collection. # noqa: E501 - :type: str - """ - - self._order_mode = order_mode - - @property - def x(self): - """Gets the x of this Collection. # noqa: E501 - - - :return: The x of this Collection. # noqa: E501 - :rtype: int - """ - return self._x - - @x.setter - def x(self, x): - """Sets the x of this Collection. - - - :param x: The x of this Collection. # noqa: E501 - :type: int - """ - - self._x = x - - @property - def y(self): - """Gets the y of this Collection. # noqa: E501 - - - :return: The y of this Collection. # noqa: E501 - :rtype: int - """ - return self._y - - @y.setter - def y(self, y): - """Sets the y of this Collection. - - - :param y: The y of this Collection. # noqa: E501 - :type: int - """ - - self._y = y - - @property - def z(self): - """Gets the z of this Collection. # noqa: E501 - - - :return: The z of this Collection. # noqa: E501 - :rtype: int - """ - return self._z - - @z.setter - def z(self, z): - """Sets the z of this Collection. - - - :param z: The z of this Collection. 
# noqa: E501 - :type: int - """ - - self._z = z - - @property - def color(self): - """Gets the color of this Collection. # noqa: E501 - - - :return: The color of this Collection. # noqa: E501 - :rtype: str - """ - return self._color - - @color.setter - def color(self, color): - """Sets the color of this Collection. - - - :param color: The color of this Collection. # noqa: E501 - :type: str - """ - - self._color = color - - @property - def from_user(self): - """Gets the from_user of this Collection. # noqa: E501 - - false # noqa: E501 - - :return: The from_user of this Collection. # noqa: E501 - :rtype: bool - """ - return self._from_user - - @from_user.setter - def from_user(self, from_user): - """Sets the from_user of this Collection. - - false # noqa: E501 - - :param from_user: The from_user of this Collection. # noqa: E501 - :type: bool - """ - if from_user is None: - raise ValueError("Invalid value for `from_user`, must not be `None`") # noqa: E501 - - self._from_user = from_user - - @property - def pinned(self): - """Gets the pinned of this Collection. # noqa: E501 - - - :return: The pinned of this Collection. # noqa: E501 - :rtype: bool - """ - return self._pinned - - @pinned.setter - def pinned(self, pinned): - """Sets the pinned of this Collection. - - - :param pinned: The pinned of this Collection. # noqa: E501 - :type: bool - """ - - self._pinned = pinned - - @property - def child_collections_count(self): - """Gets the child_collections_count of this Collection. # noqa: E501 - - - :return: The child_collections_count of this Collection. # noqa: E501 - :rtype: int - """ - return self._child_collections_count - - @child_collections_count.setter - def child_collections_count(self, child_collections_count): - """Sets the child_collections_count of this Collection. - - - :param child_collections_count: The child_collections_count of this Collection. # noqa: E501 - :type: int - """ - - self._child_collections_count = child_collections_count - - @property - def child_references_count(self): - """Gets the child_references_count of this Collection. # noqa: E501 - - - :return: The child_references_count of this Collection. # noqa: E501 - :rtype: int - """ - return self._child_references_count - - @child_references_count.setter - def child_references_count(self, child_references_count): - """Sets the child_references_count of this Collection. - - - :param child_references_count: The child_references_count of this Collection. 
# noqa: E501 - :type: int - """ - - self._child_references_count = child_references_count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Collection, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Collection): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collection_counts.py b/edu_sharing_client/models/collection_counts.py deleted file mode 100644 index ebee1f79..00000000 --- a/edu_sharing_client/models/collection_counts.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CollectionCounts(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'refs': 'list[Element]', - 'collections': 'list[Element]' - } - - attribute_map = { - 'refs': 'refs', - 'collections': 'collections' - } - - def __init__(self, refs=None, collections=None): # noqa: E501 - """CollectionCounts - a model defined in Swagger""" # noqa: E501 - self._refs = None - self._collections = None - self.discriminator = None - if refs is not None: - self.refs = refs - if collections is not None: - self.collections = collections - - @property - def refs(self): - """Gets the refs of this CollectionCounts. # noqa: E501 - - - :return: The refs of this CollectionCounts. # noqa: E501 - :rtype: list[Element] - """ - return self._refs - - @refs.setter - def refs(self, refs): - """Sets the refs of this CollectionCounts. - - - :param refs: The refs of this CollectionCounts. # noqa: E501 - :type: list[Element] - """ - - self._refs = refs - - @property - def collections(self): - """Gets the collections of this CollectionCounts. # noqa: E501 - - - :return: The collections of this CollectionCounts. # noqa: E501 - :rtype: list[Element] - """ - return self._collections - - @collections.setter - def collections(self, collections): - """Sets the collections of this CollectionCounts. - - - :param collections: The collections of this CollectionCounts. 
# noqa: E501 - :type: list[Element] - """ - - self._collections = collections - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CollectionCounts, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CollectionCounts): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collection_entries.py b/edu_sharing_client/models/collection_entries.py deleted file mode 100644 index 38a85be9..00000000 --- a/edu_sharing_client/models/collection_entries.py +++ /dev/null @@ -1,138 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CollectionEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'pagination': 'Pagination', - 'collections': 'list[Node]' - } - - attribute_map = { - 'pagination': 'pagination', - 'collections': 'collections' - } - - def __init__(self, pagination=None, collections=None): # noqa: E501 - """CollectionEntries - a model defined in Swagger""" # noqa: E501 - self._pagination = None - self._collections = None - self.discriminator = None - if pagination is not None: - self.pagination = pagination - self.collections = collections - - @property - def pagination(self): - """Gets the pagination of this CollectionEntries. # noqa: E501 - - - :return: The pagination of this CollectionEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this CollectionEntries. - - - :param pagination: The pagination of this CollectionEntries. # noqa: E501 - :type: Pagination - """ - - self._pagination = pagination - - @property - def collections(self): - """Gets the collections of this CollectionEntries. # noqa: E501 - - - :return: The collections of this CollectionEntries. # noqa: E501 - :rtype: list[Node] - """ - return self._collections - - @collections.setter - def collections(self, collections): - """Sets the collections of this CollectionEntries. - - - :param collections: The collections of this CollectionEntries. 
# noqa: E501 - :type: list[Node] - """ - if collections is None: - raise ValueError("Invalid value for `collections`, must not be `None`") # noqa: E501 - - self._collections = collections - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CollectionEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CollectionEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collection_entry.py b/edu_sharing_client/models/collection_entry.py deleted file mode 100644 index fcd83e22..00000000 --- a/edu_sharing_client/models/collection_entry.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CollectionEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'collection': 'Node' - } - - attribute_map = { - 'collection': 'collection' - } - - def __init__(self, collection=None): # noqa: E501 - """CollectionEntry - a model defined in Swagger""" # noqa: E501 - self._collection = None - self.discriminator = None - self.collection = collection - - @property - def collection(self): - """Gets the collection of this CollectionEntry. # noqa: E501 - - - :return: The collection of this CollectionEntry. # noqa: E501 - :rtype: Node - """ - return self._collection - - @collection.setter - def collection(self, collection): - """Sets the collection of this CollectionEntry. - - - :param collection: The collection of this CollectionEntry. 
# noqa: E501 - :type: Node - """ - if collection is None: - raise ValueError("Invalid value for `collection`, must not be `None`") # noqa: E501 - - self._collection = collection - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CollectionEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CollectionEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collection_feedback.py b/edu_sharing_client/models/collection_feedback.py deleted file mode 100644 index 7d19252e..00000000 --- a/edu_sharing_client/models/collection_feedback.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CollectionFeedback(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'created_at': 'datetime', - 'creator': 'str', - 'feedback': 'dict(str, Serializable)' - } - - attribute_map = { - 'created_at': 'createdAt', - 'creator': 'creator', - 'feedback': 'feedback' - } - - def __init__(self, created_at=None, creator=None, feedback=None): # noqa: E501 - """CollectionFeedback - a model defined in Swagger""" # noqa: E501 - self._created_at = None - self._creator = None - self._feedback = None - self.discriminator = None - if created_at is not None: - self.created_at = created_at - if creator is not None: - self.creator = creator - if feedback is not None: - self.feedback = feedback - - @property - def created_at(self): - """Gets the created_at of this CollectionFeedback. # noqa: E501 - - - :return: The created_at of this CollectionFeedback. # noqa: E501 - :rtype: datetime - """ - return self._created_at - - @created_at.setter - def created_at(self, created_at): - """Sets the created_at of this CollectionFeedback. - - - :param created_at: The created_at of this CollectionFeedback. # noqa: E501 - :type: datetime - """ - - self._created_at = created_at - - @property - def creator(self): - """Gets the creator of this CollectionFeedback. # noqa: E501 - - - :return: The creator of this CollectionFeedback. 
# noqa: E501 - :rtype: str - """ - return self._creator - - @creator.setter - def creator(self, creator): - """Sets the creator of this CollectionFeedback. - - - :param creator: The creator of this CollectionFeedback. # noqa: E501 - :type: str - """ - - self._creator = creator - - @property - def feedback(self): - """Gets the feedback of this CollectionFeedback. # noqa: E501 - - - :return: The feedback of this CollectionFeedback. # noqa: E501 - :rtype: dict(str, Serializable) - """ - return self._feedback - - @feedback.setter - def feedback(self, feedback): - """Sets the feedback of this CollectionFeedback. - - - :param feedback: The feedback of this CollectionFeedback. # noqa: E501 - :type: dict(str, Serializable) - """ - - self._feedback = feedback - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CollectionFeedback, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CollectionFeedback): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collection_options.py b/edu_sharing_client/models/collection_options.py deleted file mode 100644 index 118910ec..00000000 --- a/edu_sharing_client/models/collection_options.py +++ /dev/null @@ -1,149 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CollectionOptions(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'private_collections': 'str', - 'public_collections': 'str' - } - - attribute_map = { - 'private_collections': 'privateCollections', - 'public_collections': 'publicCollections' - } - - def __init__(self, private_collections=None, public_collections=None): # noqa: E501 - """CollectionOptions - a model defined in Swagger""" # noqa: E501 - self._private_collections = None - self._public_collections = None - self.discriminator = None - if private_collections is not None: - self.private_collections = private_collections - if public_collections is not None: - self.public_collections = public_collections - - @property - def private_collections(self): - """Gets the private_collections of this CollectionOptions. # noqa: E501 - - - :return: The private_collections of this CollectionOptions. # noqa: E501 - :rtype: str - """ - return self._private_collections - - @private_collections.setter - def private_collections(self, private_collections): - """Sets the private_collections of this CollectionOptions. - - - :param private_collections: The private_collections of this CollectionOptions. # noqa: E501 - :type: str - """ - allowed_values = ["none", "assign", "delete"] # noqa: E501 - if private_collections not in allowed_values: - raise ValueError( - "Invalid value for `private_collections` ({0}), must be one of {1}" # noqa: E501 - .format(private_collections, allowed_values) - ) - - self._private_collections = private_collections - - @property - def public_collections(self): - """Gets the public_collections of this CollectionOptions. # noqa: E501 - - - :return: The public_collections of this CollectionOptions. # noqa: E501 - :rtype: str - """ - return self._public_collections - - @public_collections.setter - def public_collections(self, public_collections): - """Sets the public_collections of this CollectionOptions. - - - :param public_collections: The public_collections of this CollectionOptions. 
# noqa: E501 - :type: str - """ - allowed_values = ["none", "assign", "delete"] # noqa: E501 - if public_collections not in allowed_values: - raise ValueError( - "Invalid value for `public_collections` ({0}), must be one of {1}" # noqa: E501 - .format(public_collections, allowed_values) - ) - - self._public_collections = public_collections - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CollectionOptions, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CollectionOptions): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collection_reference.py b/edu_sharing_client/models/collection_reference.py deleted file mode 100644 index 57fff7dd..00000000 --- a/edu_sharing_client/models/collection_reference.py +++ /dev/null @@ -1,873 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CollectionReference(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'remote': 'Remote', - 'content': 'Content', - 'license': 'License', - 'is_directory': 'bool', - 'comment_count': 'int', - 'rating': 'AccumulatedRatings', - 'access_original': 'list[str]', - 'ref': 'NodeRef', - 'parent': 'NodeRef', - 'type': 'str', - 'aspects': 'list[str]', - 'name': 'str', - 'title': 'str', - 'metadataset': 'str', - 'repository_type': 'str', - 'created_at': 'datetime', - 'created_by': 'Person', - 'modified_at': 'datetime', - 'modified_by': 'Person', - 'access': 'list[str]', - 'download_url': 'str', - 'properties': 'dict(str, list[str])', - 'mimetype': 'str', - 'mediatype': 'str', - 'size': 'str', - 'preview': 'Preview', - 'icon_url': 'str', - 'collection': 'Collection', - 'owner': 'Person', - 'original_id': 'str' - } - - attribute_map = { - 'remote': 'remote', - 'content': 'content', - 'license': 'license', - 'is_directory': 'isDirectory', - 'comment_count': 'commentCount', - 'rating': 'rating', - 'access_original': 'accessOriginal', - 'ref': 'ref', - 'parent': 'parent', - 'type': 'type', - 'aspects': 'aspects', - 'name': 'name', - 'title': 'title', - 'metadataset': 'metadataset', - 'repository_type': 'repositoryType', - 'created_at': 'createdAt', - 'created_by': 'createdBy', - 'modified_at': 'modifiedAt', - 'modified_by': 'modifiedBy', - 'access': 'access', - 'download_url': 'downloadUrl', - 'properties': 'properties', - 'mimetype': 'mimetype', - 'mediatype': 'mediatype', - 'size': 'size', - 'preview': 'preview', - 'icon_url': 'iconURL', - 'collection': 'collection', - 'owner': 'owner', - 'original_id': 'originalId' - } - - def __init__(self, remote=None, content=None, license=None, is_directory=False, comment_count=None, rating=None, access_original=None, ref=None, parent=None, type=None, aspects=None, name=None, title=None, metadataset=None, repository_type=None, created_at=None, created_by=None, modified_at=None, modified_by=None, access=None, download_url=None, properties=None, mimetype=None, mediatype=None, size=None, preview=None, icon_url=None, collection=None, owner=None, original_id=None): # noqa: E501 - """CollectionReference - a model defined in Swagger""" # noqa: E501 - self._remote = None - self._content = None - self._license = None - self._is_directory = None - self._comment_count = None - self._rating = None - self._access_original = None - self._ref = None - self._parent = None - self._type = None - self._aspects = None - self._name = None - self._title = None - self._metadataset = None - self._repository_type = None - self._created_at = None - self._created_by = None - self._modified_at = None - self._modified_by = None - self._access = None - self._download_url = None - self._properties = None - self._mimetype = None - self._mediatype = None - self._size = None - self._preview = None - self._icon_url = None - self._collection = None - self._owner = None - self._original_id = None - self.discriminator = None - if remote is not None: - self.remote = remote - if content is not None: - self.content = content - if license is not None: - self.license = license - if is_directory is not None: - self.is_directory = is_directory - if comment_count is not None: - self.comment_count = comment_count - if rating is not None: - self.rating = rating - if access_original is not None: - self.access_original = access_original - self.ref = ref - if parent is not None: - self.parent = parent - if type is not None: - self.type = type - if aspects is not None: - self.aspects = aspects - self.name = name - if title is not None: - self.title = title 
- if metadataset is not None: - self.metadataset = metadataset - if repository_type is not None: - self.repository_type = repository_type - self.created_at = created_at - self.created_by = created_by - if modified_at is not None: - self.modified_at = modified_at - if modified_by is not None: - self.modified_by = modified_by - self.access = access - self.download_url = download_url - if properties is not None: - self.properties = properties - if mimetype is not None: - self.mimetype = mimetype - if mediatype is not None: - self.mediatype = mediatype - if size is not None: - self.size = size - if preview is not None: - self.preview = preview - if icon_url is not None: - self.icon_url = icon_url - self.collection = collection - self.owner = owner - if original_id is not None: - self.original_id = original_id - - @property - def remote(self): - """Gets the remote of this CollectionReference. # noqa: E501 - - - :return: The remote of this CollectionReference. # noqa: E501 - :rtype: Remote - """ - return self._remote - - @remote.setter - def remote(self, remote): - """Sets the remote of this CollectionReference. - - - :param remote: The remote of this CollectionReference. # noqa: E501 - :type: Remote - """ - - self._remote = remote - - @property - def content(self): - """Gets the content of this CollectionReference. # noqa: E501 - - - :return: The content of this CollectionReference. # noqa: E501 - :rtype: Content - """ - return self._content - - @content.setter - def content(self, content): - """Sets the content of this CollectionReference. - - - :param content: The content of this CollectionReference. # noqa: E501 - :type: Content - """ - - self._content = content - - @property - def license(self): - """Gets the license of this CollectionReference. # noqa: E501 - - - :return: The license of this CollectionReference. # noqa: E501 - :rtype: License - """ - return self._license - - @license.setter - def license(self, license): - """Sets the license of this CollectionReference. - - - :param license: The license of this CollectionReference. # noqa: E501 - :type: License - """ - - self._license = license - - @property - def is_directory(self): - """Gets the is_directory of this CollectionReference. # noqa: E501 - - - :return: The is_directory of this CollectionReference. # noqa: E501 - :rtype: bool - """ - return self._is_directory - - @is_directory.setter - def is_directory(self, is_directory): - """Sets the is_directory of this CollectionReference. - - - :param is_directory: The is_directory of this CollectionReference. # noqa: E501 - :type: bool - """ - - self._is_directory = is_directory - - @property - def comment_count(self): - """Gets the comment_count of this CollectionReference. # noqa: E501 - - - :return: The comment_count of this CollectionReference. # noqa: E501 - :rtype: int - """ - return self._comment_count - - @comment_count.setter - def comment_count(self, comment_count): - """Sets the comment_count of this CollectionReference. - - - :param comment_count: The comment_count of this CollectionReference. # noqa: E501 - :type: int - """ - - self._comment_count = comment_count - - @property - def rating(self): - """Gets the rating of this CollectionReference. # noqa: E501 - - - :return: The rating of this CollectionReference. # noqa: E501 - :rtype: AccumulatedRatings - """ - return self._rating - - @rating.setter - def rating(self, rating): - """Sets the rating of this CollectionReference. - - - :param rating: The rating of this CollectionReference. 
# noqa: E501 - :type: AccumulatedRatings - """ - - self._rating = rating - - @property - def access_original(self): - """Gets the access_original of this CollectionReference. # noqa: E501 - - - :return: The access_original of this CollectionReference. # noqa: E501 - :rtype: list[str] - """ - return self._access_original - - @access_original.setter - def access_original(self, access_original): - """Sets the access_original of this CollectionReference. - - - :param access_original: The access_original of this CollectionReference. # noqa: E501 - :type: list[str] - """ - - self._access_original = access_original - - @property - def ref(self): - """Gets the ref of this CollectionReference. # noqa: E501 - - - :return: The ref of this CollectionReference. # noqa: E501 - :rtype: NodeRef - """ - return self._ref - - @ref.setter - def ref(self, ref): - """Sets the ref of this CollectionReference. - - - :param ref: The ref of this CollectionReference. # noqa: E501 - :type: NodeRef - """ - if ref is None: - raise ValueError("Invalid value for `ref`, must not be `None`") # noqa: E501 - - self._ref = ref - - @property - def parent(self): - """Gets the parent of this CollectionReference. # noqa: E501 - - - :return: The parent of this CollectionReference. # noqa: E501 - :rtype: NodeRef - """ - return self._parent - - @parent.setter - def parent(self, parent): - """Sets the parent of this CollectionReference. - - - :param parent: The parent of this CollectionReference. # noqa: E501 - :type: NodeRef - """ - - self._parent = parent - - @property - def type(self): - """Gets the type of this CollectionReference. # noqa: E501 - - - :return: The type of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this CollectionReference. - - - :param type: The type of this CollectionReference. # noqa: E501 - :type: str - """ - - self._type = type - - @property - def aspects(self): - """Gets the aspects of this CollectionReference. # noqa: E501 - - - :return: The aspects of this CollectionReference. # noqa: E501 - :rtype: list[str] - """ - return self._aspects - - @aspects.setter - def aspects(self, aspects): - """Sets the aspects of this CollectionReference. - - - :param aspects: The aspects of this CollectionReference. # noqa: E501 - :type: list[str] - """ - - self._aspects = aspects - - @property - def name(self): - """Gets the name of this CollectionReference. # noqa: E501 - - - :return: The name of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this CollectionReference. - - - :param name: The name of this CollectionReference. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def title(self): - """Gets the title of this CollectionReference. # noqa: E501 - - - :return: The title of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this CollectionReference. - - - :param title: The title of this CollectionReference. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def metadataset(self): - """Gets the metadataset of this CollectionReference. # noqa: E501 - - - :return: The metadataset of this CollectionReference. 
# noqa: E501 - :rtype: str - """ - return self._metadataset - - @metadataset.setter - def metadataset(self, metadataset): - """Sets the metadataset of this CollectionReference. - - - :param metadataset: The metadataset of this CollectionReference. # noqa: E501 - :type: str - """ - - self._metadataset = metadataset - - @property - def repository_type(self): - """Gets the repository_type of this CollectionReference. # noqa: E501 - - - :return: The repository_type of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._repository_type - - @repository_type.setter - def repository_type(self, repository_type): - """Sets the repository_type of this CollectionReference. - - - :param repository_type: The repository_type of this CollectionReference. # noqa: E501 - :type: str - """ - - self._repository_type = repository_type - - @property - def created_at(self): - """Gets the created_at of this CollectionReference. # noqa: E501 - - - :return: The created_at of this CollectionReference. # noqa: E501 - :rtype: datetime - """ - return self._created_at - - @created_at.setter - def created_at(self, created_at): - """Sets the created_at of this CollectionReference. - - - :param created_at: The created_at of this CollectionReference. # noqa: E501 - :type: datetime - """ - if created_at is None: - raise ValueError("Invalid value for `created_at`, must not be `None`") # noqa: E501 - - self._created_at = created_at - - @property - def created_by(self): - """Gets the created_by of this CollectionReference. # noqa: E501 - - - :return: The created_by of this CollectionReference. # noqa: E501 - :rtype: Person - """ - return self._created_by - - @created_by.setter - def created_by(self, created_by): - """Sets the created_by of this CollectionReference. - - - :param created_by: The created_by of this CollectionReference. # noqa: E501 - :type: Person - """ - if created_by is None: - raise ValueError("Invalid value for `created_by`, must not be `None`") # noqa: E501 - - self._created_by = created_by - - @property - def modified_at(self): - """Gets the modified_at of this CollectionReference. # noqa: E501 - - - :return: The modified_at of this CollectionReference. # noqa: E501 - :rtype: datetime - """ - return self._modified_at - - @modified_at.setter - def modified_at(self, modified_at): - """Sets the modified_at of this CollectionReference. - - - :param modified_at: The modified_at of this CollectionReference. # noqa: E501 - :type: datetime - """ - - self._modified_at = modified_at - - @property - def modified_by(self): - """Gets the modified_by of this CollectionReference. # noqa: E501 - - - :return: The modified_by of this CollectionReference. # noqa: E501 - :rtype: Person - """ - return self._modified_by - - @modified_by.setter - def modified_by(self, modified_by): - """Sets the modified_by of this CollectionReference. - - - :param modified_by: The modified_by of this CollectionReference. # noqa: E501 - :type: Person - """ - - self._modified_by = modified_by - - @property - def access(self): - """Gets the access of this CollectionReference. # noqa: E501 - - - :return: The access of this CollectionReference. # noqa: E501 - :rtype: list[str] - """ - return self._access - - @access.setter - def access(self, access): - """Sets the access of this CollectionReference. - - - :param access: The access of this CollectionReference. 
# noqa: E501 - :type: list[str] - """ - if access is None: - raise ValueError("Invalid value for `access`, must not be `None`") # noqa: E501 - - self._access = access - - @property - def download_url(self): - """Gets the download_url of this CollectionReference. # noqa: E501 - - - :return: The download_url of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._download_url - - @download_url.setter - def download_url(self, download_url): - """Sets the download_url of this CollectionReference. - - - :param download_url: The download_url of this CollectionReference. # noqa: E501 - :type: str - """ - if download_url is None: - raise ValueError("Invalid value for `download_url`, must not be `None`") # noqa: E501 - - self._download_url = download_url - - @property - def properties(self): - """Gets the properties of this CollectionReference. # noqa: E501 - - - :return: The properties of this CollectionReference. # noqa: E501 - :rtype: dict(str, list[str]) - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this CollectionReference. - - - :param properties: The properties of this CollectionReference. # noqa: E501 - :type: dict(str, list[str]) - """ - - self._properties = properties - - @property - def mimetype(self): - """Gets the mimetype of this CollectionReference. # noqa: E501 - - - :return: The mimetype of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._mimetype - - @mimetype.setter - def mimetype(self, mimetype): - """Sets the mimetype of this CollectionReference. - - - :param mimetype: The mimetype of this CollectionReference. # noqa: E501 - :type: str - """ - - self._mimetype = mimetype - - @property - def mediatype(self): - """Gets the mediatype of this CollectionReference. # noqa: E501 - - - :return: The mediatype of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._mediatype - - @mediatype.setter - def mediatype(self, mediatype): - """Sets the mediatype of this CollectionReference. - - - :param mediatype: The mediatype of this CollectionReference. # noqa: E501 - :type: str - """ - - self._mediatype = mediatype - - @property - def size(self): - """Gets the size of this CollectionReference. # noqa: E501 - - - :return: The size of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this CollectionReference. - - - :param size: The size of this CollectionReference. # noqa: E501 - :type: str - """ - - self._size = size - - @property - def preview(self): - """Gets the preview of this CollectionReference. # noqa: E501 - - - :return: The preview of this CollectionReference. # noqa: E501 - :rtype: Preview - """ - return self._preview - - @preview.setter - def preview(self, preview): - """Sets the preview of this CollectionReference. - - - :param preview: The preview of this CollectionReference. # noqa: E501 - :type: Preview - """ - - self._preview = preview - - @property - def icon_url(self): - """Gets the icon_url of this CollectionReference. # noqa: E501 - - - :return: The icon_url of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._icon_url - - @icon_url.setter - def icon_url(self, icon_url): - """Sets the icon_url of this CollectionReference. - - - :param icon_url: The icon_url of this CollectionReference. 
# noqa: E501 - :type: str - """ - - self._icon_url = icon_url - - @property - def collection(self): - """Gets the collection of this CollectionReference. # noqa: E501 - - - :return: The collection of this CollectionReference. # noqa: E501 - :rtype: Collection - """ - return self._collection - - @collection.setter - def collection(self, collection): - """Sets the collection of this CollectionReference. - - - :param collection: The collection of this CollectionReference. # noqa: E501 - :type: Collection - """ - if collection is None: - raise ValueError("Invalid value for `collection`, must not be `None`") # noqa: E501 - - self._collection = collection - - @property - def owner(self): - """Gets the owner of this CollectionReference. # noqa: E501 - - - :return: The owner of this CollectionReference. # noqa: E501 - :rtype: Person - """ - return self._owner - - @owner.setter - def owner(self, owner): - """Sets the owner of this CollectionReference. - - - :param owner: The owner of this CollectionReference. # noqa: E501 - :type: Person - """ - if owner is None: - raise ValueError("Invalid value for `owner`, must not be `None`") # noqa: E501 - - self._owner = owner - - @property - def original_id(self): - """Gets the original_id of this CollectionReference. # noqa: E501 - - - :return: The original_id of this CollectionReference. # noqa: E501 - :rtype: str - """ - return self._original_id - - @original_id.setter - def original_id(self, original_id): - """Sets the original_id of this CollectionReference. - - - :param original_id: The original_id of this CollectionReference. # noqa: E501 - :type: str - """ - - self._original_id = original_id - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CollectionReference, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CollectionReference): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collections.py b/edu_sharing_client/models/collections.py deleted file mode 100644 index 1c43753a..00000000 --- a/edu_sharing_client/models/collections.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Collections(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'colors': 'list[str]' - } - - attribute_map = { - 'colors': 'colors' - } - - def __init__(self, colors=None): # noqa: E501 - """Collections - a model defined in Swagger""" # noqa: E501 - self._colors = None - self.discriminator = None - if colors is not None: - self.colors = colors - - @property - def colors(self): - """Gets the colors of this Collections. # noqa: E501 - - - :return: The colors of this Collections. # noqa: E501 - :rtype: list[str] - """ - return self._colors - - @colors.setter - def colors(self, colors): - """Sets the colors of this Collections. - - - :param colors: The colors of this Collections. # noqa: E501 - :type: list[str] - """ - - self._colors = colors - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Collections, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Collections): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/collections_result.py b/edu_sharing_client/models/collections_result.py deleted file mode 100644 index 36e8ce55..00000000 --- a/edu_sharing_client/models/collections_result.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class CollectionsResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'count': 'int' - } - - attribute_map = { - 'count': 'count' - } - - def __init__(self, count=None): # noqa: E501 - """CollectionsResult - a model defined in Swagger""" # noqa: E501 - self._count = None - self.discriminator = None - if count is not None: - self.count = count - - @property - def count(self): - """Gets the count of this CollectionsResult. # noqa: E501 - - - :return: The count of this CollectionsResult. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this CollectionsResult. 
- - - :param count: The count of this CollectionsResult. # noqa: E501 - :type: int - """ - - self._count = count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(CollectionsResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CollectionsResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/column_v2.py b/edu_sharing_client/models/column_v2.py deleted file mode 100644 index 5dda163b..00000000 --- a/edu_sharing_client/models/column_v2.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ColumnV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'format': 'str', - 'show_default': 'bool' - } - - attribute_map = { - 'id': 'id', - 'format': 'format', - 'show_default': 'showDefault' - } - - def __init__(self, id=None, format=None, show_default=False): # noqa: E501 - """ColumnV2 - a model defined in Swagger""" # noqa: E501 - self._id = None - self._format = None - self._show_default = None - self.discriminator = None - if id is not None: - self.id = id - if format is not None: - self.format = format - if show_default is not None: - self.show_default = show_default - - @property - def id(self): - """Gets the id of this ColumnV2. # noqa: E501 - - - :return: The id of this ColumnV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this ColumnV2. - - - :param id: The id of this ColumnV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def format(self): - """Gets the format of this ColumnV2. # noqa: E501 - - - :return: The format of this ColumnV2. # noqa: E501 - :rtype: str - """ - return self._format - - @format.setter - def format(self, format): - """Sets the format of this ColumnV2. - - - :param format: The format of this ColumnV2. # noqa: E501 - :type: str - """ - - self._format = format - - @property - def show_default(self): - """Gets the show_default of this ColumnV2. 
# noqa: E501 - - - :return: The show_default of this ColumnV2. # noqa: E501 - :rtype: bool - """ - return self._show_default - - @show_default.setter - def show_default(self, show_default): - """Sets the show_default of this ColumnV2. - - - :param show_default: The show_default of this ColumnV2. # noqa: E501 - :type: bool - """ - - self._show_default = show_default - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ColumnV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ColumnV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/comment.py b/edu_sharing_client/models/comment.py deleted file mode 100644 index 65b6ebb0..00000000 --- a/edu_sharing_client/models/comment.py +++ /dev/null @@ -1,215 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Comment(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'ref': 'NodeRef', - 'reply_to': 'NodeRef', - 'creator': 'UserSimple', - 'created': 'int', - 'comment': 'str' - } - - attribute_map = { - 'ref': 'ref', - 'reply_to': 'replyTo', - 'creator': 'creator', - 'created': 'created', - 'comment': 'comment' - } - - def __init__(self, ref=None, reply_to=None, creator=None, created=None, comment=None): # noqa: E501 - """Comment - a model defined in Swagger""" # noqa: E501 - self._ref = None - self._reply_to = None - self._creator = None - self._created = None - self._comment = None - self.discriminator = None - if ref is not None: - self.ref = ref - if reply_to is not None: - self.reply_to = reply_to - if creator is not None: - self.creator = creator - if created is not None: - self.created = created - if comment is not None: - self.comment = comment - - @property - def ref(self): - """Gets the ref of this Comment. # noqa: E501 - - - :return: The ref of this Comment. # noqa: E501 - :rtype: NodeRef - """ - return self._ref - - @ref.setter - def ref(self, ref): - """Sets the ref of this Comment. - - - :param ref: The ref of this Comment. 
# noqa: E501 - :type: NodeRef - """ - - self._ref = ref - - @property - def reply_to(self): - """Gets the reply_to of this Comment. # noqa: E501 - - - :return: The reply_to of this Comment. # noqa: E501 - :rtype: NodeRef - """ - return self._reply_to - - @reply_to.setter - def reply_to(self, reply_to): - """Sets the reply_to of this Comment. - - - :param reply_to: The reply_to of this Comment. # noqa: E501 - :type: NodeRef - """ - - self._reply_to = reply_to - - @property - def creator(self): - """Gets the creator of this Comment. # noqa: E501 - - - :return: The creator of this Comment. # noqa: E501 - :rtype: UserSimple - """ - return self._creator - - @creator.setter - def creator(self, creator): - """Sets the creator of this Comment. - - - :param creator: The creator of this Comment. # noqa: E501 - :type: UserSimple - """ - - self._creator = creator - - @property - def created(self): - """Gets the created of this Comment. # noqa: E501 - - - :return: The created of this Comment. # noqa: E501 - :rtype: int - """ - return self._created - - @created.setter - def created(self, created): - """Sets the created of this Comment. - - - :param created: The created of this Comment. # noqa: E501 - :type: int - """ - - self._created = created - - @property - def comment(self): - """Gets the comment of this Comment. # noqa: E501 - - - :return: The comment of this Comment. # noqa: E501 - :rtype: str - """ - return self._comment - - @comment.setter - def comment(self, comment): - """Sets the comment of this Comment. - - - :param comment: The comment of this Comment. # noqa: E501 - :type: str - """ - - self._comment = comment - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Comment, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Comment): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/comments.py b/edu_sharing_client/models/comments.py deleted file mode 100644 index 6a1a4f55..00000000 --- a/edu_sharing_client/models/comments.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Comments(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'comments': 'list[Comment]' - } - - attribute_map = { - 'comments': 'comments' - } - - def __init__(self, comments=None): # noqa: E501 - """Comments - a model defined in Swagger""" # noqa: E501 - self._comments = None - self.discriminator = None - if comments is not None: - self.comments = comments - - @property - def comments(self): - """Gets the comments of this Comments. # noqa: E501 - - - :return: The comments of this Comments. # noqa: E501 - :rtype: list[Comment] - """ - return self._comments - - @comments.setter - def comments(self, comments): - """Sets the comments of this Comments. - - - :param comments: The comments of this Comments. # noqa: E501 - :type: list[Comment] - """ - - self._comments = comments - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Comments, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Comments): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/condition.py b/edu_sharing_client/models/condition.py deleted file mode 100644 index a5a4996c..00000000 --- a/edu_sharing_client/models/condition.py +++ /dev/null @@ -1,169 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Condition(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'type': 'str', - 'negate': 'bool', - 'value': 'str' - } - - attribute_map = { - 'type': 'type', - 'negate': 'negate', - 'value': 'value' - } - - def __init__(self, type=None, negate=False, value=None): # noqa: E501 - """Condition - a model defined in Swagger""" # noqa: E501 - self._type = None - self._negate = None - self._value = None - self.discriminator = None - if type is not None: - self.type = type - if negate is not None: - self.negate = negate - if value is not None: - self.value = value - - @property - def type(self): - """Gets the type of this Condition. # noqa: E501 - - - :return: The type of this Condition. 
# noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Condition. - - - :param type: The type of this Condition. # noqa: E501 - :type: str - """ - allowed_values = ["TOOLPERMISSION"] # noqa: E501 - if type not in allowed_values: - raise ValueError( - "Invalid value for `type` ({0}), must be one of {1}" # noqa: E501 - .format(type, allowed_values) - ) - - self._type = type - - @property - def negate(self): - """Gets the negate of this Condition. # noqa: E501 - - - :return: The negate of this Condition. # noqa: E501 - :rtype: bool - """ - return self._negate - - @negate.setter - def negate(self, negate): - """Sets the negate of this Condition. - - - :param negate: The negate of this Condition. # noqa: E501 - :type: bool - """ - - self._negate = negate - - @property - def value(self): - """Gets the value of this Condition. # noqa: E501 - - - :return: The value of this Condition. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this Condition. - - - :param value: The value of this Condition. # noqa: E501 - :type: str - """ - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Condition, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Condition): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/config.py b/edu_sharing_client/models/config.py deleted file mode 100644 index 534d516a..00000000 --- a/edu_sharing_client/models/config.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Config(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'current': 'Values', - '_global': 'Values', - 'language': 'Language' - } - - attribute_map = { - 'current': 'current', - '_global': 'global', - 'language': 'language' - } - - def __init__(self, current=None, _global=None, language=None): # noqa: E501 - """Config - a model defined in Swagger""" # noqa: E501 - self._current = None - self.__global = None - self._language = None - self.discriminator = None - if current is not None: - self.current = current - if _global is not None: - self._global = _global - if language is not None: - self.language = language - - @property - def current(self): - """Gets the current of this Config. # noqa: E501 - - - :return: The current of this Config. # noqa: E501 - :rtype: Values - """ - return self._current - - @current.setter - def current(self, current): - """Sets the current of this Config. - - - :param current: The current of this Config. # noqa: E501 - :type: Values - """ - - self._current = current - - @property - def _global(self): - """Gets the _global of this Config. # noqa: E501 - - - :return: The _global of this Config. # noqa: E501 - :rtype: Values - """ - return self.__global - - @_global.setter - def _global(self, _global): - """Sets the _global of this Config. - - - :param _global: The _global of this Config. # noqa: E501 - :type: Values - """ - - self.__global = _global - - @property - def language(self): - """Gets the language of this Config. # noqa: E501 - - - :return: The language of this Config. # noqa: E501 - :rtype: Language - """ - return self._language - - @language.setter - def language(self, language): - """Sets the language of this Config. - - - :param language: The language of this Config. # noqa: E501 - :type: Language - """ - - self._language = language - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Config, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Config): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/connector.py b/edu_sharing_client/models/connector.py deleted file mode 100644 index 7ff387f4..00000000 --- a/edu_sharing_client/models/connector.py +++ /dev/null @@ -1,270 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Connector(object): - """NOTE: This class is auto generated by the swagger code generator program. 
- - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'icon': 'str', - 'show_new': 'bool', - 'parameters': 'list[str]', - 'filetypes': 'list[ConnectorFileType]', - 'only_desktop': 'bool', - 'has_view_mode': 'bool' - } - - attribute_map = { - 'id': 'id', - 'icon': 'icon', - 'show_new': 'showNew', - 'parameters': 'parameters', - 'filetypes': 'filetypes', - 'only_desktop': 'onlyDesktop', - 'has_view_mode': 'hasViewMode' - } - - def __init__(self, id=None, icon=None, show_new=False, parameters=None, filetypes=None, only_desktop=False, has_view_mode=False): # noqa: E501 - """Connector - a model defined in Swagger""" # noqa: E501 - self._id = None - self._icon = None - self._show_new = None - self._parameters = None - self._filetypes = None - self._only_desktop = None - self._has_view_mode = None - self.discriminator = None - if id is not None: - self.id = id - if icon is not None: - self.icon = icon - self.show_new = show_new - if parameters is not None: - self.parameters = parameters - if filetypes is not None: - self.filetypes = filetypes - if only_desktop is not None: - self.only_desktop = only_desktop - if has_view_mode is not None: - self.has_view_mode = has_view_mode - - @property - def id(self): - """Gets the id of this Connector. # noqa: E501 - - - :return: The id of this Connector. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this Connector. - - - :param id: The id of this Connector. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def icon(self): - """Gets the icon of this Connector. # noqa: E501 - - - :return: The icon of this Connector. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this Connector. - - - :param icon: The icon of this Connector. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def show_new(self): - """Gets the show_new of this Connector. # noqa: E501 - - false # noqa: E501 - - :return: The show_new of this Connector. # noqa: E501 - :rtype: bool - """ - return self._show_new - - @show_new.setter - def show_new(self, show_new): - """Sets the show_new of this Connector. - - false # noqa: E501 - - :param show_new: The show_new of this Connector. # noqa: E501 - :type: bool - """ - if show_new is None: - raise ValueError("Invalid value for `show_new`, must not be `None`") # noqa: E501 - - self._show_new = show_new - - @property - def parameters(self): - """Gets the parameters of this Connector. # noqa: E501 - - - :return: The parameters of this Connector. # noqa: E501 - :rtype: list[str] - """ - return self._parameters - - @parameters.setter - def parameters(self, parameters): - """Sets the parameters of this Connector. - - - :param parameters: The parameters of this Connector. # noqa: E501 - :type: list[str] - """ - - self._parameters = parameters - - @property - def filetypes(self): - """Gets the filetypes of this Connector. # noqa: E501 - - - :return: The filetypes of this Connector. # noqa: E501 - :rtype: list[ConnectorFileType] - """ - return self._filetypes - - @filetypes.setter - def filetypes(self, filetypes): - """Sets the filetypes of this Connector. - - - :param filetypes: The filetypes of this Connector. 
# noqa: E501 - :type: list[ConnectorFileType] - """ - - self._filetypes = filetypes - - @property - def only_desktop(self): - """Gets the only_desktop of this Connector. # noqa: E501 - - - :return: The only_desktop of this Connector. # noqa: E501 - :rtype: bool - """ - return self._only_desktop - - @only_desktop.setter - def only_desktop(self, only_desktop): - """Sets the only_desktop of this Connector. - - - :param only_desktop: The only_desktop of this Connector. # noqa: E501 - :type: bool - """ - - self._only_desktop = only_desktop - - @property - def has_view_mode(self): - """Gets the has_view_mode of this Connector. # noqa: E501 - - - :return: The has_view_mode of this Connector. # noqa: E501 - :rtype: bool - """ - return self._has_view_mode - - @has_view_mode.setter - def has_view_mode(self, has_view_mode): - """Sets the has_view_mode of this Connector. - - - :param has_view_mode: The has_view_mode of this Connector. # noqa: E501 - :type: bool - """ - - self._has_view_mode = has_view_mode - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Connector, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Connector): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/connector_file_type.py b/edu_sharing_client/models/connector_file_type.py deleted file mode 100644 index 5b2c8bef..00000000 --- a/edu_sharing_client/models/connector_file_type.py +++ /dev/null @@ -1,293 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ConnectorFileType(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'ccressourceversion': 'str', - 'ccressourcetype': 'str', - 'ccresourcesubtype': 'str', - 'editor_type': 'str', - 'mimetype': 'str', - 'filetype': 'str', - 'creatable': 'bool', - 'editable': 'bool' - } - - attribute_map = { - 'ccressourceversion': 'ccressourceversion', - 'ccressourcetype': 'ccressourcetype', - 'ccresourcesubtype': 'ccresourcesubtype', - 'editor_type': 'editorType', - 'mimetype': 'mimetype', - 'filetype': 'filetype', - 'creatable': 'creatable', - 'editable': 'editable' - } - - def __init__(self, ccressourceversion=None, ccressourcetype=None, ccresourcesubtype=None, editor_type=None, mimetype=None, filetype=None, creatable=False, editable=False): # noqa: E501 - """ConnectorFileType - a model defined in Swagger""" # noqa: E501 - self._ccressourceversion = None - self._ccressourcetype = None - self._ccresourcesubtype = None - self._editor_type = None - self._mimetype = None - self._filetype = None - self._creatable = None - self._editable = None - self.discriminator = None - if ccressourceversion is not None: - self.ccressourceversion = ccressourceversion - if ccressourcetype is not None: - self.ccressourcetype = ccressourcetype - if ccresourcesubtype is not None: - self.ccresourcesubtype = ccresourcesubtype - if editor_type is not None: - self.editor_type = editor_type - if mimetype is not None: - self.mimetype = mimetype - if filetype is not None: - self.filetype = filetype - if creatable is not None: - self.creatable = creatable - if editable is not None: - self.editable = editable - - @property - def ccressourceversion(self): - """Gets the ccressourceversion of this ConnectorFileType. # noqa: E501 - - - :return: The ccressourceversion of this ConnectorFileType. # noqa: E501 - :rtype: str - """ - return self._ccressourceversion - - @ccressourceversion.setter - def ccressourceversion(self, ccressourceversion): - """Sets the ccressourceversion of this ConnectorFileType. - - - :param ccressourceversion: The ccressourceversion of this ConnectorFileType. # noqa: E501 - :type: str - """ - - self._ccressourceversion = ccressourceversion - - @property - def ccressourcetype(self): - """Gets the ccressourcetype of this ConnectorFileType. # noqa: E501 - - - :return: The ccressourcetype of this ConnectorFileType. # noqa: E501 - :rtype: str - """ - return self._ccressourcetype - - @ccressourcetype.setter - def ccressourcetype(self, ccressourcetype): - """Sets the ccressourcetype of this ConnectorFileType. - - - :param ccressourcetype: The ccressourcetype of this ConnectorFileType. # noqa: E501 - :type: str - """ - - self._ccressourcetype = ccressourcetype - - @property - def ccresourcesubtype(self): - """Gets the ccresourcesubtype of this ConnectorFileType. # noqa: E501 - - - :return: The ccresourcesubtype of this ConnectorFileType. # noqa: E501 - :rtype: str - """ - return self._ccresourcesubtype - - @ccresourcesubtype.setter - def ccresourcesubtype(self, ccresourcesubtype): - """Sets the ccresourcesubtype of this ConnectorFileType. - - - :param ccresourcesubtype: The ccresourcesubtype of this ConnectorFileType. # noqa: E501 - :type: str - """ - - self._ccresourcesubtype = ccresourcesubtype - - @property - def editor_type(self): - """Gets the editor_type of this ConnectorFileType. # noqa: E501 - - - :return: The editor_type of this ConnectorFileType. # noqa: E501 - :rtype: str - """ - return self._editor_type - - @editor_type.setter - def editor_type(self, editor_type): - """Sets the editor_type of this ConnectorFileType. 
- - - :param editor_type: The editor_type of this ConnectorFileType. # noqa: E501 - :type: str - """ - - self._editor_type = editor_type - - @property - def mimetype(self): - """Gets the mimetype of this ConnectorFileType. # noqa: E501 - - - :return: The mimetype of this ConnectorFileType. # noqa: E501 - :rtype: str - """ - return self._mimetype - - @mimetype.setter - def mimetype(self, mimetype): - """Sets the mimetype of this ConnectorFileType. - - - :param mimetype: The mimetype of this ConnectorFileType. # noqa: E501 - :type: str - """ - - self._mimetype = mimetype - - @property - def filetype(self): - """Gets the filetype of this ConnectorFileType. # noqa: E501 - - - :return: The filetype of this ConnectorFileType. # noqa: E501 - :rtype: str - """ - return self._filetype - - @filetype.setter - def filetype(self, filetype): - """Sets the filetype of this ConnectorFileType. - - - :param filetype: The filetype of this ConnectorFileType. # noqa: E501 - :type: str - """ - - self._filetype = filetype - - @property - def creatable(self): - """Gets the creatable of this ConnectorFileType. # noqa: E501 - - - :return: The creatable of this ConnectorFileType. # noqa: E501 - :rtype: bool - """ - return self._creatable - - @creatable.setter - def creatable(self, creatable): - """Sets the creatable of this ConnectorFileType. - - - :param creatable: The creatable of this ConnectorFileType. # noqa: E501 - :type: bool - """ - - self._creatable = creatable - - @property - def editable(self): - """Gets the editable of this ConnectorFileType. # noqa: E501 - - - :return: The editable of this ConnectorFileType. # noqa: E501 - :rtype: bool - """ - return self._editable - - @editable.setter - def editable(self, editable): - """Sets the editable of this ConnectorFileType. - - - :param editable: The editable of this ConnectorFileType. # noqa: E501 - :type: bool - """ - - self._editable = editable - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ConnectorFileType, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ConnectorFileType): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/connector_list.py b/edu_sharing_client/models/connector_list.py deleted file mode 100644 index 8d081c0f..00000000 --- a/edu_sharing_client/models/connector_list.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ConnectorList(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'url': 'str', - 'connectors': 'list[Connector]' - } - - attribute_map = { - 'url': 'url', - 'connectors': 'connectors' - } - - def __init__(self, url=None, connectors=None): # noqa: E501 - """ConnectorList - a model defined in Swagger""" # noqa: E501 - self._url = None - self._connectors = None - self.discriminator = None - if url is not None: - self.url = url - if connectors is not None: - self.connectors = connectors - - @property - def url(self): - """Gets the url of this ConnectorList. # noqa: E501 - - - :return: The url of this ConnectorList. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this ConnectorList. - - - :param url: The url of this ConnectorList. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def connectors(self): - """Gets the connectors of this ConnectorList. # noqa: E501 - - - :return: The connectors of this ConnectorList. # noqa: E501 - :rtype: list[Connector] - """ - return self._connectors - - @connectors.setter - def connectors(self, connectors): - """Sets the connectors of this ConnectorList. - - - :param connectors: The connectors of this ConnectorList. # noqa: E501 - :type: list[Connector] - """ - - self._connectors = connectors - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ConnectorList, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ConnectorList): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/content.py b/edu_sharing_client/models/content.py deleted file mode 100644 index 82c37462..00000000 --- a/edu_sharing_client/models/content.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Content(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'url': 'str', - 'hash': 'str', - 'version': 'str' - } - - attribute_map = { - 'url': 'url', - 'hash': 'hash', - 'version': 'version' - } - - def __init__(self, url=None, hash=None, version=None): # noqa: E501 - """Content - a model defined in Swagger""" # noqa: E501 - self._url = None - self._hash = None - self._version = None - self.discriminator = None - if url is not None: - self.url = url - if hash is not None: - self.hash = hash - if version is not None: - self.version = version - - @property - def url(self): - """Gets the url of this Content. # noqa: E501 - - - :return: The url of this Content. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Content. - - - :param url: The url of this Content. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def hash(self): - """Gets the hash of this Content. # noqa: E501 - - - :return: The hash of this Content. # noqa: E501 - :rtype: str - """ - return self._hash - - @hash.setter - def hash(self, hash): - """Sets the hash of this Content. - - - :param hash: The hash of this Content. # noqa: E501 - :type: str - """ - - self._hash = hash - - @property - def version(self): - """Gets the version of this Content. # noqa: E501 - - - :return: The version of this Content. # noqa: E501 - :rtype: str - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this Content. - - - :param version: The version of this Content. 
# noqa: E501 - :type: str - """ - - self._version = version - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Content, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Content): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/context_menu_entry.py b/edu_sharing_client/models/context_menu_entry.py deleted file mode 100644 index be474b11..00000000 --- a/edu_sharing_client/models/context_menu_entry.py +++ /dev/null @@ -1,475 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ContextMenuEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'position': 'int', - 'icon': 'str', - 'name': 'str', - 'url': 'str', - 'is_disabled': 'bool', - 'is_seperate': 'bool', - 'is_seperate_bottom': 'bool', - 'only_desktop': 'bool', - 'mode': 'str', - 'ajax': 'bool', - 'permission': 'str', - 'toolpermission': 'str', - 'is_directory': 'bool', - 'multiple': 'bool', - 'remove': 'bool' - } - - attribute_map = { - 'position': 'position', - 'icon': 'icon', - 'name': 'name', - 'url': 'url', - 'is_disabled': 'isDisabled', - 'is_seperate': 'isSeperate', - 'is_seperate_bottom': 'isSeperateBottom', - 'only_desktop': 'onlyDesktop', - 'mode': 'mode', - 'ajax': 'ajax', - 'permission': 'permission', - 'toolpermission': 'toolpermission', - 'is_directory': 'isDirectory', - 'multiple': 'multiple', - 'remove': 'remove' - } - - def __init__(self, position=None, icon=None, name=None, url=None, is_disabled=False, is_seperate=False, is_seperate_bottom=False, only_desktop=False, mode=None, ajax=False, permission=None, toolpermission=None, is_directory=False, multiple=False, remove=False): # noqa: E501 - """ContextMenuEntry - a model defined in Swagger""" # noqa: E501 - self._position = None - self._icon = None - self._name = None - self._url = None - self._is_disabled = None - self._is_seperate = None - self._is_seperate_bottom = None - self._only_desktop = None - self._mode = None - self._ajax = None - self._permission = None - self._toolpermission = None - self._is_directory = None - self._multiple = None - self._remove = None - self.discriminator = None - if position is not None: - self.position = position - if icon is not None: - self.icon = icon - if name is not None: - self.name = name - if url is not None: - self.url = url - if is_disabled is not None: - self.is_disabled = is_disabled - if is_seperate is not None: - self.is_seperate = is_seperate - if is_seperate_bottom is not None: - self.is_seperate_bottom = is_seperate_bottom - if only_desktop is not None: - self.only_desktop = only_desktop - if mode is not None: - self.mode = mode - if ajax is not None: - self.ajax = ajax - if permission is not None: - self.permission = permission - if toolpermission is not None: - self.toolpermission = toolpermission - if is_directory is not None: - self.is_directory = is_directory - if multiple is not None: - self.multiple = multiple - if remove is not None: - self.remove = remove - - @property - def position(self): - """Gets the position of this ContextMenuEntry. # noqa: E501 - - - :return: The position of this ContextMenuEntry. # noqa: E501 - :rtype: int - """ - return self._position - - @position.setter - def position(self, position): - """Sets the position of this ContextMenuEntry. - - - :param position: The position of this ContextMenuEntry. # noqa: E501 - :type: int - """ - - self._position = position - - @property - def icon(self): - """Gets the icon of this ContextMenuEntry. # noqa: E501 - - - :return: The icon of this ContextMenuEntry. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this ContextMenuEntry. - - - :param icon: The icon of this ContextMenuEntry. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def name(self): - """Gets the name of this ContextMenuEntry. # noqa: E501 - - - :return: The name of this ContextMenuEntry. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this ContextMenuEntry. - - - :param name: The name of this ContextMenuEntry. 
# noqa: E501 - :type: str - """ - - self._name = name - - @property - def url(self): - """Gets the url of this ContextMenuEntry. # noqa: E501 - - - :return: The url of this ContextMenuEntry. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this ContextMenuEntry. - - - :param url: The url of this ContextMenuEntry. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def is_disabled(self): - """Gets the is_disabled of this ContextMenuEntry. # noqa: E501 - - - :return: The is_disabled of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._is_disabled - - @is_disabled.setter - def is_disabled(self, is_disabled): - """Sets the is_disabled of this ContextMenuEntry. - - - :param is_disabled: The is_disabled of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._is_disabled = is_disabled - - @property - def is_seperate(self): - """Gets the is_seperate of this ContextMenuEntry. # noqa: E501 - - - :return: The is_seperate of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._is_seperate - - @is_seperate.setter - def is_seperate(self, is_seperate): - """Sets the is_seperate of this ContextMenuEntry. - - - :param is_seperate: The is_seperate of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._is_seperate = is_seperate - - @property - def is_seperate_bottom(self): - """Gets the is_seperate_bottom of this ContextMenuEntry. # noqa: E501 - - - :return: The is_seperate_bottom of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._is_seperate_bottom - - @is_seperate_bottom.setter - def is_seperate_bottom(self, is_seperate_bottom): - """Sets the is_seperate_bottom of this ContextMenuEntry. - - - :param is_seperate_bottom: The is_seperate_bottom of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._is_seperate_bottom = is_seperate_bottom - - @property - def only_desktop(self): - """Gets the only_desktop of this ContextMenuEntry. # noqa: E501 - - - :return: The only_desktop of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._only_desktop - - @only_desktop.setter - def only_desktop(self, only_desktop): - """Sets the only_desktop of this ContextMenuEntry. - - - :param only_desktop: The only_desktop of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._only_desktop = only_desktop - - @property - def mode(self): - """Gets the mode of this ContextMenuEntry. # noqa: E501 - - - :return: The mode of this ContextMenuEntry. # noqa: E501 - :rtype: str - """ - return self._mode - - @mode.setter - def mode(self, mode): - """Sets the mode of this ContextMenuEntry. - - - :param mode: The mode of this ContextMenuEntry. # noqa: E501 - :type: str - """ - - self._mode = mode - - @property - def ajax(self): - """Gets the ajax of this ContextMenuEntry. # noqa: E501 - - - :return: The ajax of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._ajax - - @ajax.setter - def ajax(self, ajax): - """Sets the ajax of this ContextMenuEntry. - - - :param ajax: The ajax of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._ajax = ajax - - @property - def permission(self): - """Gets the permission of this ContextMenuEntry. # noqa: E501 - - - :return: The permission of this ContextMenuEntry. # noqa: E501 - :rtype: str - """ - return self._permission - - @permission.setter - def permission(self, permission): - """Sets the permission of this ContextMenuEntry. 
- - - :param permission: The permission of this ContextMenuEntry. # noqa: E501 - :type: str - """ - - self._permission = permission - - @property - def toolpermission(self): - """Gets the toolpermission of this ContextMenuEntry. # noqa: E501 - - - :return: The toolpermission of this ContextMenuEntry. # noqa: E501 - :rtype: str - """ - return self._toolpermission - - @toolpermission.setter - def toolpermission(self, toolpermission): - """Sets the toolpermission of this ContextMenuEntry. - - - :param toolpermission: The toolpermission of this ContextMenuEntry. # noqa: E501 - :type: str - """ - - self._toolpermission = toolpermission - - @property - def is_directory(self): - """Gets the is_directory of this ContextMenuEntry. # noqa: E501 - - - :return: The is_directory of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._is_directory - - @is_directory.setter - def is_directory(self, is_directory): - """Sets the is_directory of this ContextMenuEntry. - - - :param is_directory: The is_directory of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._is_directory = is_directory - - @property - def multiple(self): - """Gets the multiple of this ContextMenuEntry. # noqa: E501 - - - :return: The multiple of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._multiple - - @multiple.setter - def multiple(self, multiple): - """Sets the multiple of this ContextMenuEntry. - - - :param multiple: The multiple of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._multiple = multiple - - @property - def remove(self): - """Gets the remove of this ContextMenuEntry. # noqa: E501 - - - :return: The remove of this ContextMenuEntry. # noqa: E501 - :rtype: bool - """ - return self._remove - - @remove.setter - def remove(self, remove): - """Sets the remove of this ContextMenuEntry. - - - :param remove: The remove of this ContextMenuEntry. # noqa: E501 - :type: bool - """ - - self._remove = remove - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ContextMenuEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ContextMenuEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/counts.py b/edu_sharing_client/models/counts.py deleted file mode 100644 index c7d33198..00000000 --- a/edu_sharing_client/models/counts.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Counts(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'elements': 'list[Element]' - } - - attribute_map = { - 'elements': 'elements' - } - - def __init__(self, elements=None): # noqa: E501 - """Counts - a model defined in Swagger""" # noqa: E501 - self._elements = None - self.discriminator = None - if elements is not None: - self.elements = elements - - @property - def elements(self): - """Gets the elements of this Counts. # noqa: E501 - - - :return: The elements of this Counts. # noqa: E501 - :rtype: list[Element] - """ - return self._elements - - @elements.setter - def elements(self, elements): - """Sets the elements of this Counts. - - - :param elements: The elements of this Counts. # noqa: E501 - :type: list[Element] - """ - - self._elements = elements - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Counts, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Counts): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/create.py b/edu_sharing_client/models/create.py deleted file mode 100644 index 0f46d5a4..00000000 --- a/edu_sharing_client/models/create.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Create(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'only_metadata': 'bool' - } - - attribute_map = { - 'only_metadata': 'onlyMetadata' - } - - def __init__(self, only_metadata=False): # noqa: E501 - """Create - a model defined in Swagger""" # noqa: E501 - self._only_metadata = None - self.discriminator = None - if only_metadata is not None: - self.only_metadata = only_metadata - - @property - def only_metadata(self): - """Gets the only_metadata of this Create. # noqa: E501 - - - :return: The only_metadata of this Create. # noqa: E501 - :rtype: bool - """ - return self._only_metadata - - @only_metadata.setter - def only_metadata(self, only_metadata): - """Sets the only_metadata of this Create. - - - :param only_metadata: The only_metadata of this Create. # noqa: E501 - :type: bool - """ - - self._only_metadata = only_metadata - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Create, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Create): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/delete_option.py b/edu_sharing_client/models/delete_option.py deleted file mode 100644 index 05577610..00000000 --- a/edu_sharing_client/models/delete_option.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class DeleteOption(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'delete': 'bool' - } - - attribute_map = { - 'delete': 'delete' - } - - def __init__(self, delete=False): # noqa: E501 - """DeleteOption - a model defined in Swagger""" # noqa: E501 - self._delete = None - self.discriminator = None - if delete is not None: - self.delete = delete - - @property - def delete(self): - """Gets the delete of this DeleteOption. # noqa: E501 - - - :return: The delete of this DeleteOption. # noqa: E501 - :rtype: bool - """ - return self._delete - - @delete.setter - def delete(self, delete): - """Sets the delete of this DeleteOption. - - - :param delete: The delete of this DeleteOption. 
# noqa: E501 - :type: bool - """ - - self._delete = delete - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(DeleteOption, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, DeleteOption): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/dynamic_config.py b/edu_sharing_client/models/dynamic_config.py deleted file mode 100644 index 725bb80e..00000000 --- a/edu_sharing_client/models/dynamic_config.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class DynamicConfig(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'node_id': 'str', - 'value': 'str' - } - - attribute_map = { - 'node_id': 'nodeId', - 'value': 'value' - } - - def __init__(self, node_id=None, value=None): # noqa: E501 - """DynamicConfig - a model defined in Swagger""" # noqa: E501 - self._node_id = None - self._value = None - self.discriminator = None - if node_id is not None: - self.node_id = node_id - if value is not None: - self.value = value - - @property - def node_id(self): - """Gets the node_id of this DynamicConfig. # noqa: E501 - - - :return: The node_id of this DynamicConfig. # noqa: E501 - :rtype: str - """ - return self._node_id - - @node_id.setter - def node_id(self, node_id): - """Sets the node_id of this DynamicConfig. - - - :param node_id: The node_id of this DynamicConfig. # noqa: E501 - :type: str - """ - - self._node_id = node_id - - @property - def value(self): - """Gets the value of this DynamicConfig. # noqa: E501 - - - :return: The value of this DynamicConfig. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this DynamicConfig. - - - :param value: The value of this DynamicConfig. 
# noqa: E501 - :type: str - """ - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(DynamicConfig, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, DynamicConfig): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/element.py b/edu_sharing_client/models/element.py deleted file mode 100644 index f66dc36c..00000000 --- a/edu_sharing_client/models/element.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Element(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'name': 'str', - 'type': 'str' - } - - attribute_map = { - 'id': 'id', - 'name': 'name', - 'type': 'type' - } - - def __init__(self, id=None, name=None, type=None): # noqa: E501 - """Element - a model defined in Swagger""" # noqa: E501 - self._id = None - self._name = None - self._type = None - self.discriminator = None - if id is not None: - self.id = id - if name is not None: - self.name = name - if type is not None: - self.type = type - - @property - def id(self): - """Gets the id of this Element. # noqa: E501 - - - :return: The id of this Element. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this Element. - - - :param id: The id of this Element. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def name(self): - """Gets the name of this Element. # noqa: E501 - - - :return: The name of this Element. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this Element. - - - :param name: The name of this Element. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def type(self): - """Gets the type of this Element. # noqa: E501 - - - :return: The type of this Element. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Element. - - - :param type: The type of this Element. 
# noqa: E501 - :type: str - """ - - self._type = type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Element, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Element): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/error_response.py b/edu_sharing_client/models/error_response.py deleted file mode 100644 index c18e8b3e..00000000 --- a/edu_sharing_client/models/error_response.py +++ /dev/null @@ -1,192 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ErrorResponse(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'error': 'str', - 'message': 'str', - 'log_level': 'str', - 'stacktrace_array': 'list[str]' - } - - attribute_map = { - 'error': 'error', - 'message': 'message', - 'log_level': 'logLevel', - 'stacktrace_array': 'stacktraceArray' - } - - def __init__(self, error=None, message=None, log_level=None, stacktrace_array=None): # noqa: E501 - """ErrorResponse - a model defined in Swagger""" # noqa: E501 - self._error = None - self._message = None - self._log_level = None - self._stacktrace_array = None - self.discriminator = None - self.error = error - self.message = message - if log_level is not None: - self.log_level = log_level - self.stacktrace_array = stacktrace_array - - @property - def error(self): - """Gets the error of this ErrorResponse. # noqa: E501 - - - :return: The error of this ErrorResponse. # noqa: E501 - :rtype: str - """ - return self._error - - @error.setter - def error(self, error): - """Sets the error of this ErrorResponse. - - - :param error: The error of this ErrorResponse. # noqa: E501 - :type: str - """ - if error is None: - raise ValueError("Invalid value for `error`, must not be `None`") # noqa: E501 - - self._error = error - - @property - def message(self): - """Gets the message of this ErrorResponse. # noqa: E501 - - - :return: The message of this ErrorResponse. 
# noqa: E501 - :rtype: str - """ - return self._message - - @message.setter - def message(self, message): - """Sets the message of this ErrorResponse. - - - :param message: The message of this ErrorResponse. # noqa: E501 - :type: str - """ - if message is None: - raise ValueError("Invalid value for `message`, must not be `None`") # noqa: E501 - - self._message = message - - @property - def log_level(self): - """Gets the log_level of this ErrorResponse. # noqa: E501 - - - :return: The log_level of this ErrorResponse. # noqa: E501 - :rtype: str - """ - return self._log_level - - @log_level.setter - def log_level(self, log_level): - """Sets the log_level of this ErrorResponse. - - - :param log_level: The log_level of this ErrorResponse. # noqa: E501 - :type: str - """ - - self._log_level = log_level - - @property - def stacktrace_array(self): - """Gets the stacktrace_array of this ErrorResponse. # noqa: E501 - - - :return: The stacktrace_array of this ErrorResponse. # noqa: E501 - :rtype: list[str] - """ - return self._stacktrace_array - - @stacktrace_array.setter - def stacktrace_array(self, stacktrace_array): - """Sets the stacktrace_array of this ErrorResponse. - - - :param stacktrace_array: The stacktrace_array of this ErrorResponse. # noqa: E501 - :type: list[str] - """ - if stacktrace_array is None: - raise ValueError("Invalid value for `stacktrace_array`, must not be `None`") # noqa: E501 - - self._stacktrace_array = stacktrace_array - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ErrorResponse, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ErrorResponse): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/excel_result.py b/edu_sharing_client/models/excel_result.py deleted file mode 100644 index 0dff80ed..00000000 --- a/edu_sharing_client/models/excel_result.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ExcelResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'rows': 'int' - } - - attribute_map = { - 'rows': 'rows' - } - - def __init__(self, rows=None): # noqa: E501 - """ExcelResult - a model defined in Swagger""" # noqa: E501 - self._rows = None - self.discriminator = None - if rows is not None: - self.rows = rows - - @property - def rows(self): - """Gets the rows of this ExcelResult. # noqa: E501 - - - :return: The rows of this ExcelResult. # noqa: E501 - :rtype: int - """ - return self._rows - - @rows.setter - def rows(self, rows): - """Sets the rows of this ExcelResult. - - - :param rows: The rows of this ExcelResult. # noqa: E501 - :type: int - """ - - self._rows = rows - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ExcelResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ExcelResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/facette.py b/edu_sharing_client/models/facette.py deleted file mode 100644 index b9905dfc..00000000 --- a/edu_sharing_client/models/facette.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Facette(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - '_property': 'str', - 'values': 'list[Value]' - } - - attribute_map = { - '_property': 'property', - 'values': 'values' - } - - def __init__(self, _property=None, values=None): # noqa: E501 - """Facette - a model defined in Swagger""" # noqa: E501 - self.__property = None - self._values = None - self.discriminator = None - self._property = _property - self.values = values - - @property - def _property(self): - """Gets the _property of this Facette. # noqa: E501 - - - :return: The _property of this Facette. # noqa: E501 - :rtype: str - """ - return self.__property - - @_property.setter - def _property(self, _property): - """Sets the _property of this Facette. - - - :param _property: The _property of this Facette. 
# noqa: E501 - :type: str - """ - if _property is None: - raise ValueError("Invalid value for `_property`, must not be `None`") # noqa: E501 - - self.__property = _property - - @property - def values(self): - """Gets the values of this Facette. # noqa: E501 - - - :return: The values of this Facette. # noqa: E501 - :rtype: list[Value] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this Facette. - - - :param values: The values of this Facette. # noqa: E501 - :type: list[Value] - """ - if values is None: - raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501 - - self._values = values - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Facette, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Facette): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/filter.py b/edu_sharing_client/models/filter.py deleted file mode 100644 index 3b377768..00000000 --- a/edu_sharing_client/models/filter.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Filter(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'entries': 'list[FilterEntry]' - } - - attribute_map = { - 'entries': 'entries' - } - - def __init__(self, entries=None): # noqa: E501 - """Filter - a model defined in Swagger""" # noqa: E501 - self._entries = None - self.discriminator = None - self.entries = entries - - @property - def entries(self): - """Gets the entries of this Filter. # noqa: E501 - - - :return: The entries of this Filter. # noqa: E501 - :rtype: list[FilterEntry] - """ - return self._entries - - @entries.setter - def entries(self, entries): - """Sets the entries of this Filter. - - - :param entries: The entries of this Filter. 
# noqa: E501 - :type: list[FilterEntry] - """ - if entries is None: - raise ValueError("Invalid value for `entries`, must not be `None`") # noqa: E501 - - self._entries = entries - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Filter, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Filter): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/filter_entry.py b/edu_sharing_client/models/filter_entry.py deleted file mode 100644 index fb184ff9..00000000 --- a/edu_sharing_client/models/filter_entry.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class FilterEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - '_property': 'str', - 'values': 'list[str]' - } - - attribute_map = { - '_property': 'property', - 'values': 'values' - } - - def __init__(self, _property=None, values=None): # noqa: E501 - """FilterEntry - a model defined in Swagger""" # noqa: E501 - self.__property = None - self._values = None - self.discriminator = None - self._property = _property - self.values = values - - @property - def _property(self): - """Gets the _property of this FilterEntry. # noqa: E501 - - - :return: The _property of this FilterEntry. # noqa: E501 - :rtype: str - """ - return self.__property - - @_property.setter - def _property(self, _property): - """Sets the _property of this FilterEntry. - - - :param _property: The _property of this FilterEntry. # noqa: E501 - :type: str - """ - if _property is None: - raise ValueError("Invalid value for `_property`, must not be `None`") # noqa: E501 - - self.__property = _property - - @property - def values(self): - """Gets the values of this FilterEntry. # noqa: E501 - - - :return: The values of this FilterEntry. # noqa: E501 - :rtype: list[str] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this FilterEntry. - - - :param values: The values of this FilterEntry. 
# noqa: E501 - :type: list[str] - """ - if values is None: - raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501 - - self._values = values - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(FilterEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, FilterEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/frontpage.py b/edu_sharing_client/models/frontpage.py deleted file mode 100644 index 88945140..00000000 --- a/edu_sharing_client/models/frontpage.py +++ /dev/null @@ -1,253 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Frontpage(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'total_count': 'int', - 'display_count': 'int', - 'mode': 'str', - 'timespan': 'str', - 'queries': 'list[Query]', - 'collection': 'str' - } - - attribute_map = { - 'total_count': 'totalCount', - 'display_count': 'displayCount', - 'mode': 'mode', - 'timespan': 'timespan', - 'queries': 'queries', - 'collection': 'collection' - } - - def __init__(self, total_count=None, display_count=None, mode=None, timespan=None, queries=None, collection=None): # noqa: E501 - """Frontpage - a model defined in Swagger""" # noqa: E501 - self._total_count = None - self._display_count = None - self._mode = None - self._timespan = None - self._queries = None - self._collection = None - self.discriminator = None - if total_count is not None: - self.total_count = total_count - if display_count is not None: - self.display_count = display_count - if mode is not None: - self.mode = mode - if timespan is not None: - self.timespan = timespan - if queries is not None: - self.queries = queries - if collection is not None: - self.collection = collection - - @property - def total_count(self): - """Gets the total_count of this Frontpage. # noqa: E501 - - - :return: The total_count of this Frontpage. # noqa: E501 - :rtype: int - """ - return self._total_count - - @total_count.setter - def total_count(self, total_count): - """Sets the total_count of this Frontpage. 
- - - :param total_count: The total_count of this Frontpage. # noqa: E501 - :type: int - """ - - self._total_count = total_count - - @property - def display_count(self): - """Gets the display_count of this Frontpage. # noqa: E501 - - - :return: The display_count of this Frontpage. # noqa: E501 - :rtype: int - """ - return self._display_count - - @display_count.setter - def display_count(self, display_count): - """Sets the display_count of this Frontpage. - - - :param display_count: The display_count of this Frontpage. # noqa: E501 - :type: int - """ - - self._display_count = display_count - - @property - def mode(self): - """Gets the mode of this Frontpage. # noqa: E501 - - - :return: The mode of this Frontpage. # noqa: E501 - :rtype: str - """ - return self._mode - - @mode.setter - def mode(self, mode): - """Sets the mode of this Frontpage. - - - :param mode: The mode of this Frontpage. # noqa: E501 - :type: str - """ - allowed_values = ["collection", "rating", "views", "downloads"] # noqa: E501 - if mode not in allowed_values: - raise ValueError( - "Invalid value for `mode` ({0}), must be one of {1}" # noqa: E501 - .format(mode, allowed_values) - ) - - self._mode = mode - - @property - def timespan(self): - """Gets the timespan of this Frontpage. # noqa: E501 - - - :return: The timespan of this Frontpage. # noqa: E501 - :rtype: str - """ - return self._timespan - - @timespan.setter - def timespan(self, timespan): - """Sets the timespan of this Frontpage. - - - :param timespan: The timespan of this Frontpage. # noqa: E501 - :type: str - """ - allowed_values = ["days_30", "days_100", "all"] # noqa: E501 - if timespan not in allowed_values: - raise ValueError( - "Invalid value for `timespan` ({0}), must be one of {1}" # noqa: E501 - .format(timespan, allowed_values) - ) - - self._timespan = timespan - - @property - def queries(self): - """Gets the queries of this Frontpage. # noqa: E501 - - - :return: The queries of this Frontpage. # noqa: E501 - :rtype: list[Query] - """ - return self._queries - - @queries.setter - def queries(self, queries): - """Sets the queries of this Frontpage. - - - :param queries: The queries of this Frontpage. # noqa: E501 - :type: list[Query] - """ - - self._queries = queries - - @property - def collection(self): - """Gets the collection of this Frontpage. # noqa: E501 - - - :return: The collection of this Frontpage. # noqa: E501 - :rtype: str - """ - return self._collection - - @collection.setter - def collection(self, collection): - """Sets the collection of this Frontpage. - - - :param collection: The collection of this Frontpage. 
# noqa: E501 - :type: str - """ - - self._collection = collection - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Frontpage, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Frontpage): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/general.py b/edu_sharing_client/models/general.py deleted file mode 100644 index ba9cf24e..00000000 --- a/edu_sharing_client/models/general.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class General(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'referenced_in_name': 'str', - 'referenced_in_type': 'str', - 'referenced_in_instance': 'str' - } - - attribute_map = { - 'referenced_in_name': 'referencedInName', - 'referenced_in_type': 'referencedInType', - 'referenced_in_instance': 'referencedInInstance' - } - - def __init__(self, referenced_in_name=None, referenced_in_type=None, referenced_in_instance=None): # noqa: E501 - """General - a model defined in Swagger""" # noqa: E501 - self._referenced_in_name = None - self._referenced_in_type = None - self._referenced_in_instance = None - self.discriminator = None - if referenced_in_name is not None: - self.referenced_in_name = referenced_in_name - if referenced_in_type is not None: - self.referenced_in_type = referenced_in_type - if referenced_in_instance is not None: - self.referenced_in_instance = referenced_in_instance - - @property - def referenced_in_name(self): - """Gets the referenced_in_name of this General. # noqa: E501 - - - :return: The referenced_in_name of this General. # noqa: E501 - :rtype: str - """ - return self._referenced_in_name - - @referenced_in_name.setter - def referenced_in_name(self, referenced_in_name): - """Sets the referenced_in_name of this General. - - - :param referenced_in_name: The referenced_in_name of this General. # noqa: E501 - :type: str - """ - - self._referenced_in_name = referenced_in_name - - @property - def referenced_in_type(self): - """Gets the referenced_in_type of this General. 
# noqa: E501 - - - :return: The referenced_in_type of this General. # noqa: E501 - :rtype: str - """ - return self._referenced_in_type - - @referenced_in_type.setter - def referenced_in_type(self, referenced_in_type): - """Sets the referenced_in_type of this General. - - - :param referenced_in_type: The referenced_in_type of this General. # noqa: E501 - :type: str - """ - - self._referenced_in_type = referenced_in_type - - @property - def referenced_in_instance(self): - """Gets the referenced_in_instance of this General. # noqa: E501 - - - :return: The referenced_in_instance of this General. # noqa: E501 - :rtype: str - """ - return self._referenced_in_instance - - @referenced_in_instance.setter - def referenced_in_instance(self, referenced_in_instance): - """Sets the referenced_in_instance of this General. - - - :param referenced_in_instance: The referenced_in_instance of this General. # noqa: E501 - :type: str - """ - - self._referenced_in_instance = referenced_in_instance - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(General, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, General): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/geo.py b/edu_sharing_client/models/geo.py deleted file mode 100644 index a2c94395..00000000 --- a/edu_sharing_client/models/geo.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Geo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'longitude': 'float', - 'latitude': 'float', - 'address_country': 'str' - } - - attribute_map = { - 'longitude': 'longitude', - 'latitude': 'latitude', - 'address_country': 'addressCountry' - } - - def __init__(self, longitude=None, latitude=None, address_country=None): # noqa: E501 - """Geo - a model defined in Swagger""" # noqa: E501 - self._longitude = None - self._latitude = None - self._address_country = None - self.discriminator = None - if longitude is not None: - self.longitude = longitude - if latitude is not None: - self.latitude = latitude - if address_country is not None: - self.address_country = address_country - - @property - def longitude(self): - """Gets the longitude of this Geo. # noqa: E501 - - - :return: The longitude of this Geo. # noqa: E501 - :rtype: float - """ - return self._longitude - - @longitude.setter - def longitude(self, longitude): - """Sets the longitude of this Geo. - - - :param longitude: The longitude of this Geo. # noqa: E501 - :type: float - """ - - self._longitude = longitude - - @property - def latitude(self): - """Gets the latitude of this Geo. # noqa: E501 - - - :return: The latitude of this Geo. # noqa: E501 - :rtype: float - """ - return self._latitude - - @latitude.setter - def latitude(self, latitude): - """Sets the latitude of this Geo. - - - :param latitude: The latitude of this Geo. # noqa: E501 - :type: float - """ - - self._latitude = latitude - - @property - def address_country(self): - """Gets the address_country of this Geo. # noqa: E501 - - - :return: The address_country of this Geo. # noqa: E501 - :rtype: str - """ - return self._address_country - - @address_country.setter - def address_country(self, address_country): - """Sets the address_country of this Geo. - - - :param address_country: The address_country of this Geo. # noqa: E501 - :type: str - """ - - self._address_country = address_country - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Geo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Geo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/group.py b/edu_sharing_client/models/group.py deleted file mode 100644 index 1c64dbf7..00000000 --- a/edu_sharing_client/models/group.py +++ /dev/null @@ -1,248 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Group(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'ref': 'NodeRef', - 'editable': 'bool', - 'authority_name': 'str', - 'authority_type': 'str', - 'group_name': 'str', - 'profile': 'GroupProfile' - } - - attribute_map = { - 'ref': 'ref', - 'editable': 'editable', - 'authority_name': 'authorityName', - 'authority_type': 'authorityType', - 'group_name': 'groupName', - 'profile': 'profile' - } - - def __init__(self, ref=None, editable=False, authority_name=None, authority_type=None, group_name=None, profile=None): # noqa: E501 - """Group - a model defined in Swagger""" # noqa: E501 - self._ref = None - self._editable = None - self._authority_name = None - self._authority_type = None - self._group_name = None - self._profile = None - self.discriminator = None - if ref is not None: - self.ref = ref - if editable is not None: - self.editable = editable - self.authority_name = authority_name - if authority_type is not None: - self.authority_type = authority_type - if group_name is not None: - self.group_name = group_name - if profile is not None: - self.profile = profile - - @property - def ref(self): - """Gets the ref of this Group. # noqa: E501 - - - :return: The ref of this Group. # noqa: E501 - :rtype: NodeRef - """ - return self._ref - - @ref.setter - def ref(self, ref): - """Sets the ref of this Group. - - - :param ref: The ref of this Group. # noqa: E501 - :type: NodeRef - """ - - self._ref = ref - - @property - def editable(self): - """Gets the editable of this Group. # noqa: E501 - - - :return: The editable of this Group. # noqa: E501 - :rtype: bool - """ - return self._editable - - @editable.setter - def editable(self, editable): - """Sets the editable of this Group. - - - :param editable: The editable of this Group. # noqa: E501 - :type: bool - """ - - self._editable = editable - - @property - def authority_name(self): - """Gets the authority_name of this Group. # noqa: E501 - - - :return: The authority_name of this Group. # noqa: E501 - :rtype: str - """ - return self._authority_name - - @authority_name.setter - def authority_name(self, authority_name): - """Sets the authority_name of this Group. - - - :param authority_name: The authority_name of this Group. # noqa: E501 - :type: str - """ - if authority_name is None: - raise ValueError("Invalid value for `authority_name`, must not be `None`") # noqa: E501 - - self._authority_name = authority_name - - @property - def authority_type(self): - """Gets the authority_type of this Group. # noqa: E501 - - - :return: The authority_type of this Group. # noqa: E501 - :rtype: str - """ - return self._authority_type - - @authority_type.setter - def authority_type(self, authority_type): - """Sets the authority_type of this Group. - - - :param authority_type: The authority_type of this Group. 
# noqa: E501 - :type: str - """ - allowed_values = ["USER", "GROUP", "OWNER", "EVERYONE", "GUEST"] # noqa: E501 - if authority_type not in allowed_values: - raise ValueError( - "Invalid value for `authority_type` ({0}), must be one of {1}" # noqa: E501 - .format(authority_type, allowed_values) - ) - - self._authority_type = authority_type - - @property - def group_name(self): - """Gets the group_name of this Group. # noqa: E501 - - - :return: The group_name of this Group. # noqa: E501 - :rtype: str - """ - return self._group_name - - @group_name.setter - def group_name(self, group_name): - """Sets the group_name of this Group. - - - :param group_name: The group_name of this Group. # noqa: E501 - :type: str - """ - - self._group_name = group_name - - @property - def profile(self): - """Gets the profile of this Group. # noqa: E501 - - - :return: The profile of this Group. # noqa: E501 - :rtype: GroupProfile - """ - return self._profile - - @profile.setter - def profile(self, profile): - """Sets the profile of this Group. - - - :param profile: The profile of this Group. # noqa: E501 - :type: GroupProfile - """ - - self._profile = profile - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Group, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Group): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/group_entries.py b/edu_sharing_client/models/group_entries.py deleted file mode 100644 index 0e0f182f..00000000 --- a/edu_sharing_client/models/group_entries.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class GroupEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'groups': 'list[Group]', - 'pagination': 'Pagination' - } - - attribute_map = { - 'groups': 'groups', - 'pagination': 'pagination' - } - - def __init__(self, groups=None, pagination=None): # noqa: E501 - """GroupEntries - a model defined in Swagger""" # noqa: E501 - self._groups = None - self._pagination = None - self.discriminator = None - self.groups = groups - self.pagination = pagination - - @property - def groups(self): - """Gets the groups of this GroupEntries. # noqa: E501 - - - :return: The groups of this GroupEntries. # noqa: E501 - :rtype: list[Group] - """ - return self._groups - - @groups.setter - def groups(self, groups): - """Sets the groups of this GroupEntries. - - - :param groups: The groups of this GroupEntries. # noqa: E501 - :type: list[Group] - """ - if groups is None: - raise ValueError("Invalid value for `groups`, must not be `None`") # noqa: E501 - - self._groups = groups - - @property - def pagination(self): - """Gets the pagination of this GroupEntries. # noqa: E501 - - - :return: The pagination of this GroupEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this GroupEntries. - - - :param pagination: The pagination of this GroupEntries. # noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(GroupEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, GroupEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/group_entry.py b/edu_sharing_client/models/group_entry.py deleted file mode 100644 index 6839c396..00000000 --- a/edu_sharing_client/models/group_entry.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class GroupEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'group': 'Group' - } - - attribute_map = { - 'group': 'group' - } - - def __init__(self, group=None): # noqa: E501 - """GroupEntry - a model defined in Swagger""" # noqa: E501 - self._group = None - self.discriminator = None - self.group = group - - @property - def group(self): - """Gets the group of this GroupEntry. # noqa: E501 - - - :return: The group of this GroupEntry. # noqa: E501 - :rtype: Group - """ - return self._group - - @group.setter - def group(self, group): - """Sets the group of this GroupEntry. - - - :param group: The group of this GroupEntry. # noqa: E501 - :type: Group - """ - if group is None: - raise ValueError("Invalid value for `group`, must not be `None`") # noqa: E501 - - self._group = group - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(GroupEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, GroupEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/group_profile.py b/edu_sharing_client/models/group_profile.py deleted file mode 100644 index 20f53c60..00000000 --- a/edu_sharing_client/models/group_profile.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class GroupProfile(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'group_email': 'str', - 'display_name': 'str', - 'group_type': 'str', - 'scope_type': 'str' - } - - attribute_map = { - 'group_email': 'groupEmail', - 'display_name': 'displayName', - 'group_type': 'groupType', - 'scope_type': 'scopeType' - } - - def __init__(self, group_email=None, display_name=None, group_type=None, scope_type=None): # noqa: E501 - """GroupProfile - a model defined in Swagger""" # noqa: E501 - self._group_email = None - self._display_name = None - self._group_type = None - self._scope_type = None - self.discriminator = None - if group_email is not None: - self.group_email = group_email - if display_name is not None: - self.display_name = display_name - if group_type is not None: - self.group_type = group_type - if scope_type is not None: - self.scope_type = scope_type - - @property - def group_email(self): - """Gets the group_email of this GroupProfile. # noqa: E501 - - - :return: The group_email of this GroupProfile. # noqa: E501 - :rtype: str - """ - return self._group_email - - @group_email.setter - def group_email(self, group_email): - """Sets the group_email of this GroupProfile. - - - :param group_email: The group_email of this GroupProfile. # noqa: E501 - :type: str - """ - - self._group_email = group_email - - @property - def display_name(self): - """Gets the display_name of this GroupProfile. # noqa: E501 - - - :return: The display_name of this GroupProfile. # noqa: E501 - :rtype: str - """ - return self._display_name - - @display_name.setter - def display_name(self, display_name): - """Sets the display_name of this GroupProfile. - - - :param display_name: The display_name of this GroupProfile. # noqa: E501 - :type: str - """ - - self._display_name = display_name - - @property - def group_type(self): - """Gets the group_type of this GroupProfile. # noqa: E501 - - - :return: The group_type of this GroupProfile. # noqa: E501 - :rtype: str - """ - return self._group_type - - @group_type.setter - def group_type(self, group_type): - """Sets the group_type of this GroupProfile. - - - :param group_type: The group_type of this GroupProfile. # noqa: E501 - :type: str - """ - - self._group_type = group_type - - @property - def scope_type(self): - """Gets the scope_type of this GroupProfile. # noqa: E501 - - - :return: The scope_type of this GroupProfile. # noqa: E501 - :rtype: str - """ - return self._scope_type - - @scope_type.setter - def scope_type(self, scope_type): - """Sets the scope_type of this GroupProfile. - - - :param scope_type: The scope_type of this GroupProfile. 
# noqa: E501 - :type: str - """ - - self._scope_type = scope_type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(GroupProfile, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, GroupProfile): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/group_v2.py b/edu_sharing_client/models/group_v2.py deleted file mode 100644 index 974ff03f..00000000 --- a/edu_sharing_client/models/group_v2.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class GroupV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'views': 'list[str]' - } - - attribute_map = { - 'id': 'id', - 'views': 'views' - } - - def __init__(self, id=None, views=None): # noqa: E501 - """GroupV2 - a model defined in Swagger""" # noqa: E501 - self._id = None - self._views = None - self.discriminator = None - if id is not None: - self.id = id - if views is not None: - self.views = views - - @property - def id(self): - """Gets the id of this GroupV2. # noqa: E501 - - - :return: The id of this GroupV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this GroupV2. - - - :param id: The id of this GroupV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def views(self): - """Gets the views of this GroupV2. # noqa: E501 - - - :return: The views of this GroupV2. # noqa: E501 - :rtype: list[str] - """ - return self._views - - @views.setter - def views(self, views): - """Sets the views of this GroupV2. - - - :param views: The views of this GroupV2. 
# noqa: E501 - :type: list[str] - """ - - self._views = views - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(GroupV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, GroupV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/guest.py b/edu_sharing_client/models/guest.py deleted file mode 100644 index 20d0f956..00000000 --- a/edu_sharing_client/models/guest.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Guest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'enabled': 'bool' - } - - attribute_map = { - 'enabled': 'enabled' - } - - def __init__(self, enabled=False): # noqa: E501 - """Guest - a model defined in Swagger""" # noqa: E501 - self._enabled = None - self.discriminator = None - if enabled is not None: - self.enabled = enabled - - @property - def enabled(self): - """Gets the enabled of this Guest. # noqa: E501 - - - :return: The enabled of this Guest. # noqa: E501 - :rtype: bool - """ - return self._enabled - - @enabled.setter - def enabled(self, enabled): - """Sets the enabled of this Guest. - - - :param enabled: The enabled of this Guest. 
# noqa: E501 - :type: bool - """ - - self._enabled = enabled - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Guest, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Guest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/help_menu_options.py b/edu_sharing_client/models/help_menu_options.py deleted file mode 100644 index 8f87fe40..00000000 --- a/edu_sharing_client/models/help_menu_options.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class HelpMenuOptions(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'key': 'str', - 'icon': 'str', - 'url': 'str' - } - - attribute_map = { - 'key': 'key', - 'icon': 'icon', - 'url': 'url' - } - - def __init__(self, key=None, icon=None, url=None): # noqa: E501 - """HelpMenuOptions - a model defined in Swagger""" # noqa: E501 - self._key = None - self._icon = None - self._url = None - self.discriminator = None - if key is not None: - self.key = key - if icon is not None: - self.icon = icon - if url is not None: - self.url = url - - @property - def key(self): - """Gets the key of this HelpMenuOptions. # noqa: E501 - - - :return: The key of this HelpMenuOptions. # noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this HelpMenuOptions. - - - :param key: The key of this HelpMenuOptions. # noqa: E501 - :type: str - """ - - self._key = key - - @property - def icon(self): - """Gets the icon of this HelpMenuOptions. # noqa: E501 - - - :return: The icon of this HelpMenuOptions. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this HelpMenuOptions. - - - :param icon: The icon of this HelpMenuOptions. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def url(self): - """Gets the url of this HelpMenuOptions. # noqa: E501 - - - :return: The url of this HelpMenuOptions. 
# noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this HelpMenuOptions. - - - :param url: The url of this HelpMenuOptions. # noqa: E501 - :type: str - """ - - self._url = url - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(HelpMenuOptions, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, HelpMenuOptions): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/home_folder_options.py b/edu_sharing_client/models/home_folder_options.py deleted file mode 100644 index 023d2839..00000000 --- a/edu_sharing_client/models/home_folder_options.py +++ /dev/null @@ -1,207 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class HomeFolderOptions(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'folders': 'str', - 'private_files': 'str', - 'cc_files': 'str', - 'keep_folder_structure': 'bool' - } - - attribute_map = { - 'folders': 'folders', - 'private_files': 'privateFiles', - 'cc_files': 'ccFiles', - 'keep_folder_structure': 'keepFolderStructure' - } - - def __init__(self, folders=None, private_files=None, cc_files=None, keep_folder_structure=False): # noqa: E501 - """HomeFolderOptions - a model defined in Swagger""" # noqa: E501 - self._folders = None - self._private_files = None - self._cc_files = None - self._keep_folder_structure = None - self.discriminator = None - if folders is not None: - self.folders = folders - if private_files is not None: - self.private_files = private_files - if cc_files is not None: - self.cc_files = cc_files - if keep_folder_structure is not None: - self.keep_folder_structure = keep_folder_structure - - @property - def folders(self): - """Gets the folders of this HomeFolderOptions. # noqa: E501 - - - :return: The folders of this HomeFolderOptions. # noqa: E501 - :rtype: str - """ - return self._folders - - @folders.setter - def folders(self, folders): - """Sets the folders of this HomeFolderOptions. - - - :param folders: The folders of this HomeFolderOptions. 
# noqa: E501 - :type: str - """ - allowed_values = ["none", "assign"] # noqa: E501 - if folders not in allowed_values: - raise ValueError( - "Invalid value for `folders` ({0}), must be one of {1}" # noqa: E501 - .format(folders, allowed_values) - ) - - self._folders = folders - - @property - def private_files(self): - """Gets the private_files of this HomeFolderOptions. # noqa: E501 - - - :return: The private_files of this HomeFolderOptions. # noqa: E501 - :rtype: str - """ - return self._private_files - - @private_files.setter - def private_files(self, private_files): - """Sets the private_files of this HomeFolderOptions. - - - :param private_files: The private_files of this HomeFolderOptions. # noqa: E501 - :type: str - """ - allowed_values = ["none", "assign", "delete"] # noqa: E501 - if private_files not in allowed_values: - raise ValueError( - "Invalid value for `private_files` ({0}), must be one of {1}" # noqa: E501 - .format(private_files, allowed_values) - ) - - self._private_files = private_files - - @property - def cc_files(self): - """Gets the cc_files of this HomeFolderOptions. # noqa: E501 - - - :return: The cc_files of this HomeFolderOptions. # noqa: E501 - :rtype: str - """ - return self._cc_files - - @cc_files.setter - def cc_files(self, cc_files): - """Sets the cc_files of this HomeFolderOptions. - - - :param cc_files: The cc_files of this HomeFolderOptions. # noqa: E501 - :type: str - """ - allowed_values = ["none", "assign", "delete"] # noqa: E501 - if cc_files not in allowed_values: - raise ValueError( - "Invalid value for `cc_files` ({0}), must be one of {1}" # noqa: E501 - .format(cc_files, allowed_values) - ) - - self._cc_files = cc_files - - @property - def keep_folder_structure(self): - """Gets the keep_folder_structure of this HomeFolderOptions. # noqa: E501 - - - :return: The keep_folder_structure of this HomeFolderOptions. # noqa: E501 - :rtype: bool - """ - return self._keep_folder_structure - - @keep_folder_structure.setter - def keep_folder_structure(self, keep_folder_structure): - """Sets the keep_folder_structure of this HomeFolderOptions. - - - :param keep_folder_structure: The keep_folder_structure of this HomeFolderOptions. 
# noqa: E501 - :type: bool - """ - - self._keep_folder_structure = keep_folder_structure - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(HomeFolderOptions, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, HomeFolderOptions): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/icon.py b/edu_sharing_client/models/icon.py deleted file mode 100644 index 7e223001..00000000 --- a/edu_sharing_client/models/icon.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Icon(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'url': 'str' - } - - attribute_map = { - 'url': 'url' - } - - def __init__(self, url=None): # noqa: E501 - """Icon - a model defined in Swagger""" # noqa: E501 - self._url = None - self.discriminator = None - if url is not None: - self.url = url - - @property - def url(self): - """Gets the url of this Icon. # noqa: E501 - - - :return: The url of this Icon. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Icon. - - - :param url: The url of this Icon. 
# noqa: E501 - :type: str - """ - - self._url = url - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Icon, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Icon): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/image.py b/edu_sharing_client/models/image.py deleted file mode 100644 index e3981562..00000000 --- a/edu_sharing_client/models/image.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Image(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'src': 'str', - 'replace': 'str' - } - - attribute_map = { - 'src': 'src', - 'replace': 'replace' - } - - def __init__(self, src=None, replace=None): # noqa: E501 - """Image - a model defined in Swagger""" # noqa: E501 - self._src = None - self._replace = None - self.discriminator = None - if src is not None: - self.src = src - if replace is not None: - self.replace = replace - - @property - def src(self): - """Gets the src of this Image. # noqa: E501 - - - :return: The src of this Image. # noqa: E501 - :rtype: str - """ - return self._src - - @src.setter - def src(self, src): - """Sets the src of this Image. - - - :param src: The src of this Image. # noqa: E501 - :type: str - """ - - self._src = src - - @property - def replace(self): - """Gets the replace of this Image. # noqa: E501 - - - :return: The replace of this Image. # noqa: E501 - :rtype: str - """ - return self._replace - - @replace.setter - def replace(self, replace): - """Sets the replace of this Image. - - - :param replace: The replace of this Image. 
# noqa: E501 - :type: str - """ - - self._replace = replace - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Image, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Image): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/interface.py b/edu_sharing_client/models/interface.py deleted file mode 100644 index 75b9a31d..00000000 --- a/edu_sharing_client/models/interface.py +++ /dev/null @@ -1,253 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Interface(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'url': 'str', - 'set': 'str', - 'metadata_prefix': 'str', - 'documentation': 'str', - 'format': 'str', - 'type': 'str' - } - - attribute_map = { - 'url': 'url', - 'set': 'set', - 'metadata_prefix': 'metadataPrefix', - 'documentation': 'documentation', - 'format': 'format', - 'type': 'type' - } - - def __init__(self, url=None, set=None, metadata_prefix=None, documentation=None, format=None, type=None): # noqa: E501 - """Interface - a model defined in Swagger""" # noqa: E501 - self._url = None - self._set = None - self._metadata_prefix = None - self._documentation = None - self._format = None - self._type = None - self.discriminator = None - if url is not None: - self.url = url - if set is not None: - self.set = set - if metadata_prefix is not None: - self.metadata_prefix = metadata_prefix - if documentation is not None: - self.documentation = documentation - if format is not None: - self.format = format - if type is not None: - self.type = type - - @property - def url(self): - """Gets the url of this Interface. # noqa: E501 - - - :return: The url of this Interface. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Interface. - - - :param url: The url of this Interface. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def set(self): - """Gets the set of this Interface. # noqa: E501 - - - :return: The set of this Interface. 
# noqa: E501 - :rtype: str - """ - return self._set - - @set.setter - def set(self, set): - """Sets the set of this Interface. - - - :param set: The set of this Interface. # noqa: E501 - :type: str - """ - - self._set = set - - @property - def metadata_prefix(self): - """Gets the metadata_prefix of this Interface. # noqa: E501 - - - :return: The metadata_prefix of this Interface. # noqa: E501 - :rtype: str - """ - return self._metadata_prefix - - @metadata_prefix.setter - def metadata_prefix(self, metadata_prefix): - """Sets the metadata_prefix of this Interface. - - - :param metadata_prefix: The metadata_prefix of this Interface. # noqa: E501 - :type: str - """ - - self._metadata_prefix = metadata_prefix - - @property - def documentation(self): - """Gets the documentation of this Interface. # noqa: E501 - - - :return: The documentation of this Interface. # noqa: E501 - :rtype: str - """ - return self._documentation - - @documentation.setter - def documentation(self, documentation): - """Sets the documentation of this Interface. - - - :param documentation: The documentation of this Interface. # noqa: E501 - :type: str - """ - - self._documentation = documentation - - @property - def format(self): - """Gets the format of this Interface. # noqa: E501 - - - :return: The format of this Interface. # noqa: E501 - :rtype: str - """ - return self._format - - @format.setter - def format(self, format): - """Sets the format of this Interface. - - - :param format: The format of this Interface. # noqa: E501 - :type: str - """ - allowed_values = ["Json", "XML", "Text"] # noqa: E501 - if format not in allowed_values: - raise ValueError( - "Invalid value for `format` ({0}), must be one of {1}" # noqa: E501 - .format(format, allowed_values) - ) - - self._format = format - - @property - def type(self): - """Gets the type of this Interface. # noqa: E501 - - - :return: The type of this Interface. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Interface. - - - :param type: The type of this Interface. 
# noqa: E501 - :type: str - """ - allowed_values = ["Search", "Sitemap", "Statistics", "OAI", "Generic_Api"] # noqa: E501 - if type not in allowed_values: - raise ValueError( - "Invalid value for `type` ({0}), must be one of {1}" # noqa: E501 - .format(type, allowed_values) - ) - - self._type = type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Interface, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Interface): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/job_detail.py b/edu_sharing_client/models/job_detail.py deleted file mode 100644 index 303ee96d..00000000 --- a/edu_sharing_client/models/job_detail.py +++ /dev/null @@ -1,345 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class JobDetail(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'group': 'str', - 'description': 'str', - 'job_data_map': 'dict(str, object)', - 'key': 'Key', - 'volatile': 'bool', - 'full_name': 'str', - 'stateful': 'bool', - 'durable': 'bool', - 'job_listener_names': 'list[str]' - } - - attribute_map = { - 'name': 'name', - 'group': 'group', - 'description': 'description', - 'job_data_map': 'jobDataMap', - 'key': 'key', - 'volatile': 'volatile', - 'full_name': 'fullName', - 'stateful': 'stateful', - 'durable': 'durable', - 'job_listener_names': 'jobListenerNames' - } - - def __init__(self, name=None, group=None, description=None, job_data_map=None, key=None, volatile=False, full_name=None, stateful=False, durable=False, job_listener_names=None): # noqa: E501 - """JobDetail - a model defined in Swagger""" # noqa: E501 - self._name = None - self._group = None - self._description = None - self._job_data_map = None - self._key = None - self._volatile = None - self._full_name = None - self._stateful = None - self._durable = None - self._job_listener_names = None - self.discriminator = None - if name is not None: - self.name = name - if group is not None: - self.group = group - if description is not None: - self.description = description - if job_data_map is not None: - self.job_data_map = job_data_map - if key is not None: - self.key = key - if volatile is not None: - self.volatile = volatile - if full_name is not None: - self.full_name = full_name - if stateful is not None: - self.stateful = stateful - if durable is not None: - self.durable = durable - if job_listener_names is not None: - self.job_listener_names = job_listener_names - - @property - def name(self): - """Gets the name of this JobDetail. # noqa: E501 - - - :return: The name of this JobDetail. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this JobDetail. - - - :param name: The name of this JobDetail. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def group(self): - """Gets the group of this JobDetail. # noqa: E501 - - - :return: The group of this JobDetail. # noqa: E501 - :rtype: str - """ - return self._group - - @group.setter - def group(self, group): - """Sets the group of this JobDetail. - - - :param group: The group of this JobDetail. # noqa: E501 - :type: str - """ - - self._group = group - - @property - def description(self): - """Gets the description of this JobDetail. # noqa: E501 - - - :return: The description of this JobDetail. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this JobDetail. - - - :param description: The description of this JobDetail. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def job_data_map(self): - """Gets the job_data_map of this JobDetail. # noqa: E501 - - - :return: The job_data_map of this JobDetail. # noqa: E501 - :rtype: dict(str, object) - """ - return self._job_data_map - - @job_data_map.setter - def job_data_map(self, job_data_map): - """Sets the job_data_map of this JobDetail. - - - :param job_data_map: The job_data_map of this JobDetail. # noqa: E501 - :type: dict(str, object) - """ - - self._job_data_map = job_data_map - - @property - def key(self): - """Gets the key of this JobDetail. # noqa: E501 - - - :return: The key of this JobDetail. # noqa: E501 - :rtype: Key - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this JobDetail. 
- - - :param key: The key of this JobDetail. # noqa: E501 - :type: Key - """ - - self._key = key - - @property - def volatile(self): - """Gets the volatile of this JobDetail. # noqa: E501 - - - :return: The volatile of this JobDetail. # noqa: E501 - :rtype: bool - """ - return self._volatile - - @volatile.setter - def volatile(self, volatile): - """Sets the volatile of this JobDetail. - - - :param volatile: The volatile of this JobDetail. # noqa: E501 - :type: bool - """ - - self._volatile = volatile - - @property - def full_name(self): - """Gets the full_name of this JobDetail. # noqa: E501 - - - :return: The full_name of this JobDetail. # noqa: E501 - :rtype: str - """ - return self._full_name - - @full_name.setter - def full_name(self, full_name): - """Sets the full_name of this JobDetail. - - - :param full_name: The full_name of this JobDetail. # noqa: E501 - :type: str - """ - - self._full_name = full_name - - @property - def stateful(self): - """Gets the stateful of this JobDetail. # noqa: E501 - - - :return: The stateful of this JobDetail. # noqa: E501 - :rtype: bool - """ - return self._stateful - - @stateful.setter - def stateful(self, stateful): - """Sets the stateful of this JobDetail. - - - :param stateful: The stateful of this JobDetail. # noqa: E501 - :type: bool - """ - - self._stateful = stateful - - @property - def durable(self): - """Gets the durable of this JobDetail. # noqa: E501 - - - :return: The durable of this JobDetail. # noqa: E501 - :rtype: bool - """ - return self._durable - - @durable.setter - def durable(self, durable): - """Sets the durable of this JobDetail. - - - :param durable: The durable of this JobDetail. # noqa: E501 - :type: bool - """ - - self._durable = durable - - @property - def job_listener_names(self): - """Gets the job_listener_names of this JobDetail. # noqa: E501 - - - :return: The job_listener_names of this JobDetail. # noqa: E501 - :rtype: list[str] - """ - return self._job_listener_names - - @job_listener_names.setter - def job_listener_names(self, job_listener_names): - """Sets the job_listener_names of this JobDetail. - - - :param job_listener_names: The job_listener_names of this JobDetail. 
# noqa: E501 - :type: list[str] - """ - - self._job_listener_names = job_listener_names - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(JobDetail, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, JobDetail): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/job_info.py b/edu_sharing_client/models/job_info.py deleted file mode 100644 index b7432e72..00000000 --- a/edu_sharing_client/models/job_info.py +++ /dev/null @@ -1,247 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class JobInfo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'start_time': 'int', - 'finish_time': 'int', - 'status': 'str', - 'worst_level': 'Level', - 'log': 'list[LogEntry]', - 'job_detail': 'JobDetail' - } - - attribute_map = { - 'start_time': 'startTime', - 'finish_time': 'finishTime', - 'status': 'status', - 'worst_level': 'worstLevel', - 'log': 'log', - 'job_detail': 'jobDetail' - } - - def __init__(self, start_time=None, finish_time=None, status=None, worst_level=None, log=None, job_detail=None): # noqa: E501 - """JobInfo - a model defined in Swagger""" # noqa: E501 - self._start_time = None - self._finish_time = None - self._status = None - self._worst_level = None - self._log = None - self._job_detail = None - self.discriminator = None - if start_time is not None: - self.start_time = start_time - if finish_time is not None: - self.finish_time = finish_time - if status is not None: - self.status = status - if worst_level is not None: - self.worst_level = worst_level - if log is not None: - self.log = log - if job_detail is not None: - self.job_detail = job_detail - - @property - def start_time(self): - """Gets the start_time of this JobInfo. # noqa: E501 - - - :return: The start_time of this JobInfo. # noqa: E501 - :rtype: int - """ - return self._start_time - - @start_time.setter - def start_time(self, start_time): - """Sets the start_time of this JobInfo. - - - :param start_time: The start_time of this JobInfo. 
# noqa: E501 - :type: int - """ - - self._start_time = start_time - - @property - def finish_time(self): - """Gets the finish_time of this JobInfo. # noqa: E501 - - - :return: The finish_time of this JobInfo. # noqa: E501 - :rtype: int - """ - return self._finish_time - - @finish_time.setter - def finish_time(self, finish_time): - """Sets the finish_time of this JobInfo. - - - :param finish_time: The finish_time of this JobInfo. # noqa: E501 - :type: int - """ - - self._finish_time = finish_time - - @property - def status(self): - """Gets the status of this JobInfo. # noqa: E501 - - - :return: The status of this JobInfo. # noqa: E501 - :rtype: str - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this JobInfo. - - - :param status: The status of this JobInfo. # noqa: E501 - :type: str - """ - allowed_values = ["Running", "Failed", "Aborted", "Finished"] # noqa: E501 - if status not in allowed_values: - raise ValueError( - "Invalid value for `status` ({0}), must be one of {1}" # noqa: E501 - .format(status, allowed_values) - ) - - self._status = status - - @property - def worst_level(self): - """Gets the worst_level of this JobInfo. # noqa: E501 - - - :return: The worst_level of this JobInfo. # noqa: E501 - :rtype: Level - """ - return self._worst_level - - @worst_level.setter - def worst_level(self, worst_level): - """Sets the worst_level of this JobInfo. - - - :param worst_level: The worst_level of this JobInfo. # noqa: E501 - :type: Level - """ - - self._worst_level = worst_level - - @property - def log(self): - """Gets the log of this JobInfo. # noqa: E501 - - - :return: The log of this JobInfo. # noqa: E501 - :rtype: list[LogEntry] - """ - return self._log - - @log.setter - def log(self, log): - """Sets the log of this JobInfo. - - - :param log: The log of this JobInfo. # noqa: E501 - :type: list[LogEntry] - """ - - self._log = log - - @property - def job_detail(self): - """Gets the job_detail of this JobInfo. # noqa: E501 - - - :return: The job_detail of this JobInfo. # noqa: E501 - :rtype: JobDetail - """ - return self._job_detail - - @job_detail.setter - def job_detail(self, job_detail): - """Sets the job_detail of this JobInfo. - - - :param job_detail: The job_detail of this JobInfo. 
# noqa: E501 - :type: JobDetail - """ - - self._job_detail = job_detail - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(JobInfo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, JobInfo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/key.py b/edu_sharing_client/models/key.py deleted file mode 100644 index c983a422..00000000 --- a/edu_sharing_client/models/key.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Key(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'first': 'object', - 'second': 'object', - 'name': 'str', - 'group': 'str' - } - - attribute_map = { - 'first': 'first', - 'second': 'second', - 'name': 'name', - 'group': 'group' - } - - def __init__(self, first=None, second=None, name=None, group=None): # noqa: E501 - """Key - a model defined in Swagger""" # noqa: E501 - self._first = None - self._second = None - self._name = None - self._group = None - self.discriminator = None - if first is not None: - self.first = first - if second is not None: - self.second = second - if name is not None: - self.name = name - if group is not None: - self.group = group - - @property - def first(self): - """Gets the first of this Key. # noqa: E501 - - - :return: The first of this Key. # noqa: E501 - :rtype: object - """ - return self._first - - @first.setter - def first(self, first): - """Sets the first of this Key. - - - :param first: The first of this Key. # noqa: E501 - :type: object - """ - - self._first = first - - @property - def second(self): - """Gets the second of this Key. # noqa: E501 - - - :return: The second of this Key. # noqa: E501 - :rtype: object - """ - return self._second - - @second.setter - def second(self, second): - """Sets the second of this Key. - - - :param second: The second of this Key. # noqa: E501 - :type: object - """ - - self._second = second - - @property - def name(self): - """Gets the name of this Key. # noqa: E501 - - - :return: The name of this Key. 
# noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this Key. - - - :param name: The name of this Key. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def group(self): - """Gets the group of this Key. # noqa: E501 - - - :return: The group of this Key. # noqa: E501 - :rtype: str - """ - return self._group - - @group.setter - def group(self, group): - """Sets the group of this Key. - - - :param group: The group of this Key. # noqa: E501 - :type: str - """ - - self._group = group - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Key, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Key): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/key_value_pair.py b/edu_sharing_client/models/key_value_pair.py deleted file mode 100644 index 2d7d3cde..00000000 --- a/edu_sharing_client/models/key_value_pair.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class KeyValuePair(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'key': 'str', - 'value': 'str' - } - - attribute_map = { - 'key': 'key', - 'value': 'value' - } - - def __init__(self, key=None, value=None): # noqa: E501 - """KeyValuePair - a model defined in Swagger""" # noqa: E501 - self._key = None - self._value = None - self.discriminator = None - if key is not None: - self.key = key - if value is not None: - self.value = value - - @property - def key(self): - """Gets the key of this KeyValuePair. # noqa: E501 - - - :return: The key of this KeyValuePair. # noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this KeyValuePair. - - - :param key: The key of this KeyValuePair. # noqa: E501 - :type: str - """ - - self._key = key - - @property - def value(self): - """Gets the value of this KeyValuePair. # noqa: E501 - - - :return: The value of this KeyValuePair. 
# noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this KeyValuePair. - - - :param value: The value of this KeyValuePair. # noqa: E501 - :type: str - """ - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(KeyValuePair, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, KeyValuePair): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/language.py b/edu_sharing_client/models/language.py deleted file mode 100644 index 681fee4c..00000000 --- a/edu_sharing_client/models/language.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Language(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'language': 'str', - 'string': 'list[KeyValuePair]' - } - - attribute_map = { - 'language': 'language', - 'string': 'string' - } - - def __init__(self, language=None, string=None): # noqa: E501 - """Language - a model defined in Swagger""" # noqa: E501 - self._language = None - self._string = None - self.discriminator = None - if language is not None: - self.language = language - if string is not None: - self.string = string - - @property - def language(self): - """Gets the language of this Language. # noqa: E501 - - - :return: The language of this Language. # noqa: E501 - :rtype: str - """ - return self._language - - @language.setter - def language(self, language): - """Sets the language of this Language. - - - :param language: The language of this Language. # noqa: E501 - :type: str - """ - - self._language = language - - @property - def string(self): - """Gets the string of this Language. # noqa: E501 - - - :return: The string of this Language. # noqa: E501 - :rtype: list[KeyValuePair] - """ - return self._string - - @string.setter - def string(self, string): - """Sets the string of this Language. - - - :param string: The string of this Language. 
# noqa: E501 - :type: list[KeyValuePair] - """ - - self._string = string - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Language, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Language): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/level.py b/edu_sharing_client/models/level.py deleted file mode 100644 index 966bd611..00000000 --- a/edu_sharing_client/models/level.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Level(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'syslog_equivalent': 'int' - } - - attribute_map = { - 'syslog_equivalent': 'syslogEquivalent' - } - - def __init__(self, syslog_equivalent=None): # noqa: E501 - """Level - a model defined in Swagger""" # noqa: E501 - self._syslog_equivalent = None - self.discriminator = None - if syslog_equivalent is not None: - self.syslog_equivalent = syslog_equivalent - - @property - def syslog_equivalent(self): - """Gets the syslog_equivalent of this Level. # noqa: E501 - - - :return: The syslog_equivalent of this Level. # noqa: E501 - :rtype: int - """ - return self._syslog_equivalent - - @syslog_equivalent.setter - def syslog_equivalent(self, syslog_equivalent): - """Sets the syslog_equivalent of this Level. - - - :param syslog_equivalent: The syslog_equivalent of this Level. 
# noqa: E501 - :type: int - """ - - self._syslog_equivalent = syslog_equivalent - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Level, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Level): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/license.py b/edu_sharing_client/models/license.py deleted file mode 100644 index 073b8ab7..00000000 --- a/edu_sharing_client/models/license.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class License(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'icon': 'str', - 'url': 'str' - } - - attribute_map = { - 'icon': 'icon', - 'url': 'url' - } - - def __init__(self, icon=None, url=None): # noqa: E501 - """License - a model defined in Swagger""" # noqa: E501 - self._icon = None - self._url = None - self.discriminator = None - if icon is not None: - self.icon = icon - if url is not None: - self.url = url - - @property - def icon(self): - """Gets the icon of this License. # noqa: E501 - - - :return: The icon of this License. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this License. - - - :param icon: The icon of this License. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def url(self): - """Gets the url of this License. # noqa: E501 - - - :return: The url of this License. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this License. - - - :param url: The url of this License. 
# noqa: E501 - :type: str - """ - - self._url = url - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(License, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, License): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/license_agreement.py b/edu_sharing_client/models/license_agreement.py deleted file mode 100644 index 94baf831..00000000 --- a/edu_sharing_client/models/license_agreement.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class LicenseAgreement(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'node_id': 'list[LicenseAgreementNode]' - } - - attribute_map = { - 'node_id': 'nodeId' - } - - def __init__(self, node_id=None): # noqa: E501 - """LicenseAgreement - a model defined in Swagger""" # noqa: E501 - self._node_id = None - self.discriminator = None - if node_id is not None: - self.node_id = node_id - - @property - def node_id(self): - """Gets the node_id of this LicenseAgreement. # noqa: E501 - - - :return: The node_id of this LicenseAgreement. # noqa: E501 - :rtype: list[LicenseAgreementNode] - """ - return self._node_id - - @node_id.setter - def node_id(self, node_id): - """Sets the node_id of this LicenseAgreement. - - - :param node_id: The node_id of this LicenseAgreement. 
# noqa: E501 - :type: list[LicenseAgreementNode] - """ - - self._node_id = node_id - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(LicenseAgreement, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, LicenseAgreement): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/license_agreement_node.py b/edu_sharing_client/models/license_agreement_node.py deleted file mode 100644 index 5dcd4d7b..00000000 --- a/edu_sharing_client/models/license_agreement_node.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class LicenseAgreementNode(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'language': 'str', - 'value': 'str' - } - - attribute_map = { - 'language': 'language', - 'value': 'value' - } - - def __init__(self, language=None, value=None): # noqa: E501 - """LicenseAgreementNode - a model defined in Swagger""" # noqa: E501 - self._language = None - self._value = None - self.discriminator = None - if language is not None: - self.language = language - if value is not None: - self.value = value - - @property - def language(self): - """Gets the language of this LicenseAgreementNode. # noqa: E501 - - - :return: The language of this LicenseAgreementNode. # noqa: E501 - :rtype: str - """ - return self._language - - @language.setter - def language(self, language): - """Sets the language of this LicenseAgreementNode. - - - :param language: The language of this LicenseAgreementNode. # noqa: E501 - :type: str - """ - - self._language = language - - @property - def value(self): - """Gets the value of this LicenseAgreementNode. # noqa: E501 - - - :return: The value of this LicenseAgreementNode. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this LicenseAgreementNode. - - - :param value: The value of this LicenseAgreementNode. 
# noqa: E501 - :type: str - """ - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(LicenseAgreementNode, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, LicenseAgreementNode): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/list_v2.py b/edu_sharing_client/models/list_v2.py deleted file mode 100644 index 647b91f6..00000000 --- a/edu_sharing_client/models/list_v2.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ListV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'columns': 'list[ColumnV2]' - } - - attribute_map = { - 'id': 'id', - 'columns': 'columns' - } - - def __init__(self, id=None, columns=None): # noqa: E501 - """ListV2 - a model defined in Swagger""" # noqa: E501 - self._id = None - self._columns = None - self.discriminator = None - if id is not None: - self.id = id - if columns is not None: - self.columns = columns - - @property - def id(self): - """Gets the id of this ListV2. # noqa: E501 - - - :return: The id of this ListV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this ListV2. - - - :param id: The id of this ListV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def columns(self): - """Gets the columns of this ListV2. # noqa: E501 - - - :return: The columns of this ListV2. # noqa: E501 - :rtype: list[ColumnV2] - """ - return self._columns - - @columns.setter - def columns(self, columns): - """Sets the columns of this ListV2. - - - :param columns: The columns of this ListV2. 
# noqa: E501 - :type: list[ColumnV2] - """ - - self._columns = columns - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ListV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ListV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/location.py b/edu_sharing_client/models/location.py deleted file mode 100644 index 4fefd223..00000000 --- a/edu_sharing_client/models/location.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Location(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'geo': 'Geo' - } - - attribute_map = { - 'geo': 'geo' - } - - def __init__(self, geo=None): # noqa: E501 - """Location - a model defined in Swagger""" # noqa: E501 - self._geo = None - self.discriminator = None - if geo is not None: - self.geo = geo - - @property - def geo(self): - """Gets the geo of this Location. # noqa: E501 - - - :return: The geo of this Location. # noqa: E501 - :rtype: Geo - """ - return self._geo - - @geo.setter - def geo(self, geo): - """Sets the geo of this Location. - - - :param geo: The geo of this Location. 
# noqa: E501 - :type: Geo - """ - - self._geo = geo - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Location, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Location): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/log_entry.py b/edu_sharing_client/models/log_entry.py deleted file mode 100644 index 9cd0b75b..00000000 --- a/edu_sharing_client/models/log_entry.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class LogEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'class_name': 'str', - 'level': 'Level', - '_date': 'int', - 'message': 'str' - } - - attribute_map = { - 'class_name': 'className', - 'level': 'level', - '_date': 'date', - 'message': 'message' - } - - def __init__(self, class_name=None, level=None, _date=None, message=None): # noqa: E501 - """LogEntry - a model defined in Swagger""" # noqa: E501 - self._class_name = None - self._level = None - self.__date = None - self._message = None - self.discriminator = None - if class_name is not None: - self.class_name = class_name - if level is not None: - self.level = level - if _date is not None: - self._date = _date - if message is not None: - self.message = message - - @property - def class_name(self): - """Gets the class_name of this LogEntry. # noqa: E501 - - - :return: The class_name of this LogEntry. # noqa: E501 - :rtype: str - """ - return self._class_name - - @class_name.setter - def class_name(self, class_name): - """Sets the class_name of this LogEntry. - - - :param class_name: The class_name of this LogEntry. # noqa: E501 - :type: str - """ - - self._class_name = class_name - - @property - def level(self): - """Gets the level of this LogEntry. # noqa: E501 - - - :return: The level of this LogEntry. # noqa: E501 - :rtype: Level - """ - return self._level - - @level.setter - def level(self, level): - """Sets the level of this LogEntry. - - - :param level: The level of this LogEntry. 
# noqa: E501 - :type: Level - """ - - self._level = level - - @property - def _date(self): - """Gets the _date of this LogEntry. # noqa: E501 - - - :return: The _date of this LogEntry. # noqa: E501 - :rtype: int - """ - return self.__date - - @_date.setter - def _date(self, _date): - """Sets the _date of this LogEntry. - - - :param _date: The _date of this LogEntry. # noqa: E501 - :type: int - """ - - self.__date = _date - - @property - def message(self): - """Gets the message of this LogEntry. # noqa: E501 - - - :return: The message of this LogEntry. # noqa: E501 - :rtype: str - """ - return self._message - - @message.setter - def message(self, message): - """Sets the message of this LogEntry. - - - :param message: The message of this LogEntry. # noqa: E501 - :type: str - """ - - self._message = message - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(LogEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, LogEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/login.py b/edu_sharing_client/models/login.py deleted file mode 100644 index 2bd0f32b..00000000 --- a/edu_sharing_client/models/login.py +++ /dev/null @@ -1,350 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Login(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'remote_authentications': 'dict(str, RemoteAuthDescription)', - 'is_valid_login': 'bool', - 'is_admin': 'bool', - 'current_scope': 'str', - 'user_home': 'str', - 'session_timeout': 'int', - 'tool_permissions': 'list[str]', - 'status_code': 'str', - 'authority_name': 'str', - 'is_guest': 'bool' - } - - attribute_map = { - 'remote_authentications': 'remoteAuthentications', - 'is_valid_login': 'isValidLogin', - 'is_admin': 'isAdmin', - 'current_scope': 'currentScope', - 'user_home': 'userHome', - 'session_timeout': 'sessionTimeout', - 'tool_permissions': 'toolPermissions', - 'status_code': 'statusCode', - 'authority_name': 'authorityName', - 'is_guest': 'isGuest' - } - - def __init__(self, remote_authentications=None, is_valid_login=False, is_admin=False, current_scope=None, user_home=None, session_timeout=None, tool_permissions=None, status_code=None, authority_name=None, is_guest=False): # noqa: E501 - """Login - a model defined in Swagger""" # noqa: E501 - self._remote_authentications = None - self._is_valid_login = None - self._is_admin = None - self._current_scope = None - self._user_home = None - self._session_timeout = None - self._tool_permissions = None - self._status_code = None - self._authority_name = None - self._is_guest = None - self.discriminator = None - if remote_authentications is not None: - self.remote_authentications = remote_authentications - self.is_valid_login = is_valid_login - self.is_admin = is_admin - self.current_scope = current_scope - if user_home is not None: - self.user_home = user_home - self.session_timeout = session_timeout - if tool_permissions is not None: - self.tool_permissions = tool_permissions - if status_code is not None: - self.status_code = status_code - if authority_name is not None: - self.authority_name = authority_name - self.is_guest = is_guest - - @property - def remote_authentications(self): - """Gets the remote_authentications of this Login. # noqa: E501 - - - :return: The remote_authentications of this Login. # noqa: E501 - :rtype: dict(str, RemoteAuthDescription) - """ - return self._remote_authentications - - @remote_authentications.setter - def remote_authentications(self, remote_authentications): - """Sets the remote_authentications of this Login. - - - :param remote_authentications: The remote_authentications of this Login. # noqa: E501 - :type: dict(str, RemoteAuthDescription) - """ - - self._remote_authentications = remote_authentications - - @property - def is_valid_login(self): - """Gets the is_valid_login of this Login. # noqa: E501 - - - :return: The is_valid_login of this Login. # noqa: E501 - :rtype: bool - """ - return self._is_valid_login - - @is_valid_login.setter - def is_valid_login(self, is_valid_login): - """Sets the is_valid_login of this Login. - - - :param is_valid_login: The is_valid_login of this Login. # noqa: E501 - :type: bool - """ - if is_valid_login is None: - raise ValueError("Invalid value for `is_valid_login`, must not be `None`") # noqa: E501 - - self._is_valid_login = is_valid_login - - @property - def is_admin(self): - """Gets the is_admin of this Login. # noqa: E501 - - - :return: The is_admin of this Login. # noqa: E501 - :rtype: bool - """ - return self._is_admin - - @is_admin.setter - def is_admin(self, is_admin): - """Sets the is_admin of this Login. - - - :param is_admin: The is_admin of this Login. 
# noqa: E501 - :type: bool - """ - if is_admin is None: - raise ValueError("Invalid value for `is_admin`, must not be `None`") # noqa: E501 - - self._is_admin = is_admin - - @property - def current_scope(self): - """Gets the current_scope of this Login. # noqa: E501 - - - :return: The current_scope of this Login. # noqa: E501 - :rtype: str - """ - return self._current_scope - - @current_scope.setter - def current_scope(self, current_scope): - """Sets the current_scope of this Login. - - - :param current_scope: The current_scope of this Login. # noqa: E501 - :type: str - """ - if current_scope is None: - raise ValueError("Invalid value for `current_scope`, must not be `None`") # noqa: E501 - - self._current_scope = current_scope - - @property - def user_home(self): - """Gets the user_home of this Login. # noqa: E501 - - - :return: The user_home of this Login. # noqa: E501 - :rtype: str - """ - return self._user_home - - @user_home.setter - def user_home(self, user_home): - """Sets the user_home of this Login. - - - :param user_home: The user_home of this Login. # noqa: E501 - :type: str - """ - - self._user_home = user_home - - @property - def session_timeout(self): - """Gets the session_timeout of this Login. # noqa: E501 - - - :return: The session_timeout of this Login. # noqa: E501 - :rtype: int - """ - return self._session_timeout - - @session_timeout.setter - def session_timeout(self, session_timeout): - """Sets the session_timeout of this Login. - - - :param session_timeout: The session_timeout of this Login. # noqa: E501 - :type: int - """ - if session_timeout is None: - raise ValueError("Invalid value for `session_timeout`, must not be `None`") # noqa: E501 - - self._session_timeout = session_timeout - - @property - def tool_permissions(self): - """Gets the tool_permissions of this Login. # noqa: E501 - - - :return: The tool_permissions of this Login. # noqa: E501 - :rtype: list[str] - """ - return self._tool_permissions - - @tool_permissions.setter - def tool_permissions(self, tool_permissions): - """Sets the tool_permissions of this Login. - - - :param tool_permissions: The tool_permissions of this Login. # noqa: E501 - :type: list[str] - """ - - self._tool_permissions = tool_permissions - - @property - def status_code(self): - """Gets the status_code of this Login. # noqa: E501 - - - :return: The status_code of this Login. # noqa: E501 - :rtype: str - """ - return self._status_code - - @status_code.setter - def status_code(self, status_code): - """Sets the status_code of this Login. - - - :param status_code: The status_code of this Login. # noqa: E501 - :type: str - """ - - self._status_code = status_code - - @property - def authority_name(self): - """Gets the authority_name of this Login. # noqa: E501 - - - :return: The authority_name of this Login. # noqa: E501 - :rtype: str - """ - return self._authority_name - - @authority_name.setter - def authority_name(self, authority_name): - """Sets the authority_name of this Login. - - - :param authority_name: The authority_name of this Login. # noqa: E501 - :type: str - """ - - self._authority_name = authority_name - - @property - def is_guest(self): - """Gets the is_guest of this Login. # noqa: E501 - - - :return: The is_guest of this Login. # noqa: E501 - :rtype: bool - """ - return self._is_guest - - @is_guest.setter - def is_guest(self, is_guest): - """Sets the is_guest of this Login. - - - :param is_guest: The is_guest of this Login. 
# noqa: E501 - :type: bool - """ - if is_guest is None: - raise ValueError("Invalid value for `is_guest`, must not be `None`") # noqa: E501 - - self._is_guest = is_guest - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Login, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Login): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/login_credentials.py b/edu_sharing_client/models/login_credentials.py deleted file mode 100644 index c16b889e..00000000 --- a/edu_sharing_client/models/login_credentials.py +++ /dev/null @@ -1,166 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class LoginCredentials(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'user_name': 'str', - 'password': 'str', - 'scope': 'str' - } - - attribute_map = { - 'user_name': 'userName', - 'password': 'password', - 'scope': 'scope' - } - - def __init__(self, user_name=None, password=None, scope=None): # noqa: E501 - """LoginCredentials - a model defined in Swagger""" # noqa: E501 - self._user_name = None - self._password = None - self._scope = None - self.discriminator = None - self.user_name = user_name - self.password = password - self.scope = scope - - @property - def user_name(self): - """Gets the user_name of this LoginCredentials. # noqa: E501 - - - :return: The user_name of this LoginCredentials. # noqa: E501 - :rtype: str - """ - return self._user_name - - @user_name.setter - def user_name(self, user_name): - """Sets the user_name of this LoginCredentials. - - - :param user_name: The user_name of this LoginCredentials. # noqa: E501 - :type: str - """ - if user_name is None: - raise ValueError("Invalid value for `user_name`, must not be `None`") # noqa: E501 - - self._user_name = user_name - - @property - def password(self): - """Gets the password of this LoginCredentials. # noqa: E501 - - - :return: The password of this LoginCredentials. 
# noqa: E501 - :rtype: str - """ - return self._password - - @password.setter - def password(self, password): - """Sets the password of this LoginCredentials. - - - :param password: The password of this LoginCredentials. # noqa: E501 - :type: str - """ - if password is None: - raise ValueError("Invalid value for `password`, must not be `None`") # noqa: E501 - - self._password = password - - @property - def scope(self): - """Gets the scope of this LoginCredentials. # noqa: E501 - - - :return: The scope of this LoginCredentials. # noqa: E501 - :rtype: str - """ - return self._scope - - @scope.setter - def scope(self, scope): - """Sets the scope of this LoginCredentials. - - - :param scope: The scope of this LoginCredentials. # noqa: E501 - :type: str - """ - if scope is None: - raise ValueError("Invalid value for `scope`, must not be `None`") # noqa: E501 - - self._scope = scope - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(LoginCredentials, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, LoginCredentials): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/logout_info.py b/edu_sharing_client/models/logout_info.py deleted file mode 100644 index 28f97e3c..00000000 --- a/edu_sharing_client/models/logout_info.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class LogoutInfo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'url': 'str', - 'destroy_session': 'bool', - 'ajax': 'bool', - 'next': 'str' - } - - attribute_map = { - 'url': 'url', - 'destroy_session': 'destroySession', - 'ajax': 'ajax', - 'next': 'next' - } - - def __init__(self, url=None, destroy_session=False, ajax=False, next=None): # noqa: E501 - """LogoutInfo - a model defined in Swagger""" # noqa: E501 - self._url = None - self._destroy_session = None - self._ajax = None - self._next = None - self.discriminator = None - if url is not None: - self.url = url - if destroy_session is not None: - self.destroy_session = destroy_session - if ajax is not None: - self.ajax = ajax - if next is not None: - self.next = next - - @property - def url(self): - """Gets the url of this LogoutInfo. # noqa: E501 - - - :return: The url of this LogoutInfo. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this LogoutInfo. - - - :param url: The url of this LogoutInfo. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def destroy_session(self): - """Gets the destroy_session of this LogoutInfo. # noqa: E501 - - - :return: The destroy_session of this LogoutInfo. # noqa: E501 - :rtype: bool - """ - return self._destroy_session - - @destroy_session.setter - def destroy_session(self, destroy_session): - """Sets the destroy_session of this LogoutInfo. - - - :param destroy_session: The destroy_session of this LogoutInfo. # noqa: E501 - :type: bool - """ - - self._destroy_session = destroy_session - - @property - def ajax(self): - """Gets the ajax of this LogoutInfo. # noqa: E501 - - - :return: The ajax of this LogoutInfo. # noqa: E501 - :rtype: bool - """ - return self._ajax - - @ajax.setter - def ajax(self, ajax): - """Sets the ajax of this LogoutInfo. - - - :param ajax: The ajax of this LogoutInfo. # noqa: E501 - :type: bool - """ - - self._ajax = ajax - - @property - def next(self): - """Gets the next of this LogoutInfo. # noqa: E501 - - - :return: The next of this LogoutInfo. # noqa: E501 - :rtype: str - """ - return self._next - - @next.setter - def next(self, next): - """Sets the next of this LogoutInfo. - - - :param next: The next of this LogoutInfo. 
# noqa: E501 - :type: str - """ - - self._next = next - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(LogoutInfo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, LogoutInfo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mainnav.py b/edu_sharing_client/models/mainnav.py deleted file mode 100644 index 58a4c9c3..00000000 --- a/edu_sharing_client/models/mainnav.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Mainnav(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'icon': 'Icon', - 'main_menu_style': 'str' - } - - attribute_map = { - 'icon': 'icon', - 'main_menu_style': 'mainMenuStyle' - } - - def __init__(self, icon=None, main_menu_style=None): # noqa: E501 - """Mainnav - a model defined in Swagger""" # noqa: E501 - self._icon = None - self._main_menu_style = None - self.discriminator = None - if icon is not None: - self.icon = icon - if main_menu_style is not None: - self.main_menu_style = main_menu_style - - @property - def icon(self): - """Gets the icon of this Mainnav. # noqa: E501 - - - :return: The icon of this Mainnav. # noqa: E501 - :rtype: Icon - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this Mainnav. - - - :param icon: The icon of this Mainnav. # noqa: E501 - :type: Icon - """ - - self._icon = icon - - @property - def main_menu_style(self): - """Gets the main_menu_style of this Mainnav. # noqa: E501 - - - :return: The main_menu_style of this Mainnav. # noqa: E501 - :rtype: str - """ - return self._main_menu_style - - @main_menu_style.setter - def main_menu_style(self, main_menu_style): - """Sets the main_menu_style of this Mainnav. - - - :param main_menu_style: The main_menu_style of this Mainnav. 
# noqa: E501 - :type: str - """ - - self._main_menu_style = main_menu_style - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Mainnav, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Mainnav): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mc_org_connect_result.py b/edu_sharing_client/models/mc_org_connect_result.py deleted file mode 100644 index 7372b80a..00000000 --- a/edu_sharing_client/models/mc_org_connect_result.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class McOrgConnectResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'rows': 'int' - } - - attribute_map = { - 'rows': 'rows' - } - - def __init__(self, rows=None): # noqa: E501 - """McOrgConnectResult - a model defined in Swagger""" # noqa: E501 - self._rows = None - self.discriminator = None - if rows is not None: - self.rows = rows - - @property - def rows(self): - """Gets the rows of this McOrgConnectResult. # noqa: E501 - - - :return: The rows of this McOrgConnectResult. # noqa: E501 - :rtype: int - """ - return self._rows - - @rows.setter - def rows(self, rows): - """Sets the rows of this McOrgConnectResult. - - - :param rows: The rows of this McOrgConnectResult. 
# noqa: E501 - :type: int - """ - - self._rows = rows - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(McOrgConnectResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, McOrgConnectResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds.py b/edu_sharing_client/models/mds.py deleted file mode 100644 index 45ea8968..00000000 --- a/edu_sharing_client/models/mds.py +++ /dev/null @@ -1,247 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Mds(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'types': 'list[MdsType]', - 'ref': 'MdsRef', - 'forms': 'list[MdsForm]', - 'lists': 'list[MdsList]', - 'views': 'list[MdsView]', - 'queries': 'MdsQueries' - } - - attribute_map = { - 'types': 'types', - 'ref': 'ref', - 'forms': 'forms', - 'lists': 'lists', - 'views': 'views', - 'queries': 'queries' - } - - def __init__(self, types=None, ref=None, forms=None, lists=None, views=None, queries=None): # noqa: E501 - """Mds - a model defined in Swagger""" # noqa: E501 - self._types = None - self._ref = None - self._forms = None - self._lists = None - self._views = None - self._queries = None - self.discriminator = None - self.types = types - self.ref = ref - self.forms = forms - self.lists = lists - self.views = views - self.queries = queries - - @property - def types(self): - """Gets the types of this Mds. # noqa: E501 - - - :return: The types of this Mds. # noqa: E501 - :rtype: list[MdsType] - """ - return self._types - - @types.setter - def types(self, types): - """Sets the types of this Mds. - - - :param types: The types of this Mds. # noqa: E501 - :type: list[MdsType] - """ - if types is None: - raise ValueError("Invalid value for `types`, must not be `None`") # noqa: E501 - - self._types = types - - @property - def ref(self): - """Gets the ref of this Mds. # noqa: E501 - - - :return: The ref of this Mds. # noqa: E501 - :rtype: MdsRef - """ - return self._ref - - @ref.setter - def ref(self, ref): - """Sets the ref of this Mds. 
- - - :param ref: The ref of this Mds. # noqa: E501 - :type: MdsRef - """ - if ref is None: - raise ValueError("Invalid value for `ref`, must not be `None`") # noqa: E501 - - self._ref = ref - - @property - def forms(self): - """Gets the forms of this Mds. # noqa: E501 - - - :return: The forms of this Mds. # noqa: E501 - :rtype: list[MdsForm] - """ - return self._forms - - @forms.setter - def forms(self, forms): - """Sets the forms of this Mds. - - - :param forms: The forms of this Mds. # noqa: E501 - :type: list[MdsForm] - """ - if forms is None: - raise ValueError("Invalid value for `forms`, must not be `None`") # noqa: E501 - - self._forms = forms - - @property - def lists(self): - """Gets the lists of this Mds. # noqa: E501 - - - :return: The lists of this Mds. # noqa: E501 - :rtype: list[MdsList] - """ - return self._lists - - @lists.setter - def lists(self, lists): - """Sets the lists of this Mds. - - - :param lists: The lists of this Mds. # noqa: E501 - :type: list[MdsList] - """ - if lists is None: - raise ValueError("Invalid value for `lists`, must not be `None`") # noqa: E501 - - self._lists = lists - - @property - def views(self): - """Gets the views of this Mds. # noqa: E501 - - - :return: The views of this Mds. # noqa: E501 - :rtype: list[MdsView] - """ - return self._views - - @views.setter - def views(self, views): - """Sets the views of this Mds. - - - :param views: The views of this Mds. # noqa: E501 - :type: list[MdsView] - """ - if views is None: - raise ValueError("Invalid value for `views`, must not be `None`") # noqa: E501 - - self._views = views - - @property - def queries(self): - """Gets the queries of this Mds. # noqa: E501 - - - :return: The queries of this Mds. # noqa: E501 - :rtype: MdsQueries - """ - return self._queries - - @queries.setter - def queries(self, queries): - """Sets the queries of this Mds. - - - :param queries: The queries of this Mds. 
# noqa: E501 - :type: MdsQueries - """ - if queries is None: - raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 - - self._queries = queries - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Mds, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Mds): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_entries_v2.py b/edu_sharing_client/models/mds_entries_v2.py deleted file mode 100644 index f927b5d7..00000000 --- a/edu_sharing_client/models/mds_entries_v2.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsEntriesV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'metadatasets': 'list[MetadataSetInfo]' - } - - attribute_map = { - 'metadatasets': 'metadatasets' - } - - def __init__(self, metadatasets=None): # noqa: E501 - """MdsEntriesV2 - a model defined in Swagger""" # noqa: E501 - self._metadatasets = None - self.discriminator = None - if metadatasets is not None: - self.metadatasets = metadatasets - - @property - def metadatasets(self): - """Gets the metadatasets of this MdsEntriesV2. # noqa: E501 - - - :return: The metadatasets of this MdsEntriesV2. # noqa: E501 - :rtype: list[MetadataSetInfo] - """ - return self._metadatasets - - @metadatasets.setter - def metadatasets(self, metadatasets): - """Sets the metadatasets of this MdsEntriesV2. - - - :param metadatasets: The metadatasets of this MdsEntriesV2. 
# noqa: E501 - :type: list[MetadataSetInfo] - """ - - self._metadatasets = metadatasets - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsEntriesV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsEntriesV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_entry.py b/edu_sharing_client/models/mds_entry.py deleted file mode 100644 index a8d7489a..00000000 --- a/edu_sharing_client/models/mds_entry.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'mds': 'Mds' - } - - attribute_map = { - 'mds': 'mds' - } - - def __init__(self, mds=None): # noqa: E501 - """MdsEntry - a model defined in Swagger""" # noqa: E501 - self._mds = None - self.discriminator = None - self.mds = mds - - @property - def mds(self): - """Gets the mds of this MdsEntry. # noqa: E501 - - - :return: The mds of this MdsEntry. # noqa: E501 - :rtype: Mds - """ - return self._mds - - @mds.setter - def mds(self, mds): - """Sets the mds of this MdsEntry. - - - :param mds: The mds of this MdsEntry. 
# noqa: E501 - :type: Mds - """ - if mds is None: - raise ValueError("Invalid value for `mds`, must not be `None`") # noqa: E501 - - self._mds = mds - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_form.py b/edu_sharing_client/models/mds_form.py deleted file mode 100644 index 2a923894..00000000 --- a/edu_sharing_client/models/mds_form.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsForm(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'panels': 'list[MdsFormPanel]' - } - - attribute_map = { - 'id': 'id', - 'panels': 'panels' - } - - def __init__(self, id=None, panels=None): # noqa: E501 - """MdsForm - a model defined in Swagger""" # noqa: E501 - self._id = None - self._panels = None - self.discriminator = None - self.id = id - self.panels = panels - - @property - def id(self): - """Gets the id of this MdsForm. # noqa: E501 - - - :return: The id of this MdsForm. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this MdsForm. - - - :param id: The id of this MdsForm. # noqa: E501 - :type: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id - - @property - def panels(self): - """Gets the panels of this MdsForm. # noqa: E501 - - - :return: The panels of this MdsForm. # noqa: E501 - :rtype: list[MdsFormPanel] - """ - return self._panels - - @panels.setter - def panels(self, panels): - """Sets the panels of this MdsForm. - - - :param panels: The panels of this MdsForm. 
# noqa: E501 - :type: list[MdsFormPanel] - """ - if panels is None: - raise ValueError("Invalid value for `panels`, must not be `None`") # noqa: E501 - - self._panels = panels - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsForm, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsForm): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_form_panel.py b/edu_sharing_client/models/mds_form_panel.py deleted file mode 100644 index 35995edc..00000000 --- a/edu_sharing_client/models/mds_form_panel.py +++ /dev/null @@ -1,328 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsFormPanel(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str', - 'style_name': 'str', - 'label': 'str', - 'layout': 'str', - 'on_create': 'bool', - 'on_update': 'bool', - 'multi_upload': 'bool', - 'order': 'str', - 'properties': 'list[MdsFormProperty]' - } - - attribute_map = { - 'name': 'name', - 'style_name': 'styleName', - 'label': 'label', - 'layout': 'layout', - 'on_create': 'onCreate', - 'on_update': 'onUpdate', - 'multi_upload': 'multiUpload', - 'order': 'order', - 'properties': 'properties' - } - - def __init__(self, name=None, style_name=None, label=None, layout=None, on_create=False, on_update=False, multi_upload=False, order=None, properties=None): # noqa: E501 - """MdsFormPanel - a model defined in Swagger""" # noqa: E501 - self._name = None - self._style_name = None - self._label = None - self._layout = None - self._on_create = None - self._on_update = None - self._multi_upload = None - self._order = None - self._properties = None - self.discriminator = None - self.name = name - self.style_name = style_name - self.label = label - self.layout = layout - self.on_create = on_create - self.on_update = on_update - self.multi_upload = multi_upload - self.order = order - self.properties = properties - - @property - def name(self): - """Gets the name of this MdsFormPanel. # noqa: E501 - - - :return: The name of this MdsFormPanel. 
# noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsFormPanel. - - - :param name: The name of this MdsFormPanel. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def style_name(self): - """Gets the style_name of this MdsFormPanel. # noqa: E501 - - - :return: The style_name of this MdsFormPanel. # noqa: E501 - :rtype: str - """ - return self._style_name - - @style_name.setter - def style_name(self, style_name): - """Sets the style_name of this MdsFormPanel. - - - :param style_name: The style_name of this MdsFormPanel. # noqa: E501 - :type: str - """ - if style_name is None: - raise ValueError("Invalid value for `style_name`, must not be `None`") # noqa: E501 - - self._style_name = style_name - - @property - def label(self): - """Gets the label of this MdsFormPanel. # noqa: E501 - - - :return: The label of this MdsFormPanel. # noqa: E501 - :rtype: str - """ - return self._label - - @label.setter - def label(self, label): - """Sets the label of this MdsFormPanel. - - - :param label: The label of this MdsFormPanel. # noqa: E501 - :type: str - """ - if label is None: - raise ValueError("Invalid value for `label`, must not be `None`") # noqa: E501 - - self._label = label - - @property - def layout(self): - """Gets the layout of this MdsFormPanel. # noqa: E501 - - - :return: The layout of this MdsFormPanel. # noqa: E501 - :rtype: str - """ - return self._layout - - @layout.setter - def layout(self, layout): - """Sets the layout of this MdsFormPanel. - - - :param layout: The layout of this MdsFormPanel. # noqa: E501 - :type: str - """ - if layout is None: - raise ValueError("Invalid value for `layout`, must not be `None`") # noqa: E501 - - self._layout = layout - - @property - def on_create(self): - """Gets the on_create of this MdsFormPanel. # noqa: E501 - - - :return: The on_create of this MdsFormPanel. # noqa: E501 - :rtype: bool - """ - return self._on_create - - @on_create.setter - def on_create(self, on_create): - """Sets the on_create of this MdsFormPanel. - - - :param on_create: The on_create of this MdsFormPanel. # noqa: E501 - :type: bool - """ - if on_create is None: - raise ValueError("Invalid value for `on_create`, must not be `None`") # noqa: E501 - - self._on_create = on_create - - @property - def on_update(self): - """Gets the on_update of this MdsFormPanel. # noqa: E501 - - - :return: The on_update of this MdsFormPanel. # noqa: E501 - :rtype: bool - """ - return self._on_update - - @on_update.setter - def on_update(self, on_update): - """Sets the on_update of this MdsFormPanel. - - - :param on_update: The on_update of this MdsFormPanel. # noqa: E501 - :type: bool - """ - if on_update is None: - raise ValueError("Invalid value for `on_update`, must not be `None`") # noqa: E501 - - self._on_update = on_update - - @property - def multi_upload(self): - """Gets the multi_upload of this MdsFormPanel. # noqa: E501 - - - :return: The multi_upload of this MdsFormPanel. # noqa: E501 - :rtype: bool - """ - return self._multi_upload - - @multi_upload.setter - def multi_upload(self, multi_upload): - """Sets the multi_upload of this MdsFormPanel. - - - :param multi_upload: The multi_upload of this MdsFormPanel. 
# noqa: E501 - :type: bool - """ - if multi_upload is None: - raise ValueError("Invalid value for `multi_upload`, must not be `None`") # noqa: E501 - - self._multi_upload = multi_upload - - @property - def order(self): - """Gets the order of this MdsFormPanel. # noqa: E501 - - - :return: The order of this MdsFormPanel. # noqa: E501 - :rtype: str - """ - return self._order - - @order.setter - def order(self, order): - """Sets the order of this MdsFormPanel. - - - :param order: The order of this MdsFormPanel. # noqa: E501 - :type: str - """ - if order is None: - raise ValueError("Invalid value for `order`, must not be `None`") # noqa: E501 - - self._order = order - - @property - def properties(self): - """Gets the properties of this MdsFormPanel. # noqa: E501 - - - :return: The properties of this MdsFormPanel. # noqa: E501 - :rtype: list[MdsFormProperty] - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this MdsFormPanel. - - - :param properties: The properties of this MdsFormPanel. # noqa: E501 - :type: list[MdsFormProperty] - """ - if properties is None: - raise ValueError("Invalid value for `properties`, must not be `None`") # noqa: E501 - - self._properties = properties - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsFormPanel, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsFormPanel): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_form_property.py b/edu_sharing_client/models/mds_form_property.py deleted file mode 100644 index 0643ce1b..00000000 --- a/edu_sharing_client/models/mds_form_property.py +++ /dev/null @@ -1,544 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsFormProperty(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'label': 'str', - 'label_hint': 'str', - 'form_height': 'str', - 'form_length': 'str', - 'widget': 'str', - 'widget_title': 'str', - 'copy_from': 'list[str]', - 'validators': 'list[str]', - 'parameters': 'list[MdsFormPropertyParameter]', - 'values': 'list[MdsFormPropertyValue]', - 'default_values': 'list[str]', - 'multiple': 'bool', - 'place_holder': 'str', - 'style_name': 'str', - 'style_name_label': 'str', - 'type': 'str' - } - - attribute_map = { - 'name': 'name', - 'label': 'label', - 'label_hint': 'labelHint', - 'form_height': 'formHeight', - 'form_length': 'formLength', - 'widget': 'widget', - 'widget_title': 'widgetTitle', - 'copy_from': 'copyFrom', - 'validators': 'validators', - 'parameters': 'parameters', - 'values': 'values', - 'default_values': 'defaultValues', - 'multiple': 'multiple', - 'place_holder': 'placeHolder', - 'style_name': 'styleName', - 'style_name_label': 'styleNameLabel', - 'type': 'type' - } - - def __init__(self, name=None, label=None, label_hint=None, form_height=None, form_length=None, widget=None, widget_title=None, copy_from=None, validators=None, parameters=None, values=None, default_values=None, multiple=False, place_holder=None, style_name=None, style_name_label=None, type=None): # noqa: E501 - """MdsFormProperty - a model defined in Swagger""" # noqa: E501 - self._name = None - self._label = None - self._label_hint = None - self._form_height = None - self._form_length = None - self._widget = None - self._widget_title = None - self._copy_from = None - self._validators = None - self._parameters = None - self._values = None - self._default_values = None - self._multiple = None - self._place_holder = None - self._style_name = None - self._style_name_label = None - self._type = None - self.discriminator = None - self.name = name - self.label = label - self.label_hint = label_hint - self.form_height = form_height - self.form_length = form_length - self.widget = widget - self.widget_title = widget_title - self.copy_from = copy_from - self.validators = validators - self.parameters = parameters - self.values = values - self.default_values = default_values - self.multiple = multiple - self.place_holder = place_holder - self.style_name = style_name - self.style_name_label = style_name_label - self.type = type - - @property - def name(self): - """Gets the name of this MdsFormProperty. # noqa: E501 - - - :return: The name of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsFormProperty. - - - :param name: The name of this MdsFormProperty. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def label(self): - """Gets the label of this MdsFormProperty. # noqa: E501 - - - :return: The label of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._label - - @label.setter - def label(self, label): - """Sets the label of this MdsFormProperty. - - - :param label: The label of this MdsFormProperty. # noqa: E501 - :type: str - """ - if label is None: - raise ValueError("Invalid value for `label`, must not be `None`") # noqa: E501 - - self._label = label - - @property - def label_hint(self): - """Gets the label_hint of this MdsFormProperty. # noqa: E501 - - - :return: The label_hint of this MdsFormProperty. 
# noqa: E501 - :rtype: str - """ - return self._label_hint - - @label_hint.setter - def label_hint(self, label_hint): - """Sets the label_hint of this MdsFormProperty. - - - :param label_hint: The label_hint of this MdsFormProperty. # noqa: E501 - :type: str - """ - if label_hint is None: - raise ValueError("Invalid value for `label_hint`, must not be `None`") # noqa: E501 - - self._label_hint = label_hint - - @property - def form_height(self): - """Gets the form_height of this MdsFormProperty. # noqa: E501 - - - :return: The form_height of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._form_height - - @form_height.setter - def form_height(self, form_height): - """Sets the form_height of this MdsFormProperty. - - - :param form_height: The form_height of this MdsFormProperty. # noqa: E501 - :type: str - """ - if form_height is None: - raise ValueError("Invalid value for `form_height`, must not be `None`") # noqa: E501 - - self._form_height = form_height - - @property - def form_length(self): - """Gets the form_length of this MdsFormProperty. # noqa: E501 - - - :return: The form_length of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._form_length - - @form_length.setter - def form_length(self, form_length): - """Sets the form_length of this MdsFormProperty. - - - :param form_length: The form_length of this MdsFormProperty. # noqa: E501 - :type: str - """ - if form_length is None: - raise ValueError("Invalid value for `form_length`, must not be `None`") # noqa: E501 - - self._form_length = form_length - - @property - def widget(self): - """Gets the widget of this MdsFormProperty. # noqa: E501 - - - :return: The widget of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._widget - - @widget.setter - def widget(self, widget): - """Sets the widget of this MdsFormProperty. - - - :param widget: The widget of this MdsFormProperty. # noqa: E501 - :type: str - """ - if widget is None: - raise ValueError("Invalid value for `widget`, must not be `None`") # noqa: E501 - - self._widget = widget - - @property - def widget_title(self): - """Gets the widget_title of this MdsFormProperty. # noqa: E501 - - - :return: The widget_title of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._widget_title - - @widget_title.setter - def widget_title(self, widget_title): - """Sets the widget_title of this MdsFormProperty. - - - :param widget_title: The widget_title of this MdsFormProperty. # noqa: E501 - :type: str - """ - if widget_title is None: - raise ValueError("Invalid value for `widget_title`, must not be `None`") # noqa: E501 - - self._widget_title = widget_title - - @property - def copy_from(self): - """Gets the copy_from of this MdsFormProperty. # noqa: E501 - - - :return: The copy_from of this MdsFormProperty. # noqa: E501 - :rtype: list[str] - """ - return self._copy_from - - @copy_from.setter - def copy_from(self, copy_from): - """Sets the copy_from of this MdsFormProperty. - - - :param copy_from: The copy_from of this MdsFormProperty. # noqa: E501 - :type: list[str] - """ - if copy_from is None: - raise ValueError("Invalid value for `copy_from`, must not be `None`") # noqa: E501 - - self._copy_from = copy_from - - @property - def validators(self): - """Gets the validators of this MdsFormProperty. # noqa: E501 - - - :return: The validators of this MdsFormProperty. 
# noqa: E501 - :rtype: list[str] - """ - return self._validators - - @validators.setter - def validators(self, validators): - """Sets the validators of this MdsFormProperty. - - - :param validators: The validators of this MdsFormProperty. # noqa: E501 - :type: list[str] - """ - if validators is None: - raise ValueError("Invalid value for `validators`, must not be `None`") # noqa: E501 - - self._validators = validators - - @property - def parameters(self): - """Gets the parameters of this MdsFormProperty. # noqa: E501 - - - :return: The parameters of this MdsFormProperty. # noqa: E501 - :rtype: list[MdsFormPropertyParameter] - """ - return self._parameters - - @parameters.setter - def parameters(self, parameters): - """Sets the parameters of this MdsFormProperty. - - - :param parameters: The parameters of this MdsFormProperty. # noqa: E501 - :type: list[MdsFormPropertyParameter] - """ - if parameters is None: - raise ValueError("Invalid value for `parameters`, must not be `None`") # noqa: E501 - - self._parameters = parameters - - @property - def values(self): - """Gets the values of this MdsFormProperty. # noqa: E501 - - - :return: The values of this MdsFormProperty. # noqa: E501 - :rtype: list[MdsFormPropertyValue] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this MdsFormProperty. - - - :param values: The values of this MdsFormProperty. # noqa: E501 - :type: list[MdsFormPropertyValue] - """ - if values is None: - raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501 - - self._values = values - - @property - def default_values(self): - """Gets the default_values of this MdsFormProperty. # noqa: E501 - - - :return: The default_values of this MdsFormProperty. # noqa: E501 - :rtype: list[str] - """ - return self._default_values - - @default_values.setter - def default_values(self, default_values): - """Sets the default_values of this MdsFormProperty. - - - :param default_values: The default_values of this MdsFormProperty. # noqa: E501 - :type: list[str] - """ - if default_values is None: - raise ValueError("Invalid value for `default_values`, must not be `None`") # noqa: E501 - - self._default_values = default_values - - @property - def multiple(self): - """Gets the multiple of this MdsFormProperty. # noqa: E501 - - - :return: The multiple of this MdsFormProperty. # noqa: E501 - :rtype: bool - """ - return self._multiple - - @multiple.setter - def multiple(self, multiple): - """Sets the multiple of this MdsFormProperty. - - - :param multiple: The multiple of this MdsFormProperty. # noqa: E501 - :type: bool - """ - if multiple is None: - raise ValueError("Invalid value for `multiple`, must not be `None`") # noqa: E501 - - self._multiple = multiple - - @property - def place_holder(self): - """Gets the place_holder of this MdsFormProperty. # noqa: E501 - - - :return: The place_holder of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._place_holder - - @place_holder.setter - def place_holder(self, place_holder): - """Sets the place_holder of this MdsFormProperty. - - - :param place_holder: The place_holder of this MdsFormProperty. # noqa: E501 - :type: str - """ - if place_holder is None: - raise ValueError("Invalid value for `place_holder`, must not be `None`") # noqa: E501 - - self._place_holder = place_holder - - @property - def style_name(self): - """Gets the style_name of this MdsFormProperty. # noqa: E501 - - - :return: The style_name of this MdsFormProperty. 
# noqa: E501 - :rtype: str - """ - return self._style_name - - @style_name.setter - def style_name(self, style_name): - """Sets the style_name of this MdsFormProperty. - - - :param style_name: The style_name of this MdsFormProperty. # noqa: E501 - :type: str - """ - if style_name is None: - raise ValueError("Invalid value for `style_name`, must not be `None`") # noqa: E501 - - self._style_name = style_name - - @property - def style_name_label(self): - """Gets the style_name_label of this MdsFormProperty. # noqa: E501 - - - :return: The style_name_label of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._style_name_label - - @style_name_label.setter - def style_name_label(self, style_name_label): - """Sets the style_name_label of this MdsFormProperty. - - - :param style_name_label: The style_name_label of this MdsFormProperty. # noqa: E501 - :type: str - """ - if style_name_label is None: - raise ValueError("Invalid value for `style_name_label`, must not be `None`") # noqa: E501 - - self._style_name_label = style_name_label - - @property - def type(self): - """Gets the type of this MdsFormProperty. # noqa: E501 - - - :return: The type of this MdsFormProperty. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this MdsFormProperty. - - - :param type: The type of this MdsFormProperty. # noqa: E501 - :type: str - """ - if type is None: - raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 - - self._type = type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsFormProperty, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsFormProperty): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_form_property_parameter.py b/edu_sharing_client/models/mds_form_property_parameter.py deleted file mode 100644 index d44ba50c..00000000 --- a/edu_sharing_client/models/mds_form_property_parameter.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsFormPropertyParameter(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str', - 'value': 'str' - } - - attribute_map = { - 'name': 'name', - 'value': 'value' - } - - def __init__(self, name=None, value=None): # noqa: E501 - """MdsFormPropertyParameter - a model defined in Swagger""" # noqa: E501 - self._name = None - self._value = None - self.discriminator = None - self.name = name - self.value = value - - @property - def name(self): - """Gets the name of this MdsFormPropertyParameter. # noqa: E501 - - - :return: The name of this MdsFormPropertyParameter. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsFormPropertyParameter. - - - :param name: The name of this MdsFormPropertyParameter. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def value(self): - """Gets the value of this MdsFormPropertyParameter. # noqa: E501 - - - :return: The value of this MdsFormPropertyParameter. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsFormPropertyParameter. - - - :param value: The value of this MdsFormPropertyParameter. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsFormPropertyParameter, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsFormPropertyParameter): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_form_property_value.py b/edu_sharing_client/models/mds_form_property_value.py deleted file mode 100644 index 97381383..00000000 --- a/edu_sharing_client/models/mds_form_property_value.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsFormPropertyValue(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'key': 'str', - 'value': 'str' - } - - attribute_map = { - 'key': 'key', - 'value': 'value' - } - - def __init__(self, key=None, value=None): # noqa: E501 - """MdsFormPropertyValue - a model defined in Swagger""" # noqa: E501 - self._key = None - self._value = None - self.discriminator = None - self.key = key - self.value = value - - @property - def key(self): - """Gets the key of this MdsFormPropertyValue. # noqa: E501 - - - :return: The key of this MdsFormPropertyValue. # noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this MdsFormPropertyValue. - - - :param key: The key of this MdsFormPropertyValue. # noqa: E501 - :type: str - """ - if key is None: - raise ValueError("Invalid value for `key`, must not be `None`") # noqa: E501 - - self._key = key - - @property - def value(self): - """Gets the value of this MdsFormPropertyValue. # noqa: E501 - - - :return: The value of this MdsFormPropertyValue. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsFormPropertyValue. - - - :param value: The value of this MdsFormPropertyValue. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsFormPropertyValue, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsFormPropertyValue): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_list.py b/edu_sharing_client/models/mds_list.py deleted file mode 100644 index 30ce32b3..00000000 --- a/edu_sharing_client/models/mds_list.py +++ /dev/null @@ -1,166 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsList(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'str', - 'label': 'str', - 'properties': 'list[MdsListProperty]' - } - - attribute_map = { - 'id': 'id', - 'label': 'label', - 'properties': 'properties' - } - - def __init__(self, id=None, label=None, properties=None): # noqa: E501 - """MdsList - a model defined in Swagger""" # noqa: E501 - self._id = None - self._label = None - self._properties = None - self.discriminator = None - self.id = id - self.label = label - self.properties = properties - - @property - def id(self): - """Gets the id of this MdsList. # noqa: E501 - - - :return: The id of this MdsList. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this MdsList. - - - :param id: The id of this MdsList. # noqa: E501 - :type: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id - - @property - def label(self): - """Gets the label of this MdsList. # noqa: E501 - - - :return: The label of this MdsList. # noqa: E501 - :rtype: str - """ - return self._label - - @label.setter - def label(self, label): - """Sets the label of this MdsList. - - - :param label: The label of this MdsList. # noqa: E501 - :type: str - """ - if label is None: - raise ValueError("Invalid value for `label`, must not be `None`") # noqa: E501 - - self._label = label - - @property - def properties(self): - """Gets the properties of this MdsList. # noqa: E501 - - - :return: The properties of this MdsList. # noqa: E501 - :rtype: list[MdsListProperty] - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this MdsList. - - - :param properties: The properties of this MdsList. # noqa: E501 - :type: list[MdsListProperty] - """ - if properties is None: - raise ValueError("Invalid value for `properties`, must not be `None`") # noqa: E501 - - self._properties = properties - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsList, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsList): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_list_property.py b/edu_sharing_client/models/mds_list_property.py deleted file mode 100644 index e2cdd56c..00000000 --- a/edu_sharing_client/models/mds_list_property.py +++ /dev/null @@ -1,517 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsListProperty(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str', - 'label': 'str', - 'label_hint': 'str', - 'form_height': 'str', - 'form_length': 'str', - 'widget': 'str', - 'widget_title': 'str', - 'copy_from': 'list[str]', - 'parameters': 'list[MdsListPropertyParameter]', - 'values': 'list[MdsListPropertyValue]', - 'default_values': 'list[str]', - 'multiple': 'bool', - 'place_holder': 'str', - 'style_name': 'str', - 'style_name_label': 'str', - 'type': 'str' - } - - attribute_map = { - 'name': 'name', - 'label': 'label', - 'label_hint': 'labelHint', - 'form_height': 'formHeight', - 'form_length': 'formLength', - 'widget': 'widget', - 'widget_title': 'widgetTitle', - 'copy_from': 'copyFrom', - 'parameters': 'parameters', - 'values': 'values', - 'default_values': 'defaultValues', - 'multiple': 'multiple', - 'place_holder': 'placeHolder', - 'style_name': 'styleName', - 'style_name_label': 'styleNameLabel', - 'type': 'type' - } - - def __init__(self, name=None, label=None, label_hint=None, form_height=None, form_length=None, widget=None, widget_title=None, copy_from=None, parameters=None, values=None, default_values=None, multiple=False, place_holder=None, style_name=None, style_name_label=None, type=None): # noqa: E501 - """MdsListProperty - a model defined in Swagger""" # noqa: E501 - self._name = None - self._label = None - self._label_hint = None - self._form_height = None - self._form_length = None - self._widget = None - self._widget_title = None - self._copy_from = None - self._parameters = None - self._values = None - self._default_values = None - self._multiple = None - self._place_holder = None - self._style_name = None - self._style_name_label = None - self._type = None - self.discriminator = None - self.name = name - self.label = label - self.label_hint = label_hint - self.form_height = form_height - self.form_length = form_length - self.widget = widget - self.widget_title = widget_title - self.copy_from = copy_from - self.parameters = parameters - self.values = values - self.default_values = default_values - self.multiple = multiple - self.place_holder = place_holder - self.style_name = style_name - self.style_name_label = style_name_label - self.type = type - - @property - def name(self): - """Gets the name of this MdsListProperty. # noqa: E501 - - - :return: The name of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsListProperty. - - - :param name: The name of this MdsListProperty. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def label(self): - """Gets the label of this MdsListProperty. # noqa: E501 - - - :return: The label of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._label - - @label.setter - def label(self, label): - """Sets the label of this MdsListProperty. - - - :param label: The label of this MdsListProperty. 
# noqa: E501 - :type: str - """ - if label is None: - raise ValueError("Invalid value for `label`, must not be `None`") # noqa: E501 - - self._label = label - - @property - def label_hint(self): - """Gets the label_hint of this MdsListProperty. # noqa: E501 - - - :return: The label_hint of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._label_hint - - @label_hint.setter - def label_hint(self, label_hint): - """Sets the label_hint of this MdsListProperty. - - - :param label_hint: The label_hint of this MdsListProperty. # noqa: E501 - :type: str - """ - if label_hint is None: - raise ValueError("Invalid value for `label_hint`, must not be `None`") # noqa: E501 - - self._label_hint = label_hint - - @property - def form_height(self): - """Gets the form_height of this MdsListProperty. # noqa: E501 - - - :return: The form_height of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._form_height - - @form_height.setter - def form_height(self, form_height): - """Sets the form_height of this MdsListProperty. - - - :param form_height: The form_height of this MdsListProperty. # noqa: E501 - :type: str - """ - if form_height is None: - raise ValueError("Invalid value for `form_height`, must not be `None`") # noqa: E501 - - self._form_height = form_height - - @property - def form_length(self): - """Gets the form_length of this MdsListProperty. # noqa: E501 - - - :return: The form_length of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._form_length - - @form_length.setter - def form_length(self, form_length): - """Sets the form_length of this MdsListProperty. - - - :param form_length: The form_length of this MdsListProperty. # noqa: E501 - :type: str - """ - if form_length is None: - raise ValueError("Invalid value for `form_length`, must not be `None`") # noqa: E501 - - self._form_length = form_length - - @property - def widget(self): - """Gets the widget of this MdsListProperty. # noqa: E501 - - - :return: The widget of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._widget - - @widget.setter - def widget(self, widget): - """Sets the widget of this MdsListProperty. - - - :param widget: The widget of this MdsListProperty. # noqa: E501 - :type: str - """ - if widget is None: - raise ValueError("Invalid value for `widget`, must not be `None`") # noqa: E501 - - self._widget = widget - - @property - def widget_title(self): - """Gets the widget_title of this MdsListProperty. # noqa: E501 - - - :return: The widget_title of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._widget_title - - @widget_title.setter - def widget_title(self, widget_title): - """Sets the widget_title of this MdsListProperty. - - - :param widget_title: The widget_title of this MdsListProperty. # noqa: E501 - :type: str - """ - if widget_title is None: - raise ValueError("Invalid value for `widget_title`, must not be `None`") # noqa: E501 - - self._widget_title = widget_title - - @property - def copy_from(self): - """Gets the copy_from of this MdsListProperty. # noqa: E501 - - - :return: The copy_from of this MdsListProperty. # noqa: E501 - :rtype: list[str] - """ - return self._copy_from - - @copy_from.setter - def copy_from(self, copy_from): - """Sets the copy_from of this MdsListProperty. - - - :param copy_from: The copy_from of this MdsListProperty. 
# noqa: E501 - :type: list[str] - """ - if copy_from is None: - raise ValueError("Invalid value for `copy_from`, must not be `None`") # noqa: E501 - - self._copy_from = copy_from - - @property - def parameters(self): - """Gets the parameters of this MdsListProperty. # noqa: E501 - - - :return: The parameters of this MdsListProperty. # noqa: E501 - :rtype: list[MdsListPropertyParameter] - """ - return self._parameters - - @parameters.setter - def parameters(self, parameters): - """Sets the parameters of this MdsListProperty. - - - :param parameters: The parameters of this MdsListProperty. # noqa: E501 - :type: list[MdsListPropertyParameter] - """ - if parameters is None: - raise ValueError("Invalid value for `parameters`, must not be `None`") # noqa: E501 - - self._parameters = parameters - - @property - def values(self): - """Gets the values of this MdsListProperty. # noqa: E501 - - - :return: The values of this MdsListProperty. # noqa: E501 - :rtype: list[MdsListPropertyValue] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this MdsListProperty. - - - :param values: The values of this MdsListProperty. # noqa: E501 - :type: list[MdsListPropertyValue] - """ - if values is None: - raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501 - - self._values = values - - @property - def default_values(self): - """Gets the default_values of this MdsListProperty. # noqa: E501 - - - :return: The default_values of this MdsListProperty. # noqa: E501 - :rtype: list[str] - """ - return self._default_values - - @default_values.setter - def default_values(self, default_values): - """Sets the default_values of this MdsListProperty. - - - :param default_values: The default_values of this MdsListProperty. # noqa: E501 - :type: list[str] - """ - if default_values is None: - raise ValueError("Invalid value for `default_values`, must not be `None`") # noqa: E501 - - self._default_values = default_values - - @property - def multiple(self): - """Gets the multiple of this MdsListProperty. # noqa: E501 - - - :return: The multiple of this MdsListProperty. # noqa: E501 - :rtype: bool - """ - return self._multiple - - @multiple.setter - def multiple(self, multiple): - """Sets the multiple of this MdsListProperty. - - - :param multiple: The multiple of this MdsListProperty. # noqa: E501 - :type: bool - """ - if multiple is None: - raise ValueError("Invalid value for `multiple`, must not be `None`") # noqa: E501 - - self._multiple = multiple - - @property - def place_holder(self): - """Gets the place_holder of this MdsListProperty. # noqa: E501 - - - :return: The place_holder of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._place_holder - - @place_holder.setter - def place_holder(self, place_holder): - """Sets the place_holder of this MdsListProperty. - - - :param place_holder: The place_holder of this MdsListProperty. # noqa: E501 - :type: str - """ - if place_holder is None: - raise ValueError("Invalid value for `place_holder`, must not be `None`") # noqa: E501 - - self._place_holder = place_holder - - @property - def style_name(self): - """Gets the style_name of this MdsListProperty. # noqa: E501 - - - :return: The style_name of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._style_name - - @style_name.setter - def style_name(self, style_name): - """Sets the style_name of this MdsListProperty. - - - :param style_name: The style_name of this MdsListProperty. 
# noqa: E501 - :type: str - """ - if style_name is None: - raise ValueError("Invalid value for `style_name`, must not be `None`") # noqa: E501 - - self._style_name = style_name - - @property - def style_name_label(self): - """Gets the style_name_label of this MdsListProperty. # noqa: E501 - - - :return: The style_name_label of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._style_name_label - - @style_name_label.setter - def style_name_label(self, style_name_label): - """Sets the style_name_label of this MdsListProperty. - - - :param style_name_label: The style_name_label of this MdsListProperty. # noqa: E501 - :type: str - """ - if style_name_label is None: - raise ValueError("Invalid value for `style_name_label`, must not be `None`") # noqa: E501 - - self._style_name_label = style_name_label - - @property - def type(self): - """Gets the type of this MdsListProperty. # noqa: E501 - - - :return: The type of this MdsListProperty. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this MdsListProperty. - - - :param type: The type of this MdsListProperty. # noqa: E501 - :type: str - """ - if type is None: - raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 - - self._type = type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsListProperty, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsListProperty): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_list_property_parameter.py b/edu_sharing_client/models/mds_list_property_parameter.py deleted file mode 100644 index c3c9a42a..00000000 --- a/edu_sharing_client/models/mds_list_property_parameter.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsListPropertyParameter(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'value': 'str' - } - - attribute_map = { - 'name': 'name', - 'value': 'value' - } - - def __init__(self, name=None, value=None): # noqa: E501 - """MdsListPropertyParameter - a model defined in Swagger""" # noqa: E501 - self._name = None - self._value = None - self.discriminator = None - self.name = name - self.value = value - - @property - def name(self): - """Gets the name of this MdsListPropertyParameter. # noqa: E501 - - - :return: The name of this MdsListPropertyParameter. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsListPropertyParameter. - - - :param name: The name of this MdsListPropertyParameter. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def value(self): - """Gets the value of this MdsListPropertyParameter. # noqa: E501 - - - :return: The value of this MdsListPropertyParameter. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsListPropertyParameter. - - - :param value: The value of this MdsListPropertyParameter. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsListPropertyParameter, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsListPropertyParameter): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_list_property_value.py b/edu_sharing_client/models/mds_list_property_value.py deleted file mode 100644 index 6bd5018c..00000000 --- a/edu_sharing_client/models/mds_list_property_value.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsListPropertyValue(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'key': 'str', - 'value': 'str' - } - - attribute_map = { - 'key': 'key', - 'value': 'value' - } - - def __init__(self, key=None, value=None): # noqa: E501 - """MdsListPropertyValue - a model defined in Swagger""" # noqa: E501 - self._key = None - self._value = None - self.discriminator = None - self.key = key - self.value = value - - @property - def key(self): - """Gets the key of this MdsListPropertyValue. # noqa: E501 - - - :return: The key of this MdsListPropertyValue. # noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this MdsListPropertyValue. - - - :param key: The key of this MdsListPropertyValue. # noqa: E501 - :type: str - """ - if key is None: - raise ValueError("Invalid value for `key`, must not be `None`") # noqa: E501 - - self._key = key - - @property - def value(self): - """Gets the value of this MdsListPropertyValue. # noqa: E501 - - - :return: The value of this MdsListPropertyValue. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsListPropertyValue. - - - :param value: The value of this MdsListPropertyValue. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsListPropertyValue, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsListPropertyValue): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_property.py b/edu_sharing_client/models/mds_property.py deleted file mode 100644 index f135a7af..00000000 --- a/edu_sharing_client/models/mds_property.py +++ /dev/null @@ -1,301 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsProperty(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'type': 'str', - 'default_value': 'str', - 'processtype': 'str', - 'key_contenturl': 'str', - 'concatewithtype': 'bool', - 'multiple': 'bool', - 'copy_from': 'str' - } - - attribute_map = { - 'name': 'name', - 'type': 'type', - 'default_value': 'defaultValue', - 'processtype': 'processtype', - 'key_contenturl': 'keyContenturl', - 'concatewithtype': 'concatewithtype', - 'multiple': 'multiple', - 'copy_from': 'copyFrom' - } - - def __init__(self, name=None, type=None, default_value=None, processtype=None, key_contenturl=None, concatewithtype=False, multiple=False, copy_from=None): # noqa: E501 - """MdsProperty - a model defined in Swagger""" # noqa: E501 - self._name = None - self._type = None - self._default_value = None - self._processtype = None - self._key_contenturl = None - self._concatewithtype = None - self._multiple = None - self._copy_from = None - self.discriminator = None - self.name = name - self.type = type - self.default_value = default_value - self.processtype = processtype - self.key_contenturl = key_contenturl - self.concatewithtype = concatewithtype - self.multiple = multiple - self.copy_from = copy_from - - @property - def name(self): - """Gets the name of this MdsProperty. # noqa: E501 - - - :return: The name of this MdsProperty. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsProperty. - - - :param name: The name of this MdsProperty. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def type(self): - """Gets the type of this MdsProperty. # noqa: E501 - - - :return: The type of this MdsProperty. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this MdsProperty. - - - :param type: The type of this MdsProperty. # noqa: E501 - :type: str - """ - if type is None: - raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 - - self._type = type - - @property - def default_value(self): - """Gets the default_value of this MdsProperty. # noqa: E501 - - - :return: The default_value of this MdsProperty. # noqa: E501 - :rtype: str - """ - return self._default_value - - @default_value.setter - def default_value(self, default_value): - """Sets the default_value of this MdsProperty. - - - :param default_value: The default_value of this MdsProperty. # noqa: E501 - :type: str - """ - if default_value is None: - raise ValueError("Invalid value for `default_value`, must not be `None`") # noqa: E501 - - self._default_value = default_value - - @property - def processtype(self): - """Gets the processtype of this MdsProperty. # noqa: E501 - - - :return: The processtype of this MdsProperty. # noqa: E501 - :rtype: str - """ - return self._processtype - - @processtype.setter - def processtype(self, processtype): - """Sets the processtype of this MdsProperty. - - - :param processtype: The processtype of this MdsProperty. # noqa: E501 - :type: str - """ - if processtype is None: - raise ValueError("Invalid value for `processtype`, must not be `None`") # noqa: E501 - - self._processtype = processtype - - @property - def key_contenturl(self): - """Gets the key_contenturl of this MdsProperty. # noqa: E501 - - - :return: The key_contenturl of this MdsProperty. 
# noqa: E501 - :rtype: str - """ - return self._key_contenturl - - @key_contenturl.setter - def key_contenturl(self, key_contenturl): - """Sets the key_contenturl of this MdsProperty. - - - :param key_contenturl: The key_contenturl of this MdsProperty. # noqa: E501 - :type: str - """ - if key_contenturl is None: - raise ValueError("Invalid value for `key_contenturl`, must not be `None`") # noqa: E501 - - self._key_contenturl = key_contenturl - - @property - def concatewithtype(self): - """Gets the concatewithtype of this MdsProperty. # noqa: E501 - - - :return: The concatewithtype of this MdsProperty. # noqa: E501 - :rtype: bool - """ - return self._concatewithtype - - @concatewithtype.setter - def concatewithtype(self, concatewithtype): - """Sets the concatewithtype of this MdsProperty. - - - :param concatewithtype: The concatewithtype of this MdsProperty. # noqa: E501 - :type: bool - """ - if concatewithtype is None: - raise ValueError("Invalid value for `concatewithtype`, must not be `None`") # noqa: E501 - - self._concatewithtype = concatewithtype - - @property - def multiple(self): - """Gets the multiple of this MdsProperty. # noqa: E501 - - - :return: The multiple of this MdsProperty. # noqa: E501 - :rtype: bool - """ - return self._multiple - - @multiple.setter - def multiple(self, multiple): - """Sets the multiple of this MdsProperty. - - - :param multiple: The multiple of this MdsProperty. # noqa: E501 - :type: bool - """ - if multiple is None: - raise ValueError("Invalid value for `multiple`, must not be `None`") # noqa: E501 - - self._multiple = multiple - - @property - def copy_from(self): - """Gets the copy_from of this MdsProperty. # noqa: E501 - - - :return: The copy_from of this MdsProperty. # noqa: E501 - :rtype: str - """ - return self._copy_from - - @copy_from.setter - def copy_from(self, copy_from): - """Sets the copy_from of this MdsProperty. - - - :param copy_from: The copy_from of this MdsProperty. 
# noqa: E501 - :type: str - """ - if copy_from is None: - raise ValueError("Invalid value for `copy_from`, must not be `None`") # noqa: E501 - - self._copy_from = copy_from - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsProperty, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsProperty): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_queries.py b/edu_sharing_client/models/mds_queries.py deleted file mode 100644 index 40f436c2..00000000 --- a/edu_sharing_client/models/mds_queries.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsQueries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'base_query': 'str', - 'queries': 'list[MdsQuery]' - } - - attribute_map = { - 'base_query': 'baseQuery', - 'queries': 'queries' - } - - def __init__(self, base_query=None, queries=None): # noqa: E501 - """MdsQueries - a model defined in Swagger""" # noqa: E501 - self._base_query = None - self._queries = None - self.discriminator = None - self.base_query = base_query - self.queries = queries - - @property - def base_query(self): - """Gets the base_query of this MdsQueries. # noqa: E501 - - - :return: The base_query of this MdsQueries. # noqa: E501 - :rtype: str - """ - return self._base_query - - @base_query.setter - def base_query(self, base_query): - """Sets the base_query of this MdsQueries. - - - :param base_query: The base_query of this MdsQueries. # noqa: E501 - :type: str - """ - if base_query is None: - raise ValueError("Invalid value for `base_query`, must not be `None`") # noqa: E501 - - self._base_query = base_query - - @property - def queries(self): - """Gets the queries of this MdsQueries. # noqa: E501 - - - :return: The queries of this MdsQueries. # noqa: E501 - :rtype: list[MdsQuery] - """ - return self._queries - - @queries.setter - def queries(self, queries): - """Sets the queries of this MdsQueries. - - - :param queries: The queries of this MdsQueries. 
# noqa: E501 - :type: list[MdsQuery] - """ - if queries is None: - raise ValueError("Invalid value for `queries`, must not be `None`") # noqa: E501 - - self._queries = queries - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsQueries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsQueries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_query.py b/edu_sharing_client/models/mds_query.py deleted file mode 100644 index 29d15511..00000000 --- a/edu_sharing_client/models/mds_query.py +++ /dev/null @@ -1,328 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsQuery(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'criteriaboxid': 'str', - 'handlerclass': 'str', - 'join': 'str', - 'label': 'str', - 'layout': 'str', - 'properties': 'list[MdsQueryProperty]', - 'statement': 'str', - 'stylename': 'str', - 'widget': 'str' - } - - attribute_map = { - 'criteriaboxid': 'criteriaboxid', - 'handlerclass': 'handlerclass', - 'join': 'join', - 'label': 'label', - 'layout': 'layout', - 'properties': 'properties', - 'statement': 'statement', - 'stylename': 'stylename', - 'widget': 'widget' - } - - def __init__(self, criteriaboxid=None, handlerclass=None, join=None, label=None, layout=None, properties=None, statement=None, stylename=None, widget=None): # noqa: E501 - """MdsQuery - a model defined in Swagger""" # noqa: E501 - self._criteriaboxid = None - self._handlerclass = None - self._join = None - self._label = None - self._layout = None - self._properties = None - self._statement = None - self._stylename = None - self._widget = None - self.discriminator = None - self.criteriaboxid = criteriaboxid - self.handlerclass = handlerclass - self.join = join - self.label = label - self.layout = layout - self.properties = properties - self.statement = statement - self.stylename = stylename - self.widget = widget - - @property - def criteriaboxid(self): - """Gets the criteriaboxid of this MdsQuery. 
# noqa: E501 - - - :return: The criteriaboxid of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._criteriaboxid - - @criteriaboxid.setter - def criteriaboxid(self, criteriaboxid): - """Sets the criteriaboxid of this MdsQuery. - - - :param criteriaboxid: The criteriaboxid of this MdsQuery. # noqa: E501 - :type: str - """ - if criteriaboxid is None: - raise ValueError("Invalid value for `criteriaboxid`, must not be `None`") # noqa: E501 - - self._criteriaboxid = criteriaboxid - - @property - def handlerclass(self): - """Gets the handlerclass of this MdsQuery. # noqa: E501 - - - :return: The handlerclass of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._handlerclass - - @handlerclass.setter - def handlerclass(self, handlerclass): - """Sets the handlerclass of this MdsQuery. - - - :param handlerclass: The handlerclass of this MdsQuery. # noqa: E501 - :type: str - """ - if handlerclass is None: - raise ValueError("Invalid value for `handlerclass`, must not be `None`") # noqa: E501 - - self._handlerclass = handlerclass - - @property - def join(self): - """Gets the join of this MdsQuery. # noqa: E501 - - - :return: The join of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._join - - @join.setter - def join(self, join): - """Sets the join of this MdsQuery. - - - :param join: The join of this MdsQuery. # noqa: E501 - :type: str - """ - if join is None: - raise ValueError("Invalid value for `join`, must not be `None`") # noqa: E501 - - self._join = join - - @property - def label(self): - """Gets the label of this MdsQuery. # noqa: E501 - - - :return: The label of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._label - - @label.setter - def label(self, label): - """Sets the label of this MdsQuery. - - - :param label: The label of this MdsQuery. # noqa: E501 - :type: str - """ - if label is None: - raise ValueError("Invalid value for `label`, must not be `None`") # noqa: E501 - - self._label = label - - @property - def layout(self): - """Gets the layout of this MdsQuery. # noqa: E501 - - - :return: The layout of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._layout - - @layout.setter - def layout(self, layout): - """Sets the layout of this MdsQuery. - - - :param layout: The layout of this MdsQuery. # noqa: E501 - :type: str - """ - if layout is None: - raise ValueError("Invalid value for `layout`, must not be `None`") # noqa: E501 - - self._layout = layout - - @property - def properties(self): - """Gets the properties of this MdsQuery. # noqa: E501 - - - :return: The properties of this MdsQuery. # noqa: E501 - :rtype: list[MdsQueryProperty] - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this MdsQuery. - - - :param properties: The properties of this MdsQuery. # noqa: E501 - :type: list[MdsQueryProperty] - """ - if properties is None: - raise ValueError("Invalid value for `properties`, must not be `None`") # noqa: E501 - - self._properties = properties - - @property - def statement(self): - """Gets the statement of this MdsQuery. # noqa: E501 - - - :return: The statement of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._statement - - @statement.setter - def statement(self, statement): - """Sets the statement of this MdsQuery. - - - :param statement: The statement of this MdsQuery. 
# noqa: E501 - :type: str - """ - if statement is None: - raise ValueError("Invalid value for `statement`, must not be `None`") # noqa: E501 - - self._statement = statement - - @property - def stylename(self): - """Gets the stylename of this MdsQuery. # noqa: E501 - - - :return: The stylename of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._stylename - - @stylename.setter - def stylename(self, stylename): - """Sets the stylename of this MdsQuery. - - - :param stylename: The stylename of this MdsQuery. # noqa: E501 - :type: str - """ - if stylename is None: - raise ValueError("Invalid value for `stylename`, must not be `None`") # noqa: E501 - - self._stylename = stylename - - @property - def widget(self): - """Gets the widget of this MdsQuery. # noqa: E501 - - - :return: The widget of this MdsQuery. # noqa: E501 - :rtype: str - """ - return self._widget - - @widget.setter - def widget(self, widget): - """Sets the widget of this MdsQuery. - - - :param widget: The widget of this MdsQuery. # noqa: E501 - :type: str - """ - if widget is None: - raise ValueError("Invalid value for `widget`, must not be `None`") # noqa: E501 - - self._widget = widget - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsQuery, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsQuery): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_query_criteria.py b/edu_sharing_client/models/mds_query_criteria.py deleted file mode 100644 index 83b856d1..00000000 --- a/edu_sharing_client/models/mds_query_criteria.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsQueryCriteria(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - '_property': 'str', - 'values': 'list[str]' - } - - attribute_map = { - '_property': 'property', - 'values': 'values' - } - - def __init__(self, _property=None, values=None): # noqa: E501 - """MdsQueryCriteria - a model defined in Swagger""" # noqa: E501 - self.__property = None - self._values = None - self.discriminator = None - self._property = _property - self.values = values - - @property - def _property(self): - """Gets the _property of this MdsQueryCriteria. # noqa: E501 - - - :return: The _property of this MdsQueryCriteria. # noqa: E501 - :rtype: str - """ - return self.__property - - @_property.setter - def _property(self, _property): - """Sets the _property of this MdsQueryCriteria. - - - :param _property: The _property of this MdsQueryCriteria. # noqa: E501 - :type: str - """ - if _property is None: - raise ValueError("Invalid value for `_property`, must not be `None`") # noqa: E501 - - self.__property = _property - - @property - def values(self): - """Gets the values of this MdsQueryCriteria. # noqa: E501 - - - :return: The values of this MdsQueryCriteria. # noqa: E501 - :rtype: list[str] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this MdsQueryCriteria. - - - :param values: The values of this MdsQueryCriteria. # noqa: E501 - :type: list[str] - """ - if values is None: - raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501 - - self._values = values - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsQueryCriteria, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsQueryCriteria): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_query_property.py b/edu_sharing_client/models/mds_query_property.py deleted file mode 100644 index 0ae24fa3..00000000 --- a/edu_sharing_client/models/mds_query_property.py +++ /dev/null @@ -1,652 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsQueryProperty(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'label': 'str', - 'label_hint': 'str', - 'form_height': 'str', - 'form_length': 'str', - 'widget': 'str', - 'widget_title': 'str', - 'copy_from': 'list[str]', - 'parameters': 'list[MdsQueryPropertyParameter]', - 'values': 'list[MdsQueryPropertyValue]', - 'default_values': 'list[str]', - 'multiple': 'bool', - 'place_holder': 'str', - 'style_name': 'str', - 'style_name_label': 'str', - 'type': 'str', - 'validators': 'list[str]', - 'statement': 'str', - 'multiple_join': 'str', - 'toogle': 'bool', - 'init_by_get_param': 'str' - } - - attribute_map = { - 'name': 'name', - 'label': 'label', - 'label_hint': 'labelHint', - 'form_height': 'formHeight', - 'form_length': 'formLength', - 'widget': 'widget', - 'widget_title': 'widgetTitle', - 'copy_from': 'copyFrom', - 'parameters': 'parameters', - 'values': 'values', - 'default_values': 'defaultValues', - 'multiple': 'multiple', - 'place_holder': 'placeHolder', - 'style_name': 'styleName', - 'style_name_label': 'styleNameLabel', - 'type': 'type', - 'validators': 'validators', - 'statement': 'statement', - 'multiple_join': 'multipleJoin', - 'toogle': 'toogle', - 'init_by_get_param': 'initByGetParam' - } - - def __init__(self, name=None, label=None, label_hint=None, form_height=None, form_length=None, widget=None, widget_title=None, copy_from=None, parameters=None, values=None, default_values=None, multiple=False, place_holder=None, style_name=None, style_name_label=None, type=None, validators=None, statement=None, multiple_join=None, toogle=False, init_by_get_param=None): # noqa: E501 - """MdsQueryProperty - a model defined in Swagger""" # noqa: E501 - self._name = None - self._label = None - self._label_hint = None - self._form_height = None - self._form_length = None - self._widget = None - self._widget_title = None - self._copy_from = None - self._parameters = None - self._values = None - self._default_values = None - self._multiple = None - self._place_holder = None - self._style_name = None - self._style_name_label = None - self._type = None - self._validators = None - self._statement = None - self._multiple_join = None - self._toogle = None - self._init_by_get_param = None - self.discriminator = None - self.name = name - self.label = label - self.label_hint = label_hint - self.form_height = form_height - self.form_length = form_length - self.widget = widget - self.widget_title = widget_title - self.copy_from = copy_from - self.parameters = parameters - self.values = values - self.default_values = default_values - self.multiple = multiple - self.place_holder = place_holder - self.style_name = style_name - self.style_name_label = style_name_label - self.type = type - self.validators = validators - self.statement = statement - self.multiple_join = multiple_join - self.toogle = toogle - self.init_by_get_param = init_by_get_param - - @property - def name(self): - """Gets the name of this MdsQueryProperty. # noqa: E501 - - - :return: The name of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsQueryProperty. - - - :param name: The name of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def label(self): - """Gets the label of this MdsQueryProperty. # noqa: E501 - - - :return: The label of this MdsQueryProperty. 
# noqa: E501 - :rtype: str - """ - return self._label - - @label.setter - def label(self, label): - """Sets the label of this MdsQueryProperty. - - - :param label: The label of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if label is None: - raise ValueError("Invalid value for `label`, must not be `None`") # noqa: E501 - - self._label = label - - @property - def label_hint(self): - """Gets the label_hint of this MdsQueryProperty. # noqa: E501 - - - :return: The label_hint of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._label_hint - - @label_hint.setter - def label_hint(self, label_hint): - """Sets the label_hint of this MdsQueryProperty. - - - :param label_hint: The label_hint of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if label_hint is None: - raise ValueError("Invalid value for `label_hint`, must not be `None`") # noqa: E501 - - self._label_hint = label_hint - - @property - def form_height(self): - """Gets the form_height of this MdsQueryProperty. # noqa: E501 - - - :return: The form_height of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._form_height - - @form_height.setter - def form_height(self, form_height): - """Sets the form_height of this MdsQueryProperty. - - - :param form_height: The form_height of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if form_height is None: - raise ValueError("Invalid value for `form_height`, must not be `None`") # noqa: E501 - - self._form_height = form_height - - @property - def form_length(self): - """Gets the form_length of this MdsQueryProperty. # noqa: E501 - - - :return: The form_length of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._form_length - - @form_length.setter - def form_length(self, form_length): - """Sets the form_length of this MdsQueryProperty. - - - :param form_length: The form_length of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if form_length is None: - raise ValueError("Invalid value for `form_length`, must not be `None`") # noqa: E501 - - self._form_length = form_length - - @property - def widget(self): - """Gets the widget of this MdsQueryProperty. # noqa: E501 - - - :return: The widget of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._widget - - @widget.setter - def widget(self, widget): - """Sets the widget of this MdsQueryProperty. - - - :param widget: The widget of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if widget is None: - raise ValueError("Invalid value for `widget`, must not be `None`") # noqa: E501 - - self._widget = widget - - @property - def widget_title(self): - """Gets the widget_title of this MdsQueryProperty. # noqa: E501 - - - :return: The widget_title of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._widget_title - - @widget_title.setter - def widget_title(self, widget_title): - """Sets the widget_title of this MdsQueryProperty. - - - :param widget_title: The widget_title of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if widget_title is None: - raise ValueError("Invalid value for `widget_title`, must not be `None`") # noqa: E501 - - self._widget_title = widget_title - - @property - def copy_from(self): - """Gets the copy_from of this MdsQueryProperty. # noqa: E501 - - - :return: The copy_from of this MdsQueryProperty. # noqa: E501 - :rtype: list[str] - """ - return self._copy_from - - @copy_from.setter - def copy_from(self, copy_from): - """Sets the copy_from of this MdsQueryProperty. 
- - - :param copy_from: The copy_from of this MdsQueryProperty. # noqa: E501 - :type: list[str] - """ - if copy_from is None: - raise ValueError("Invalid value for `copy_from`, must not be `None`") # noqa: E501 - - self._copy_from = copy_from - - @property - def parameters(self): - """Gets the parameters of this MdsQueryProperty. # noqa: E501 - - - :return: The parameters of this MdsQueryProperty. # noqa: E501 - :rtype: list[MdsQueryPropertyParameter] - """ - return self._parameters - - @parameters.setter - def parameters(self, parameters): - """Sets the parameters of this MdsQueryProperty. - - - :param parameters: The parameters of this MdsQueryProperty. # noqa: E501 - :type: list[MdsQueryPropertyParameter] - """ - if parameters is None: - raise ValueError("Invalid value for `parameters`, must not be `None`") # noqa: E501 - - self._parameters = parameters - - @property - def values(self): - """Gets the values of this MdsQueryProperty. # noqa: E501 - - - :return: The values of this MdsQueryProperty. # noqa: E501 - :rtype: list[MdsQueryPropertyValue] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this MdsQueryProperty. - - - :param values: The values of this MdsQueryProperty. # noqa: E501 - :type: list[MdsQueryPropertyValue] - """ - if values is None: - raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501 - - self._values = values - - @property - def default_values(self): - """Gets the default_values of this MdsQueryProperty. # noqa: E501 - - - :return: The default_values of this MdsQueryProperty. # noqa: E501 - :rtype: list[str] - """ - return self._default_values - - @default_values.setter - def default_values(self, default_values): - """Sets the default_values of this MdsQueryProperty. - - - :param default_values: The default_values of this MdsQueryProperty. # noqa: E501 - :type: list[str] - """ - if default_values is None: - raise ValueError("Invalid value for `default_values`, must not be `None`") # noqa: E501 - - self._default_values = default_values - - @property - def multiple(self): - """Gets the multiple of this MdsQueryProperty. # noqa: E501 - - - :return: The multiple of this MdsQueryProperty. # noqa: E501 - :rtype: bool - """ - return self._multiple - - @multiple.setter - def multiple(self, multiple): - """Sets the multiple of this MdsQueryProperty. - - - :param multiple: The multiple of this MdsQueryProperty. # noqa: E501 - :type: bool - """ - if multiple is None: - raise ValueError("Invalid value for `multiple`, must not be `None`") # noqa: E501 - - self._multiple = multiple - - @property - def place_holder(self): - """Gets the place_holder of this MdsQueryProperty. # noqa: E501 - - - :return: The place_holder of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._place_holder - - @place_holder.setter - def place_holder(self, place_holder): - """Sets the place_holder of this MdsQueryProperty. - - - :param place_holder: The place_holder of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if place_holder is None: - raise ValueError("Invalid value for `place_holder`, must not be `None`") # noqa: E501 - - self._place_holder = place_holder - - @property - def style_name(self): - """Gets the style_name of this MdsQueryProperty. # noqa: E501 - - - :return: The style_name of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._style_name - - @style_name.setter - def style_name(self, style_name): - """Sets the style_name of this MdsQueryProperty. 
- - - :param style_name: The style_name of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if style_name is None: - raise ValueError("Invalid value for `style_name`, must not be `None`") # noqa: E501 - - self._style_name = style_name - - @property - def style_name_label(self): - """Gets the style_name_label of this MdsQueryProperty. # noqa: E501 - - - :return: The style_name_label of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._style_name_label - - @style_name_label.setter - def style_name_label(self, style_name_label): - """Sets the style_name_label of this MdsQueryProperty. - - - :param style_name_label: The style_name_label of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if style_name_label is None: - raise ValueError("Invalid value for `style_name_label`, must not be `None`") # noqa: E501 - - self._style_name_label = style_name_label - - @property - def type(self): - """Gets the type of this MdsQueryProperty. # noqa: E501 - - - :return: The type of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this MdsQueryProperty. - - - :param type: The type of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if type is None: - raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 - - self._type = type - - @property - def validators(self): - """Gets the validators of this MdsQueryProperty. # noqa: E501 - - - :return: The validators of this MdsQueryProperty. # noqa: E501 - :rtype: list[str] - """ - return self._validators - - @validators.setter - def validators(self, validators): - """Sets the validators of this MdsQueryProperty. - - - :param validators: The validators of this MdsQueryProperty. # noqa: E501 - :type: list[str] - """ - if validators is None: - raise ValueError("Invalid value for `validators`, must not be `None`") # noqa: E501 - - self._validators = validators - - @property - def statement(self): - """Gets the statement of this MdsQueryProperty. # noqa: E501 - - - :return: The statement of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._statement - - @statement.setter - def statement(self, statement): - """Sets the statement of this MdsQueryProperty. - - - :param statement: The statement of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if statement is None: - raise ValueError("Invalid value for `statement`, must not be `None`") # noqa: E501 - - self._statement = statement - - @property - def multiple_join(self): - """Gets the multiple_join of this MdsQueryProperty. # noqa: E501 - - - :return: The multiple_join of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._multiple_join - - @multiple_join.setter - def multiple_join(self, multiple_join): - """Sets the multiple_join of this MdsQueryProperty. - - - :param multiple_join: The multiple_join of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if multiple_join is None: - raise ValueError("Invalid value for `multiple_join`, must not be `None`") # noqa: E501 - - self._multiple_join = multiple_join - - @property - def toogle(self): - """Gets the toogle of this MdsQueryProperty. # noqa: E501 - - - :return: The toogle of this MdsQueryProperty. # noqa: E501 - :rtype: bool - """ - return self._toogle - - @toogle.setter - def toogle(self, toogle): - """Sets the toogle of this MdsQueryProperty. - - - :param toogle: The toogle of this MdsQueryProperty. 
# noqa: E501 - :type: bool - """ - if toogle is None: - raise ValueError("Invalid value for `toogle`, must not be `None`") # noqa: E501 - - self._toogle = toogle - - @property - def init_by_get_param(self): - """Gets the init_by_get_param of this MdsQueryProperty. # noqa: E501 - - - :return: The init_by_get_param of this MdsQueryProperty. # noqa: E501 - :rtype: str - """ - return self._init_by_get_param - - @init_by_get_param.setter - def init_by_get_param(self, init_by_get_param): - """Sets the init_by_get_param of this MdsQueryProperty. - - - :param init_by_get_param: The init_by_get_param of this MdsQueryProperty. # noqa: E501 - :type: str - """ - if init_by_get_param is None: - raise ValueError("Invalid value for `init_by_get_param`, must not be `None`") # noqa: E501 - - self._init_by_get_param = init_by_get_param - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsQueryProperty, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsQueryProperty): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_query_property_parameter.py b/edu_sharing_client/models/mds_query_property_parameter.py deleted file mode 100644 index eea01b53..00000000 --- a/edu_sharing_client/models/mds_query_property_parameter.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsQueryPropertyParameter(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str', - 'value': 'str' - } - - attribute_map = { - 'name': 'name', - 'value': 'value' - } - - def __init__(self, name=None, value=None): # noqa: E501 - """MdsQueryPropertyParameter - a model defined in Swagger""" # noqa: E501 - self._name = None - self._value = None - self.discriminator = None - self.name = name - self.value = value - - @property - def name(self): - """Gets the name of this MdsQueryPropertyParameter. # noqa: E501 - - - :return: The name of this MdsQueryPropertyParameter. 
# noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsQueryPropertyParameter. - - - :param name: The name of this MdsQueryPropertyParameter. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def value(self): - """Gets the value of this MdsQueryPropertyParameter. # noqa: E501 - - - :return: The value of this MdsQueryPropertyParameter. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsQueryPropertyParameter. - - - :param value: The value of this MdsQueryPropertyParameter. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsQueryPropertyParameter, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsQueryPropertyParameter): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_query_property_value.py b/edu_sharing_client/models/mds_query_property_value.py deleted file mode 100644 index 28b3bf82..00000000 --- a/edu_sharing_client/models/mds_query_property_value.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsQueryPropertyValue(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'key': 'str', - 'value': 'str' - } - - attribute_map = { - 'key': 'key', - 'value': 'value' - } - - def __init__(self, key=None, value=None): # noqa: E501 - """MdsQueryPropertyValue - a model defined in Swagger""" # noqa: E501 - self._key = None - self._value = None - self.discriminator = None - self.key = key - self.value = value - - @property - def key(self): - """Gets the key of this MdsQueryPropertyValue. # noqa: E501 - - - :return: The key of this MdsQueryPropertyValue. 
# noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this MdsQueryPropertyValue. - - - :param key: The key of this MdsQueryPropertyValue. # noqa: E501 - :type: str - """ - if key is None: - raise ValueError("Invalid value for `key`, must not be `None`") # noqa: E501 - - self._key = key - - @property - def value(self): - """Gets the value of this MdsQueryPropertyValue. # noqa: E501 - - - :return: The value of this MdsQueryPropertyValue. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsQueryPropertyValue. - - - :param value: The value of this MdsQueryPropertyValue. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsQueryPropertyValue, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsQueryPropertyValue): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_ref.py b/edu_sharing_client/models/mds_ref.py deleted file mode 100644 index e8d4fa17..00000000 --- a/edu_sharing_client/models/mds_ref.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsRef(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'repo': 'str', - 'id': 'str' - } - - attribute_map = { - 'repo': 'repo', - 'id': 'id' - } - - def __init__(self, repo=None, id=None): # noqa: E501 - """MdsRef - a model defined in Swagger""" # noqa: E501 - self._repo = None - self._id = None - self.discriminator = None - self.repo = repo - self.id = id - - @property - def repo(self): - """Gets the repo of this MdsRef. # noqa: E501 - - - :return: The repo of this MdsRef. # noqa: E501 - :rtype: str - """ - return self._repo - - @repo.setter - def repo(self, repo): - """Sets the repo of this MdsRef. - - - :param repo: The repo of this MdsRef. 
# noqa: E501 - :type: str - """ - if repo is None: - raise ValueError("Invalid value for `repo`, must not be `None`") # noqa: E501 - - self._repo = repo - - @property - def id(self): - """Gets the id of this MdsRef. # noqa: E501 - - - :return: The id of this MdsRef. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this MdsRef. - - - :param id: The id of this MdsRef. # noqa: E501 - :type: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsRef, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsRef): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_type.py b/edu_sharing_client/models/mds_type.py deleted file mode 100644 index ccf45205..00000000 --- a/edu_sharing_client/models/mds_type.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsType(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'type': 'str', - 'properties': 'list[MdsProperty]' - } - - attribute_map = { - 'type': 'type', - 'properties': 'properties' - } - - def __init__(self, type=None, properties=None): # noqa: E501 - """MdsType - a model defined in Swagger""" # noqa: E501 - self._type = None - self._properties = None - self.discriminator = None - self.type = type - self.properties = properties - - @property - def type(self): - """Gets the type of this MdsType. # noqa: E501 - - - :return: The type of this MdsType. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this MdsType. - - - :param type: The type of this MdsType. # noqa: E501 - :type: str - """ - if type is None: - raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 - - self._type = type - - @property - def properties(self): - """Gets the properties of this MdsType. 
# noqa: E501 - - - :return: The properties of this MdsType. # noqa: E501 - :rtype: list[MdsProperty] - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this MdsType. - - - :param properties: The properties of this MdsType. # noqa: E501 - :type: list[MdsProperty] - """ - if properties is None: - raise ValueError("Invalid value for `properties`, must not be `None`") # noqa: E501 - - self._properties = properties - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsType, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsType): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_v2.py b/edu_sharing_client/models/mds_v2.py deleted file mode 100644 index 58eb589e..00000000 --- a/edu_sharing_client/models/mds_v2.py +++ /dev/null @@ -1,267 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'create': 'Create', - 'widgets': 'list[WidgetV2]', - 'views': 'list[ViewV2]', - 'groups': 'list[GroupV2]', - 'lists': 'list[ListV2]', - 'sorts': 'list[SortV2]' - } - - attribute_map = { - 'name': 'name', - 'create': 'create', - 'widgets': 'widgets', - 'views': 'views', - 'groups': 'groups', - 'lists': 'lists', - 'sorts': 'sorts' - } - - def __init__(self, name=None, create=None, widgets=None, views=None, groups=None, lists=None, sorts=None): # noqa: E501 - """MdsV2 - a model defined in Swagger""" # noqa: E501 - self._name = None - self._create = None - self._widgets = None - self._views = None - self._groups = None - self._lists = None - self._sorts = None - self.discriminator = None - if name is not None: - self.name = name - if create is not None: - self.create = create - if widgets is not None: - self.widgets = widgets - if views is not None: - self.views = views - if groups is not None: - self.groups = groups - if lists is not None: - self.lists = lists - if sorts is not None: - self.sorts = sorts - - @property - def name(self): - """Gets the name of this MdsV2. # noqa: E501 - - - :return: The name of this MdsV2. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsV2. - - - :param name: The name of this MdsV2. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def create(self): - """Gets the create of this MdsV2. # noqa: E501 - - - :return: The create of this MdsV2. # noqa: E501 - :rtype: Create - """ - return self._create - - @create.setter - def create(self, create): - """Sets the create of this MdsV2. - - - :param create: The create of this MdsV2. # noqa: E501 - :type: Create - """ - - self._create = create - - @property - def widgets(self): - """Gets the widgets of this MdsV2. # noqa: E501 - - - :return: The widgets of this MdsV2. # noqa: E501 - :rtype: list[WidgetV2] - """ - return self._widgets - - @widgets.setter - def widgets(self, widgets): - """Sets the widgets of this MdsV2. - - - :param widgets: The widgets of this MdsV2. # noqa: E501 - :type: list[WidgetV2] - """ - - self._widgets = widgets - - @property - def views(self): - """Gets the views of this MdsV2. # noqa: E501 - - - :return: The views of this MdsV2. # noqa: E501 - :rtype: list[ViewV2] - """ - return self._views - - @views.setter - def views(self, views): - """Sets the views of this MdsV2. - - - :param views: The views of this MdsV2. # noqa: E501 - :type: list[ViewV2] - """ - - self._views = views - - @property - def groups(self): - """Gets the groups of this MdsV2. # noqa: E501 - - - :return: The groups of this MdsV2. # noqa: E501 - :rtype: list[GroupV2] - """ - return self._groups - - @groups.setter - def groups(self, groups): - """Sets the groups of this MdsV2. - - - :param groups: The groups of this MdsV2. # noqa: E501 - :type: list[GroupV2] - """ - - self._groups = groups - - @property - def lists(self): - """Gets the lists of this MdsV2. # noqa: E501 - - - :return: The lists of this MdsV2. # noqa: E501 - :rtype: list[ListV2] - """ - return self._lists - - @lists.setter - def lists(self, lists): - """Sets the lists of this MdsV2. - - - :param lists: The lists of this MdsV2. # noqa: E501 - :type: list[ListV2] - """ - - self._lists = lists - - @property - def sorts(self): - """Gets the sorts of this MdsV2. # noqa: E501 - - - :return: The sorts of this MdsV2. 
# noqa: E501 - :rtype: list[SortV2] - """ - return self._sorts - - @sorts.setter - def sorts(self, sorts): - """Sets the sorts of this MdsV2. - - - :param sorts: The sorts of this MdsV2. # noqa: E501 - :type: list[SortV2] - """ - - self._sorts = sorts - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_view.py b/edu_sharing_client/models/mds_view.py deleted file mode 100644 index d23a2b49..00000000 --- a/edu_sharing_client/models/mds_view.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsView(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'properties': 'list[MdsViewProperty]' - } - - attribute_map = { - 'id': 'id', - 'properties': 'properties' - } - - def __init__(self, id=None, properties=None): # noqa: E501 - """MdsView - a model defined in Swagger""" # noqa: E501 - self._id = None - self._properties = None - self.discriminator = None - self.id = id - self.properties = properties - - @property - def id(self): - """Gets the id of this MdsView. # noqa: E501 - - - :return: The id of this MdsView. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this MdsView. - - - :param id: The id of this MdsView. # noqa: E501 - :type: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id - - @property - def properties(self): - """Gets the properties of this MdsView. # noqa: E501 - - - :return: The properties of this MdsView. # noqa: E501 - :rtype: list[MdsViewProperty] - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this MdsView. - - - :param properties: The properties of this MdsView. 
# noqa: E501 - :type: list[MdsViewProperty] - """ - if properties is None: - raise ValueError("Invalid value for `properties`, must not be `None`") # noqa: E501 - - self._properties = properties - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsView, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsView): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_view_property.py b/edu_sharing_client/models/mds_view_property.py deleted file mode 100644 index f4078513..00000000 --- a/edu_sharing_client/models/mds_view_property.py +++ /dev/null @@ -1,517 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsViewProperty(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'label': 'str', - 'label_hint': 'str', - 'form_height': 'str', - 'form_length': 'str', - 'widget': 'str', - 'widget_title': 'str', - 'copy_from': 'list[str]', - 'parameters': 'list[MdsViewPropertyParameter]', - 'values': 'list[MdsViewPropertyValue]', - 'default_values': 'list[str]', - 'multiple': 'bool', - 'place_holder': 'str', - 'style_name': 'str', - 'style_name_label': 'str', - 'type': 'str' - } - - attribute_map = { - 'name': 'name', - 'label': 'label', - 'label_hint': 'labelHint', - 'form_height': 'formHeight', - 'form_length': 'formLength', - 'widget': 'widget', - 'widget_title': 'widgetTitle', - 'copy_from': 'copyFrom', - 'parameters': 'parameters', - 'values': 'values', - 'default_values': 'defaultValues', - 'multiple': 'multiple', - 'place_holder': 'placeHolder', - 'style_name': 'styleName', - 'style_name_label': 'styleNameLabel', - 'type': 'type' - } - - def __init__(self, name=None, label=None, label_hint=None, form_height=None, form_length=None, widget=None, widget_title=None, copy_from=None, parameters=None, values=None, default_values=None, multiple=False, place_holder=None, style_name=None, style_name_label=None, type=None): # noqa: E501 - """MdsViewProperty - a model defined in Swagger""" # noqa: E501 - self._name = None - self._label = None - self._label_hint = None - self._form_height = None - self._form_length = None - self._widget = None - self._widget_title = None - self._copy_from = None - self._parameters = None - self._values = None - self._default_values = None - self._multiple = None - self._place_holder = None - self._style_name = None - self._style_name_label = None - self._type = None - self.discriminator = None - self.name = name - self.label = label - self.label_hint = label_hint - self.form_height = form_height - self.form_length = form_length - self.widget = widget - self.widget_title = widget_title - self.copy_from = copy_from - self.parameters = parameters - self.values = values - self.default_values = default_values - self.multiple = multiple - self.place_holder = place_holder - self.style_name = style_name - self.style_name_label = style_name_label - self.type = type - - @property - def name(self): - """Gets the name of this MdsViewProperty. # noqa: E501 - - - :return: The name of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsViewProperty. - - - :param name: The name of this MdsViewProperty. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def label(self): - """Gets the label of this MdsViewProperty. # noqa: E501 - - - :return: The label of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._label - - @label.setter - def label(self, label): - """Sets the label of this MdsViewProperty. - - - :param label: The label of this MdsViewProperty. # noqa: E501 - :type: str - """ - if label is None: - raise ValueError("Invalid value for `label`, must not be `None`") # noqa: E501 - - self._label = label - - @property - def label_hint(self): - """Gets the label_hint of this MdsViewProperty. # noqa: E501 - - - :return: The label_hint of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._label_hint - - @label_hint.setter - def label_hint(self, label_hint): - """Sets the label_hint of this MdsViewProperty. - - - :param label_hint: The label_hint of this MdsViewProperty. 
# noqa: E501 - :type: str - """ - if label_hint is None: - raise ValueError("Invalid value for `label_hint`, must not be `None`") # noqa: E501 - - self._label_hint = label_hint - - @property - def form_height(self): - """Gets the form_height of this MdsViewProperty. # noqa: E501 - - - :return: The form_height of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._form_height - - @form_height.setter - def form_height(self, form_height): - """Sets the form_height of this MdsViewProperty. - - - :param form_height: The form_height of this MdsViewProperty. # noqa: E501 - :type: str - """ - if form_height is None: - raise ValueError("Invalid value for `form_height`, must not be `None`") # noqa: E501 - - self._form_height = form_height - - @property - def form_length(self): - """Gets the form_length of this MdsViewProperty. # noqa: E501 - - - :return: The form_length of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._form_length - - @form_length.setter - def form_length(self, form_length): - """Sets the form_length of this MdsViewProperty. - - - :param form_length: The form_length of this MdsViewProperty. # noqa: E501 - :type: str - """ - if form_length is None: - raise ValueError("Invalid value for `form_length`, must not be `None`") # noqa: E501 - - self._form_length = form_length - - @property - def widget(self): - """Gets the widget of this MdsViewProperty. # noqa: E501 - - - :return: The widget of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._widget - - @widget.setter - def widget(self, widget): - """Sets the widget of this MdsViewProperty. - - - :param widget: The widget of this MdsViewProperty. # noqa: E501 - :type: str - """ - if widget is None: - raise ValueError("Invalid value for `widget`, must not be `None`") # noqa: E501 - - self._widget = widget - - @property - def widget_title(self): - """Gets the widget_title of this MdsViewProperty. # noqa: E501 - - - :return: The widget_title of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._widget_title - - @widget_title.setter - def widget_title(self, widget_title): - """Sets the widget_title of this MdsViewProperty. - - - :param widget_title: The widget_title of this MdsViewProperty. # noqa: E501 - :type: str - """ - if widget_title is None: - raise ValueError("Invalid value for `widget_title`, must not be `None`") # noqa: E501 - - self._widget_title = widget_title - - @property - def copy_from(self): - """Gets the copy_from of this MdsViewProperty. # noqa: E501 - - - :return: The copy_from of this MdsViewProperty. # noqa: E501 - :rtype: list[str] - """ - return self._copy_from - - @copy_from.setter - def copy_from(self, copy_from): - """Sets the copy_from of this MdsViewProperty. - - - :param copy_from: The copy_from of this MdsViewProperty. # noqa: E501 - :type: list[str] - """ - if copy_from is None: - raise ValueError("Invalid value for `copy_from`, must not be `None`") # noqa: E501 - - self._copy_from = copy_from - - @property - def parameters(self): - """Gets the parameters of this MdsViewProperty. # noqa: E501 - - - :return: The parameters of this MdsViewProperty. # noqa: E501 - :rtype: list[MdsViewPropertyParameter] - """ - return self._parameters - - @parameters.setter - def parameters(self, parameters): - """Sets the parameters of this MdsViewProperty. - - - :param parameters: The parameters of this MdsViewProperty. 
# noqa: E501 - :type: list[MdsViewPropertyParameter] - """ - if parameters is None: - raise ValueError("Invalid value for `parameters`, must not be `None`") # noqa: E501 - - self._parameters = parameters - - @property - def values(self): - """Gets the values of this MdsViewProperty. # noqa: E501 - - - :return: The values of this MdsViewProperty. # noqa: E501 - :rtype: list[MdsViewPropertyValue] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this MdsViewProperty. - - - :param values: The values of this MdsViewProperty. # noqa: E501 - :type: list[MdsViewPropertyValue] - """ - if values is None: - raise ValueError("Invalid value for `values`, must not be `None`") # noqa: E501 - - self._values = values - - @property - def default_values(self): - """Gets the default_values of this MdsViewProperty. # noqa: E501 - - - :return: The default_values of this MdsViewProperty. # noqa: E501 - :rtype: list[str] - """ - return self._default_values - - @default_values.setter - def default_values(self, default_values): - """Sets the default_values of this MdsViewProperty. - - - :param default_values: The default_values of this MdsViewProperty. # noqa: E501 - :type: list[str] - """ - if default_values is None: - raise ValueError("Invalid value for `default_values`, must not be `None`") # noqa: E501 - - self._default_values = default_values - - @property - def multiple(self): - """Gets the multiple of this MdsViewProperty. # noqa: E501 - - - :return: The multiple of this MdsViewProperty. # noqa: E501 - :rtype: bool - """ - return self._multiple - - @multiple.setter - def multiple(self, multiple): - """Sets the multiple of this MdsViewProperty. - - - :param multiple: The multiple of this MdsViewProperty. # noqa: E501 - :type: bool - """ - if multiple is None: - raise ValueError("Invalid value for `multiple`, must not be `None`") # noqa: E501 - - self._multiple = multiple - - @property - def place_holder(self): - """Gets the place_holder of this MdsViewProperty. # noqa: E501 - - - :return: The place_holder of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._place_holder - - @place_holder.setter - def place_holder(self, place_holder): - """Sets the place_holder of this MdsViewProperty. - - - :param place_holder: The place_holder of this MdsViewProperty. # noqa: E501 - :type: str - """ - if place_holder is None: - raise ValueError("Invalid value for `place_holder`, must not be `None`") # noqa: E501 - - self._place_holder = place_holder - - @property - def style_name(self): - """Gets the style_name of this MdsViewProperty. # noqa: E501 - - - :return: The style_name of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._style_name - - @style_name.setter - def style_name(self, style_name): - """Sets the style_name of this MdsViewProperty. - - - :param style_name: The style_name of this MdsViewProperty. # noqa: E501 - :type: str - """ - if style_name is None: - raise ValueError("Invalid value for `style_name`, must not be `None`") # noqa: E501 - - self._style_name = style_name - - @property - def style_name_label(self): - """Gets the style_name_label of this MdsViewProperty. # noqa: E501 - - - :return: The style_name_label of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._style_name_label - - @style_name_label.setter - def style_name_label(self, style_name_label): - """Sets the style_name_label of this MdsViewProperty. - - - :param style_name_label: The style_name_label of this MdsViewProperty. 
# noqa: E501 - :type: str - """ - if style_name_label is None: - raise ValueError("Invalid value for `style_name_label`, must not be `None`") # noqa: E501 - - self._style_name_label = style_name_label - - @property - def type(self): - """Gets the type of this MdsViewProperty. # noqa: E501 - - - :return: The type of this MdsViewProperty. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this MdsViewProperty. - - - :param type: The type of this MdsViewProperty. # noqa: E501 - :type: str - """ - if type is None: - raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 - - self._type = type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsViewProperty, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsViewProperty): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_view_property_parameter.py b/edu_sharing_client/models/mds_view_property_parameter.py deleted file mode 100644 index e3ecbb6c..00000000 --- a/edu_sharing_client/models/mds_view_property_parameter.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsViewPropertyParameter(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str', - 'value': 'str' - } - - attribute_map = { - 'name': 'name', - 'value': 'value' - } - - def __init__(self, name=None, value=None): # noqa: E501 - """MdsViewPropertyParameter - a model defined in Swagger""" # noqa: E501 - self._name = None - self._value = None - self.discriminator = None - self.name = name - self.value = value - - @property - def name(self): - """Gets the name of this MdsViewPropertyParameter. # noqa: E501 - - - :return: The name of this MdsViewPropertyParameter. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MdsViewPropertyParameter. - - - :param name: The name of this MdsViewPropertyParameter. 
# noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def value(self): - """Gets the value of this MdsViewPropertyParameter. # noqa: E501 - - - :return: The value of this MdsViewPropertyParameter. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsViewPropertyParameter. - - - :param value: The value of this MdsViewPropertyParameter. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsViewPropertyParameter, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsViewPropertyParameter): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mds_view_property_value.py b/edu_sharing_client/models/mds_view_property_value.py deleted file mode 100644 index f472aa2c..00000000 --- a/edu_sharing_client/models/mds_view_property_value.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MdsViewPropertyValue(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'key': 'str', - 'value': 'str' - } - - attribute_map = { - 'key': 'key', - 'value': 'value' - } - - def __init__(self, key=None, value=None): # noqa: E501 - """MdsViewPropertyValue - a model defined in Swagger""" # noqa: E501 - self._key = None - self._value = None - self.discriminator = None - self.key = key - self.value = value - - @property - def key(self): - """Gets the key of this MdsViewPropertyValue. # noqa: E501 - - - :return: The key of this MdsViewPropertyValue. # noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this MdsViewPropertyValue. - - - :param key: The key of this MdsViewPropertyValue. 
# noqa: E501 - :type: str - """ - if key is None: - raise ValueError("Invalid value for `key`, must not be `None`") # noqa: E501 - - self._key = key - - @property - def value(self): - """Gets the value of this MdsViewPropertyValue. # noqa: E501 - - - :return: The value of this MdsViewPropertyValue. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this MdsViewPropertyValue. - - - :param value: The value of this MdsViewPropertyValue. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MdsViewPropertyValue, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MdsViewPropertyValue): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mediacenter.py b/edu_sharing_client/models/mediacenter.py deleted file mode 100644 index b748d494..00000000 --- a/edu_sharing_client/models/mediacenter.py +++ /dev/null @@ -1,274 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Mediacenter(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'ref': 'NodeRef', - 'editable': 'bool', - 'authority_name': 'str', - 'authority_type': 'str', - 'group_name': 'str', - 'profile': 'GroupProfile', - 'administration_access': 'bool' - } - - attribute_map = { - 'ref': 'ref', - 'editable': 'editable', - 'authority_name': 'authorityName', - 'authority_type': 'authorityType', - 'group_name': 'groupName', - 'profile': 'profile', - 'administration_access': 'administrationAccess' - } - - def __init__(self, ref=None, editable=False, authority_name=None, authority_type=None, group_name=None, profile=None, administration_access=False): # noqa: E501 - """Mediacenter - a model defined in Swagger""" # noqa: E501 - self._ref = None - self._editable = None - self._authority_name = None - self._authority_type = None - self._group_name = None - self._profile = None - self._administration_access = None - self.discriminator = None - if ref is not None: - self.ref = ref - if editable is not None: - self.editable = editable - self.authority_name = authority_name - if authority_type is not None: - self.authority_type = authority_type - if group_name is not None: - self.group_name = group_name - if profile is not None: - self.profile = profile - if administration_access is not None: - self.administration_access = administration_access - - @property - def ref(self): - """Gets the ref of this Mediacenter. # noqa: E501 - - - :return: The ref of this Mediacenter. # noqa: E501 - :rtype: NodeRef - """ - return self._ref - - @ref.setter - def ref(self, ref): - """Sets the ref of this Mediacenter. - - - :param ref: The ref of this Mediacenter. # noqa: E501 - :type: NodeRef - """ - - self._ref = ref - - @property - def editable(self): - """Gets the editable of this Mediacenter. # noqa: E501 - - - :return: The editable of this Mediacenter. # noqa: E501 - :rtype: bool - """ - return self._editable - - @editable.setter - def editable(self, editable): - """Sets the editable of this Mediacenter. - - - :param editable: The editable of this Mediacenter. # noqa: E501 - :type: bool - """ - - self._editable = editable - - @property - def authority_name(self): - """Gets the authority_name of this Mediacenter. # noqa: E501 - - - :return: The authority_name of this Mediacenter. # noqa: E501 - :rtype: str - """ - return self._authority_name - - @authority_name.setter - def authority_name(self, authority_name): - """Sets the authority_name of this Mediacenter. - - - :param authority_name: The authority_name of this Mediacenter. # noqa: E501 - :type: str - """ - if authority_name is None: - raise ValueError("Invalid value for `authority_name`, must not be `None`") # noqa: E501 - - self._authority_name = authority_name - - @property - def authority_type(self): - """Gets the authority_type of this Mediacenter. # noqa: E501 - - - :return: The authority_type of this Mediacenter. # noqa: E501 - :rtype: str - """ - return self._authority_type - - @authority_type.setter - def authority_type(self, authority_type): - """Sets the authority_type of this Mediacenter. - - - :param authority_type: The authority_type of this Mediacenter. # noqa: E501 - :type: str - """ - allowed_values = ["USER", "GROUP", "OWNER", "EVERYONE", "GUEST"] # noqa: E501 - if authority_type not in allowed_values: - raise ValueError( - "Invalid value for `authority_type` ({0}), must be one of {1}" # noqa: E501 - .format(authority_type, allowed_values) - ) - - self._authority_type = authority_type - - @property - def group_name(self): - """Gets the group_name of this Mediacenter. 
# noqa: E501 - - - :return: The group_name of this Mediacenter. # noqa: E501 - :rtype: str - """ - return self._group_name - - @group_name.setter - def group_name(self, group_name): - """Sets the group_name of this Mediacenter. - - - :param group_name: The group_name of this Mediacenter. # noqa: E501 - :type: str - """ - - self._group_name = group_name - - @property - def profile(self): - """Gets the profile of this Mediacenter. # noqa: E501 - - - :return: The profile of this Mediacenter. # noqa: E501 - :rtype: GroupProfile - """ - return self._profile - - @profile.setter - def profile(self, profile): - """Sets the profile of this Mediacenter. - - - :param profile: The profile of this Mediacenter. # noqa: E501 - :type: GroupProfile - """ - - self._profile = profile - - @property - def administration_access(self): - """Gets the administration_access of this Mediacenter. # noqa: E501 - - - :return: The administration_access of this Mediacenter. # noqa: E501 - :rtype: bool - """ - return self._administration_access - - @administration_access.setter - def administration_access(self, administration_access): - """Sets the administration_access of this Mediacenter. - - - :param administration_access: The administration_access of this Mediacenter. # noqa: E501 - :type: bool - """ - - self._administration_access = administration_access - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Mediacenter, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Mediacenter): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mediacenter_profile_extension.py b/edu_sharing_client/models/mediacenter_profile_extension.py deleted file mode 100644 index f98f5701..00000000 --- a/edu_sharing_client/models/mediacenter_profile_extension.py +++ /dev/null @@ -1,247 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MediacenterProfileExtension(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'str', - 'location': 'str', - 'district_abbreviation': 'str', - 'main_url': 'str', - 'catalogs': 'list[Catalog]', - 'content_status': 'str' - } - - attribute_map = { - 'id': 'id', - 'location': 'location', - 'district_abbreviation': 'districtAbbreviation', - 'main_url': 'mainUrl', - 'catalogs': 'catalogs', - 'content_status': 'contentStatus' - } - - def __init__(self, id=None, location=None, district_abbreviation=None, main_url=None, catalogs=None, content_status=None): # noqa: E501 - """MediacenterProfileExtension - a model defined in Swagger""" # noqa: E501 - self._id = None - self._location = None - self._district_abbreviation = None - self._main_url = None - self._catalogs = None - self._content_status = None - self.discriminator = None - if id is not None: - self.id = id - if location is not None: - self.location = location - if district_abbreviation is not None: - self.district_abbreviation = district_abbreviation - if main_url is not None: - self.main_url = main_url - if catalogs is not None: - self.catalogs = catalogs - if content_status is not None: - self.content_status = content_status - - @property - def id(self): - """Gets the id of this MediacenterProfileExtension. # noqa: E501 - - - :return: The id of this MediacenterProfileExtension. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this MediacenterProfileExtension. - - - :param id: The id of this MediacenterProfileExtension. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def location(self): - """Gets the location of this MediacenterProfileExtension. # noqa: E501 - - - :return: The location of this MediacenterProfileExtension. # noqa: E501 - :rtype: str - """ - return self._location - - @location.setter - def location(self, location): - """Sets the location of this MediacenterProfileExtension. - - - :param location: The location of this MediacenterProfileExtension. # noqa: E501 - :type: str - """ - - self._location = location - - @property - def district_abbreviation(self): - """Gets the district_abbreviation of this MediacenterProfileExtension. # noqa: E501 - - - :return: The district_abbreviation of this MediacenterProfileExtension. # noqa: E501 - :rtype: str - """ - return self._district_abbreviation - - @district_abbreviation.setter - def district_abbreviation(self, district_abbreviation): - """Sets the district_abbreviation of this MediacenterProfileExtension. - - - :param district_abbreviation: The district_abbreviation of this MediacenterProfileExtension. # noqa: E501 - :type: str - """ - - self._district_abbreviation = district_abbreviation - - @property - def main_url(self): - """Gets the main_url of this MediacenterProfileExtension. # noqa: E501 - - - :return: The main_url of this MediacenterProfileExtension. # noqa: E501 - :rtype: str - """ - return self._main_url - - @main_url.setter - def main_url(self, main_url): - """Sets the main_url of this MediacenterProfileExtension. - - - :param main_url: The main_url of this MediacenterProfileExtension. # noqa: E501 - :type: str - """ - - self._main_url = main_url - - @property - def catalogs(self): - """Gets the catalogs of this MediacenterProfileExtension. # noqa: E501 - - - :return: The catalogs of this MediacenterProfileExtension. # noqa: E501 - :rtype: list[Catalog] - """ - return self._catalogs - - @catalogs.setter - def catalogs(self, catalogs): - """Sets the catalogs of this MediacenterProfileExtension. 
- - - :param catalogs: The catalogs of this MediacenterProfileExtension. # noqa: E501 - :type: list[Catalog] - """ - - self._catalogs = catalogs - - @property - def content_status(self): - """Gets the content_status of this MediacenterProfileExtension. # noqa: E501 - - - :return: The content_status of this MediacenterProfileExtension. # noqa: E501 - :rtype: str - """ - return self._content_status - - @content_status.setter - def content_status(self, content_status): - """Sets the content_status of this MediacenterProfileExtension. - - - :param content_status: The content_status of this MediacenterProfileExtension. # noqa: E501 - :type: str - """ - allowed_values = ["Activated", "Deactivated"] # noqa: E501 - if content_status not in allowed_values: - raise ValueError( - "Invalid value for `content_status` ({0}), must be one of {1}" # noqa: E501 - .format(content_status, allowed_values) - ) - - self._content_status = content_status - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MediacenterProfileExtension, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MediacenterProfileExtension): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/mediacenters_import_result.py b/edu_sharing_client/models/mediacenters_import_result.py deleted file mode 100644 index f7b32094..00000000 --- a/edu_sharing_client/models/mediacenters_import_result.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MediacentersImportResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'rows': 'int' - } - - attribute_map = { - 'rows': 'rows' - } - - def __init__(self, rows=None): # noqa: E501 - """MediacentersImportResult - a model defined in Swagger""" # noqa: E501 - self._rows = None - self.discriminator = None - if rows is not None: - self.rows = rows - - @property - def rows(self): - """Gets the rows of this MediacentersImportResult. # noqa: E501 - - - :return: The rows of this MediacentersImportResult. 
# noqa: E501 - :rtype: int - """ - return self._rows - - @rows.setter - def rows(self, rows): - """Sets the rows of this MediacentersImportResult. - - - :param rows: The rows of this MediacentersImportResult. # noqa: E501 - :type: int - """ - - self._rows = rows - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MediacentersImportResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MediacentersImportResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/menu_entry.py b/edu_sharing_client/models/menu_entry.py deleted file mode 100644 index 81269325..00000000 --- a/edu_sharing_client/models/menu_entry.py +++ /dev/null @@ -1,345 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MenuEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'position': 'int', - 'icon': 'str', - 'name': 'str', - 'url': 'str', - 'is_disabled': 'bool', - 'is_seperate': 'bool', - 'is_seperate_bottom': 'bool', - 'only_desktop': 'bool', - 'path': 'str', - 'scope': 'str' - } - - attribute_map = { - 'position': 'position', - 'icon': 'icon', - 'name': 'name', - 'url': 'url', - 'is_disabled': 'isDisabled', - 'is_seperate': 'isSeperate', - 'is_seperate_bottom': 'isSeperateBottom', - 'only_desktop': 'onlyDesktop', - 'path': 'path', - 'scope': 'scope' - } - - def __init__(self, position=None, icon=None, name=None, url=None, is_disabled=False, is_seperate=False, is_seperate_bottom=False, only_desktop=False, path=None, scope=None): # noqa: E501 - """MenuEntry - a model defined in Swagger""" # noqa: E501 - self._position = None - self._icon = None - self._name = None - self._url = None - self._is_disabled = None - self._is_seperate = None - self._is_seperate_bottom = None - self._only_desktop = None - self._path = None - self._scope = None - self.discriminator = None - if position is not None: - self.position = position - if icon is not None: - self.icon = icon - if name is not None: - self.name = name - if url is not None: - self.url = url - if is_disabled is not None: - self.is_disabled = is_disabled - if is_seperate is not None: - self.is_seperate = is_seperate - if is_seperate_bottom is not None: - self.is_seperate_bottom = is_seperate_bottom - if only_desktop is not None: - self.only_desktop = only_desktop - if path is not None: - self.path = path - if scope is not None: - self.scope = scope - - @property - def position(self): - """Gets the position of this MenuEntry. # noqa: E501 - - - :return: The position of this MenuEntry. # noqa: E501 - :rtype: int - """ - return self._position - - @position.setter - def position(self, position): - """Sets the position of this MenuEntry. - - - :param position: The position of this MenuEntry. # noqa: E501 - :type: int - """ - - self._position = position - - @property - def icon(self): - """Gets the icon of this MenuEntry. # noqa: E501 - - - :return: The icon of this MenuEntry. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this MenuEntry. - - - :param icon: The icon of this MenuEntry. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def name(self): - """Gets the name of this MenuEntry. # noqa: E501 - - - :return: The name of this MenuEntry. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MenuEntry. - - - :param name: The name of this MenuEntry. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def url(self): - """Gets the url of this MenuEntry. # noqa: E501 - - - :return: The url of this MenuEntry. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this MenuEntry. - - - :param url: The url of this MenuEntry. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def is_disabled(self): - """Gets the is_disabled of this MenuEntry. # noqa: E501 - - - :return: The is_disabled of this MenuEntry. # noqa: E501 - :rtype: bool - """ - return self._is_disabled - - @is_disabled.setter - def is_disabled(self, is_disabled): - """Sets the is_disabled of this MenuEntry. - - - :param is_disabled: The is_disabled of this MenuEntry. 
# noqa: E501 - :type: bool - """ - - self._is_disabled = is_disabled - - @property - def is_seperate(self): - """Gets the is_seperate of this MenuEntry. # noqa: E501 - - - :return: The is_seperate of this MenuEntry. # noqa: E501 - :rtype: bool - """ - return self._is_seperate - - @is_seperate.setter - def is_seperate(self, is_seperate): - """Sets the is_seperate of this MenuEntry. - - - :param is_seperate: The is_seperate of this MenuEntry. # noqa: E501 - :type: bool - """ - - self._is_seperate = is_seperate - - @property - def is_seperate_bottom(self): - """Gets the is_seperate_bottom of this MenuEntry. # noqa: E501 - - - :return: The is_seperate_bottom of this MenuEntry. # noqa: E501 - :rtype: bool - """ - return self._is_seperate_bottom - - @is_seperate_bottom.setter - def is_seperate_bottom(self, is_seperate_bottom): - """Sets the is_seperate_bottom of this MenuEntry. - - - :param is_seperate_bottom: The is_seperate_bottom of this MenuEntry. # noqa: E501 - :type: bool - """ - - self._is_seperate_bottom = is_seperate_bottom - - @property - def only_desktop(self): - """Gets the only_desktop of this MenuEntry. # noqa: E501 - - - :return: The only_desktop of this MenuEntry. # noqa: E501 - :rtype: bool - """ - return self._only_desktop - - @only_desktop.setter - def only_desktop(self, only_desktop): - """Sets the only_desktop of this MenuEntry. - - - :param only_desktop: The only_desktop of this MenuEntry. # noqa: E501 - :type: bool - """ - - self._only_desktop = only_desktop - - @property - def path(self): - """Gets the path of this MenuEntry. # noqa: E501 - - - :return: The path of this MenuEntry. # noqa: E501 - :rtype: str - """ - return self._path - - @path.setter - def path(self, path): - """Sets the path of this MenuEntry. - - - :param path: The path of this MenuEntry. # noqa: E501 - :type: str - """ - - self._path = path - - @property - def scope(self): - """Gets the scope of this MenuEntry. # noqa: E501 - - - :return: The scope of this MenuEntry. # noqa: E501 - :rtype: str - """ - return self._scope - - @scope.setter - def scope(self, scope): - """Sets the scope of this MenuEntry. - - - :param scope: The scope of this MenuEntry. 
# noqa: E501 - :type: str - """ - - self._scope = scope - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MenuEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MenuEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/metadata_set_info.py b/edu_sharing_client/models/metadata_set_info.py deleted file mode 100644 index 12b05f2d..00000000 --- a/edu_sharing_client/models/metadata_set_info.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class MetadataSetInfo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'name': 'str' - } - - attribute_map = { - 'id': 'id', - 'name': 'name' - } - - def __init__(self, id=None, name=None): # noqa: E501 - """MetadataSetInfo - a model defined in Swagger""" # noqa: E501 - self._id = None - self._name = None - self.discriminator = None - if id is not None: - self.id = id - if name is not None: - self.name = name - - @property - def id(self): - """Gets the id of this MetadataSetInfo. # noqa: E501 - - - :return: The id of this MetadataSetInfo. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this MetadataSetInfo. - - - :param id: The id of this MetadataSetInfo. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def name(self): - """Gets the name of this MetadataSetInfo. # noqa: E501 - - - :return: The name of this MetadataSetInfo. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this MetadataSetInfo. - - - :param name: The name of this MetadataSetInfo. 
# noqa: E501 - :type: str - """ - - self._name = name - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(MetadataSetInfo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, MetadataSetInfo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node.py b/edu_sharing_client/models/node.py deleted file mode 100644 index 106dc2c0..00000000 --- a/edu_sharing_client/models/node.py +++ /dev/null @@ -1,821 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Node(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'remote': 'Remote', - 'content': 'Content', - 'license': 'License', - 'is_directory': 'bool', - 'comment_count': 'int', - 'rating': 'AccumulatedRatings', - 'ref': 'NodeRef', - 'parent': 'NodeRef', - 'type': 'str', - 'aspects': 'list[str]', - 'name': 'str', - 'title': 'str', - 'metadataset': 'str', - 'repository_type': 'str', - 'created_at': 'datetime', - 'created_by': 'Person', - 'modified_at': 'datetime', - 'modified_by': 'Person', - 'access': 'list[str]', - 'download_url': 'str', - 'properties': 'dict(str, list[str])', - 'mimetype': 'str', - 'mediatype': 'str', - 'size': 'str', - 'preview': 'Preview', - 'icon_url': 'str', - 'collection': 'Collection', - 'owner': 'Person' - } - - attribute_map = { - 'remote': 'remote', - 'content': 'content', - 'license': 'license', - 'is_directory': 'isDirectory', - 'comment_count': 'commentCount', - 'rating': 'rating', - 'ref': 'ref', - 'parent': 'parent', - 'type': 'type', - 'aspects': 'aspects', - 'name': 'name', - 'title': 'title', - 'metadataset': 'metadataset', - 'repository_type': 'repositoryType', - 'created_at': 'createdAt', - 'created_by': 'createdBy', - 'modified_at': 'modifiedAt', - 'modified_by': 'modifiedBy', - 'access': 'access', - 'download_url': 'downloadUrl', - 'properties': 'properties', - 'mimetype': 'mimetype', - 'mediatype': 'mediatype', - 'size': 'size', - 'preview': 'preview', - 'icon_url': 'iconURL', - 'collection': 'collection', - 'owner': 'owner' - } - - def __init__(self, remote=None, content=None, license=None, is_directory=False, comment_count=None, rating=None, ref=None, parent=None, type=None, aspects=None, name=None, title=None, metadataset=None, repository_type=None, created_at=None, created_by=None, modified_at=None, modified_by=None, access=None, download_url=None, properties=None, mimetype=None, mediatype=None, size=None, preview=None, icon_url=None, collection=None, owner=None): # noqa: E501 - """Node - a model defined in Swagger""" # noqa: E501 - self._remote = None - self._content = None - self._license = None - self._is_directory = None - self._comment_count = None - self._rating = None - self._ref = None - self._parent = None - self._type = None - self._aspects = None - self._name = None - self._title = None - self._metadataset = None - self._repository_type = None - self._created_at = None - self._created_by = None - self._modified_at = None - self._modified_by = None - self._access = None - self._download_url = None - self._properties = None - self._mimetype = None - self._mediatype = None - self._size = None - self._preview = None - self._icon_url = None - self._collection = None - self._owner = None - self.discriminator = None - if remote is not None: - self.remote = remote - if content is not None: - self.content = content - if license is not None: - self.license = license - if is_directory is not None: - self.is_directory = is_directory - if comment_count is not None: - self.comment_count = comment_count - if rating is not None: - self.rating = rating - self.ref = ref - if parent is not None: - self.parent = parent - if type is not None: - self.type = type - if aspects is not None: - self.aspects = aspects - self.name = name - if title is not None: - self.title = title - if metadataset is not None: - self.metadataset = metadataset - if repository_type is not None: - self.repository_type = repository_type - self.created_at = created_at - self.created_by = created_by - if modified_at is not None: - self.modified_at = modified_at - if modified_by is not None: - self.modified_by = 
modified_by - self.access = access - self.download_url = download_url - if properties is not None: - self.properties = properties - if mimetype is not None: - self.mimetype = mimetype - if mediatype is not None: - self.mediatype = mediatype - if size is not None: - self.size = size - if preview is not None: - self.preview = preview - if icon_url is not None: - self.icon_url = icon_url - self.collection = collection - self.owner = owner - - @property - def remote(self): - """Gets the remote of this Node. # noqa: E501 - - - :return: The remote of this Node. # noqa: E501 - :rtype: Remote - """ - return self._remote - - @remote.setter - def remote(self, remote): - """Sets the remote of this Node. - - - :param remote: The remote of this Node. # noqa: E501 - :type: Remote - """ - - self._remote = remote - - @property - def content(self): - """Gets the content of this Node. # noqa: E501 - - - :return: The content of this Node. # noqa: E501 - :rtype: Content - """ - return self._content - - @content.setter - def content(self, content): - """Sets the content of this Node. - - - :param content: The content of this Node. # noqa: E501 - :type: Content - """ - - self._content = content - - @property - def license(self): - """Gets the license of this Node. # noqa: E501 - - - :return: The license of this Node. # noqa: E501 - :rtype: License - """ - return self._license - - @license.setter - def license(self, license): - """Sets the license of this Node. - - - :param license: The license of this Node. # noqa: E501 - :type: License - """ - - self._license = license - - @property - def is_directory(self): - """Gets the is_directory of this Node. # noqa: E501 - - - :return: The is_directory of this Node. # noqa: E501 - :rtype: bool - """ - return self._is_directory - - @is_directory.setter - def is_directory(self, is_directory): - """Sets the is_directory of this Node. - - - :param is_directory: The is_directory of this Node. # noqa: E501 - :type: bool - """ - - self._is_directory = is_directory - - @property - def comment_count(self): - """Gets the comment_count of this Node. # noqa: E501 - - - :return: The comment_count of this Node. # noqa: E501 - :rtype: int - """ - return self._comment_count - - @comment_count.setter - def comment_count(self, comment_count): - """Sets the comment_count of this Node. - - - :param comment_count: The comment_count of this Node. # noqa: E501 - :type: int - """ - - self._comment_count = comment_count - - @property - def rating(self): - """Gets the rating of this Node. # noqa: E501 - - - :return: The rating of this Node. # noqa: E501 - :rtype: AccumulatedRatings - """ - return self._rating - - @rating.setter - def rating(self, rating): - """Sets the rating of this Node. - - - :param rating: The rating of this Node. # noqa: E501 - :type: AccumulatedRatings - """ - - self._rating = rating - - @property - def ref(self): - """Gets the ref of this Node. # noqa: E501 - - - :return: The ref of this Node. # noqa: E501 - :rtype: NodeRef - """ - return self._ref - - @ref.setter - def ref(self, ref): - """Sets the ref of this Node. - - - :param ref: The ref of this Node. # noqa: E501 - :type: NodeRef - """ - if ref is None: - raise ValueError("Invalid value for `ref`, must not be `None`") # noqa: E501 - - self._ref = ref - - @property - def parent(self): - """Gets the parent of this Node. # noqa: E501 - - - :return: The parent of this Node. # noqa: E501 - :rtype: NodeRef - """ - return self._parent - - @parent.setter - def parent(self, parent): - """Sets the parent of this Node. 
- - - :param parent: The parent of this Node. # noqa: E501 - :type: NodeRef - """ - - self._parent = parent - - @property - def type(self): - """Gets the type of this Node. # noqa: E501 - - - :return: The type of this Node. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Node. - - - :param type: The type of this Node. # noqa: E501 - :type: str - """ - - self._type = type - - @property - def aspects(self): - """Gets the aspects of this Node. # noqa: E501 - - - :return: The aspects of this Node. # noqa: E501 - :rtype: list[str] - """ - return self._aspects - - @aspects.setter - def aspects(self, aspects): - """Sets the aspects of this Node. - - - :param aspects: The aspects of this Node. # noqa: E501 - :type: list[str] - """ - - self._aspects = aspects - - @property - def name(self): - """Gets the name of this Node. # noqa: E501 - - - :return: The name of this Node. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this Node. - - - :param name: The name of this Node. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def title(self): - """Gets the title of this Node. # noqa: E501 - - - :return: The title of this Node. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this Node. - - - :param title: The title of this Node. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def metadataset(self): - """Gets the metadataset of this Node. # noqa: E501 - - - :return: The metadataset of this Node. # noqa: E501 - :rtype: str - """ - return self._metadataset - - @metadataset.setter - def metadataset(self, metadataset): - """Sets the metadataset of this Node. - - - :param metadataset: The metadataset of this Node. # noqa: E501 - :type: str - """ - - self._metadataset = metadataset - - @property - def repository_type(self): - """Gets the repository_type of this Node. # noqa: E501 - - - :return: The repository_type of this Node. # noqa: E501 - :rtype: str - """ - return self._repository_type - - @repository_type.setter - def repository_type(self, repository_type): - """Sets the repository_type of this Node. - - - :param repository_type: The repository_type of this Node. # noqa: E501 - :type: str - """ - - self._repository_type = repository_type - - @property - def created_at(self): - """Gets the created_at of this Node. # noqa: E501 - - - :return: The created_at of this Node. # noqa: E501 - :rtype: datetime - """ - return self._created_at - - @created_at.setter - def created_at(self, created_at): - """Sets the created_at of this Node. - - - :param created_at: The created_at of this Node. # noqa: E501 - :type: datetime - """ - if created_at is None: - raise ValueError("Invalid value for `created_at`, must not be `None`") # noqa: E501 - - self._created_at = created_at - - @property - def created_by(self): - """Gets the created_by of this Node. # noqa: E501 - - - :return: The created_by of this Node. # noqa: E501 - :rtype: Person - """ - return self._created_by - - @created_by.setter - def created_by(self, created_by): - """Sets the created_by of this Node. - - - :param created_by: The created_by of this Node. 
# noqa: E501 - :type: Person - """ - if created_by is None: - raise ValueError("Invalid value for `created_by`, must not be `None`") # noqa: E501 - - self._created_by = created_by - - @property - def modified_at(self): - """Gets the modified_at of this Node. # noqa: E501 - - - :return: The modified_at of this Node. # noqa: E501 - :rtype: datetime - """ - return self._modified_at - - @modified_at.setter - def modified_at(self, modified_at): - """Sets the modified_at of this Node. - - - :param modified_at: The modified_at of this Node. # noqa: E501 - :type: datetime - """ - - self._modified_at = modified_at - - @property - def modified_by(self): - """Gets the modified_by of this Node. # noqa: E501 - - - :return: The modified_by of this Node. # noqa: E501 - :rtype: Person - """ - return self._modified_by - - @modified_by.setter - def modified_by(self, modified_by): - """Sets the modified_by of this Node. - - - :param modified_by: The modified_by of this Node. # noqa: E501 - :type: Person - """ - - self._modified_by = modified_by - - @property - def access(self): - """Gets the access of this Node. # noqa: E501 - - - :return: The access of this Node. # noqa: E501 - :rtype: list[str] - """ - return self._access - - @access.setter - def access(self, access): - """Sets the access of this Node. - - - :param access: The access of this Node. # noqa: E501 - :type: list[str] - """ - if access is None: - raise ValueError("Invalid value for `access`, must not be `None`") # noqa: E501 - - self._access = access - - @property - def download_url(self): - """Gets the download_url of this Node. # noqa: E501 - - - :return: The download_url of this Node. # noqa: E501 - :rtype: str - """ - return self._download_url - - @download_url.setter - def download_url(self, download_url): - """Sets the download_url of this Node. - - - :param download_url: The download_url of this Node. # noqa: E501 - :type: str - """ - if download_url is None: - raise ValueError("Invalid value for `download_url`, must not be `None`") # noqa: E501 - - self._download_url = download_url - - @property - def properties(self): - """Gets the properties of this Node. # noqa: E501 - - - :return: The properties of this Node. # noqa: E501 - :rtype: dict(str, list[str]) - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this Node. - - - :param properties: The properties of this Node. # noqa: E501 - :type: dict(str, list[str]) - """ - - self._properties = properties - - @property - def mimetype(self): - """Gets the mimetype of this Node. # noqa: E501 - - - :return: The mimetype of this Node. # noqa: E501 - :rtype: str - """ - return self._mimetype - - @mimetype.setter - def mimetype(self, mimetype): - """Sets the mimetype of this Node. - - - :param mimetype: The mimetype of this Node. # noqa: E501 - :type: str - """ - - self._mimetype = mimetype - - @property - def mediatype(self): - """Gets the mediatype of this Node. # noqa: E501 - - - :return: The mediatype of this Node. # noqa: E501 - :rtype: str - """ - return self._mediatype - - @mediatype.setter - def mediatype(self, mediatype): - """Sets the mediatype of this Node. - - - :param mediatype: The mediatype of this Node. # noqa: E501 - :type: str - """ - - self._mediatype = mediatype - - @property - def size(self): - """Gets the size of this Node. # noqa: E501 - - - :return: The size of this Node. # noqa: E501 - :rtype: str - """ - return self._size - - @size.setter - def size(self, size): - """Sets the size of this Node. 
- - - :param size: The size of this Node. # noqa: E501 - :type: str - """ - - self._size = size - - @property - def preview(self): - """Gets the preview of this Node. # noqa: E501 - - - :return: The preview of this Node. # noqa: E501 - :rtype: Preview - """ - return self._preview - - @preview.setter - def preview(self, preview): - """Sets the preview of this Node. - - - :param preview: The preview of this Node. # noqa: E501 - :type: Preview - """ - - self._preview = preview - - @property - def icon_url(self): - """Gets the icon_url of this Node. # noqa: E501 - - - :return: The icon_url of this Node. # noqa: E501 - :rtype: str - """ - return self._icon_url - - @icon_url.setter - def icon_url(self, icon_url): - """Sets the icon_url of this Node. - - - :param icon_url: The icon_url of this Node. # noqa: E501 - :type: str - """ - - self._icon_url = icon_url - - @property - def collection(self): - """Gets the collection of this Node. # noqa: E501 - - - :return: The collection of this Node. # noqa: E501 - :rtype: Collection - """ - return self._collection - - @collection.setter - def collection(self, collection): - """Sets the collection of this Node. - - - :param collection: The collection of this Node. # noqa: E501 - :type: Collection - """ - if collection is None: - raise ValueError("Invalid value for `collection`, must not be `None`") # noqa: E501 - - self._collection = collection - - @property - def owner(self): - """Gets the owner of this Node. # noqa: E501 - - - :return: The owner of this Node. # noqa: E501 - :rtype: Person - """ - return self._owner - - @owner.setter - def owner(self, owner): - """Sets the owner of this Node. - - - :param owner: The owner of this Node. # noqa: E501 - :type: Person - """ - if owner is None: - raise ValueError("Invalid value for `owner`, must not be `None`") # noqa: E501 - - self._owner = owner - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Node, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Node): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_entries.py b/edu_sharing_client/models/node_entries.py deleted file mode 100644 index 70dc1589..00000000 --- a/edu_sharing_client/models/node_entries.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'nodes': 'list[Node]', - 'pagination': 'Pagination' - } - - attribute_map = { - 'nodes': 'nodes', - 'pagination': 'pagination' - } - - def __init__(self, nodes=None, pagination=None): # noqa: E501 - """NodeEntries - a model defined in Swagger""" # noqa: E501 - self._nodes = None - self._pagination = None - self.discriminator = None - self.nodes = nodes - self.pagination = pagination - - @property - def nodes(self): - """Gets the nodes of this NodeEntries. # noqa: E501 - - - :return: The nodes of this NodeEntries. # noqa: E501 - :rtype: list[Node] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this NodeEntries. - - - :param nodes: The nodes of this NodeEntries. # noqa: E501 - :type: list[Node] - """ - if nodes is None: - raise ValueError("Invalid value for `nodes`, must not be `None`") # noqa: E501 - - self._nodes = nodes - - @property - def pagination(self): - """Gets the pagination of this NodeEntries. # noqa: E501 - - - :return: The pagination of this NodeEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this NodeEntries. - - - :param pagination: The pagination of this NodeEntries. # noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_entry.py b/edu_sharing_client/models/node_entry.py deleted file mode 100644 index 6cfbc873..00000000 --- a/edu_sharing_client/models/node_entry.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'node': 'Node' - } - - attribute_map = { - 'node': 'node' - } - - def __init__(self, node=None): # noqa: E501 - """NodeEntry - a model defined in Swagger""" # noqa: E501 - self._node = None - self.discriminator = None - self.node = node - - @property - def node(self): - """Gets the node of this NodeEntry. # noqa: E501 - - - :return: The node of this NodeEntry. # noqa: E501 - :rtype: Node - """ - return self._node - - @node.setter - def node(self, node): - """Sets the node of this NodeEntry. - - - :param node: The node of this NodeEntry. # noqa: E501 - :type: Node - """ - if node is None: - raise ValueError("Invalid value for `node`, must not be `None`") # noqa: E501 - - self._node = node - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_locked.py b/edu_sharing_client/models/node_locked.py deleted file mode 100644 index 47b90355..00000000 --- a/edu_sharing_client/models/node_locked.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeLocked(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'is_locked': 'bool' - } - - attribute_map = { - 'is_locked': 'isLocked' - } - - def __init__(self, is_locked=False): # noqa: E501 - """NodeLocked - a model defined in Swagger""" # noqa: E501 - self._is_locked = None - self.discriminator = None - self.is_locked = is_locked - - @property - def is_locked(self): - """Gets the is_locked of this NodeLocked. # noqa: E501 - - - :return: The is_locked of this NodeLocked. # noqa: E501 - :rtype: bool - """ - return self._is_locked - - @is_locked.setter - def is_locked(self, is_locked): - """Sets the is_locked of this NodeLocked. - - - :param is_locked: The is_locked of this NodeLocked. # noqa: E501 - :type: bool - """ - if is_locked is None: - raise ValueError("Invalid value for `is_locked`, must not be `None`") # noqa: E501 - - self._is_locked = is_locked - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeLocked, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeLocked): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_permission_entry.py b/edu_sharing_client/models/node_permission_entry.py deleted file mode 100644 index f8a3be05..00000000 --- a/edu_sharing_client/models/node_permission_entry.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodePermissionEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'permissions': 'NodePermissions' - } - - attribute_map = { - 'permissions': 'permissions' - } - - def __init__(self, permissions=None): # noqa: E501 - """NodePermissionEntry - a model defined in Swagger""" # noqa: E501 - self._permissions = None - self.discriminator = None - self.permissions = permissions - - @property - def permissions(self): - """Gets the permissions of this NodePermissionEntry. # noqa: E501 - - - :return: The permissions of this NodePermissionEntry. 
# noqa: E501 - :rtype: NodePermissions - """ - return self._permissions - - @permissions.setter - def permissions(self, permissions): - """Sets the permissions of this NodePermissionEntry. - - - :param permissions: The permissions of this NodePermissionEntry. # noqa: E501 - :type: NodePermissions - """ - if permissions is None: - raise ValueError("Invalid value for `permissions`, must not be `None`") # noqa: E501 - - self._permissions = permissions - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodePermissionEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodePermissionEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_permissions.py b/edu_sharing_client/models/node_permissions.py deleted file mode 100644 index 8dbf5585..00000000 --- a/edu_sharing_client/models/node_permissions.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodePermissions(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'local_permissions': 'ACL', - 'inherited_permissions': 'list[ACE]' - } - - attribute_map = { - 'local_permissions': 'localPermissions', - 'inherited_permissions': 'inheritedPermissions' - } - - def __init__(self, local_permissions=None, inherited_permissions=None): # noqa: E501 - """NodePermissions - a model defined in Swagger""" # noqa: E501 - self._local_permissions = None - self._inherited_permissions = None - self.discriminator = None - self.local_permissions = local_permissions - self.inherited_permissions = inherited_permissions - - @property - def local_permissions(self): - """Gets the local_permissions of this NodePermissions. # noqa: E501 - - - :return: The local_permissions of this NodePermissions. # noqa: E501 - :rtype: ACL - """ - return self._local_permissions - - @local_permissions.setter - def local_permissions(self, local_permissions): - """Sets the local_permissions of this NodePermissions. - - - :param local_permissions: The local_permissions of this NodePermissions. 
# noqa: E501 - :type: ACL - """ - if local_permissions is None: - raise ValueError("Invalid value for `local_permissions`, must not be `None`") # noqa: E501 - - self._local_permissions = local_permissions - - @property - def inherited_permissions(self): - """Gets the inherited_permissions of this NodePermissions. # noqa: E501 - - - :return: The inherited_permissions of this NodePermissions. # noqa: E501 - :rtype: list[ACE] - """ - return self._inherited_permissions - - @inherited_permissions.setter - def inherited_permissions(self, inherited_permissions): - """Sets the inherited_permissions of this NodePermissions. - - - :param inherited_permissions: The inherited_permissions of this NodePermissions. # noqa: E501 - :type: list[ACE] - """ - if inherited_permissions is None: - raise ValueError("Invalid value for `inherited_permissions`, must not be `None`") # noqa: E501 - - self._inherited_permissions = inherited_permissions - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodePermissions, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodePermissions): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_ref.py b/edu_sharing_client/models/node_ref.py deleted file mode 100644 index 45646f0b..00000000 --- a/edu_sharing_client/models/node_ref.py +++ /dev/null @@ -1,192 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeRef(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'repo': 'str', - 'id': 'str', - 'archived': 'bool', - 'is_home_repo': 'bool' - } - - attribute_map = { - 'repo': 'repo', - 'id': 'id', - 'archived': 'archived', - 'is_home_repo': 'isHomeRepo' - } - - def __init__(self, repo=None, id=None, archived=False, is_home_repo=False): # noqa: E501 - """NodeRef - a model defined in Swagger""" # noqa: E501 - self._repo = None - self._id = None - self._archived = None - self._is_home_repo = None - self.discriminator = None - self.repo = repo - self.id = id - self.archived = archived - if is_home_repo is not None: - self.is_home_repo = is_home_repo - - @property - def repo(self): - """Gets the repo of this NodeRef. # noqa: E501 - - - :return: The repo of this NodeRef. # noqa: E501 - :rtype: str - """ - return self._repo - - @repo.setter - def repo(self, repo): - """Sets the repo of this NodeRef. - - - :param repo: The repo of this NodeRef. # noqa: E501 - :type: str - """ - if repo is None: - raise ValueError("Invalid value for `repo`, must not be `None`") # noqa: E501 - - self._repo = repo - - @property - def id(self): - """Gets the id of this NodeRef. # noqa: E501 - - - :return: The id of this NodeRef. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this NodeRef. - - - :param id: The id of this NodeRef. # noqa: E501 - :type: str - """ - if id is None: - raise ValueError("Invalid value for `id`, must not be `None`") # noqa: E501 - - self._id = id - - @property - def archived(self): - """Gets the archived of this NodeRef. # noqa: E501 - - - :return: The archived of this NodeRef. # noqa: E501 - :rtype: bool - """ - return self._archived - - @archived.setter - def archived(self, archived): - """Sets the archived of this NodeRef. - - - :param archived: The archived of this NodeRef. # noqa: E501 - :type: bool - """ - if archived is None: - raise ValueError("Invalid value for `archived`, must not be `None`") # noqa: E501 - - self._archived = archived - - @property - def is_home_repo(self): - """Gets the is_home_repo of this NodeRef. # noqa: E501 - - - :return: The is_home_repo of this NodeRef. # noqa: E501 - :rtype: bool - """ - return self._is_home_repo - - @is_home_repo.setter - def is_home_repo(self, is_home_repo): - """Sets the is_home_repo of this NodeRef. - - - :param is_home_repo: The is_home_repo of this NodeRef. 
# noqa: E501 - :type: bool - """ - - self._is_home_repo = is_home_repo - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeRef, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeRef): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_remote.py b/edu_sharing_client/models/node_remote.py deleted file mode 100644 index 9c558459..00000000 --- a/edu_sharing_client/models/node_remote.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeRemote(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'node': 'Node', - 'remote': 'Node' - } - - attribute_map = { - 'node': 'node', - 'remote': 'remote' - } - - def __init__(self, node=None, remote=None): # noqa: E501 - """NodeRemote - a model defined in Swagger""" # noqa: E501 - self._node = None - self._remote = None - self.discriminator = None - self.node = node - self.remote = remote - - @property - def node(self): - """Gets the node of this NodeRemote. # noqa: E501 - - - :return: The node of this NodeRemote. # noqa: E501 - :rtype: Node - """ - return self._node - - @node.setter - def node(self, node): - """Sets the node of this NodeRemote. - - - :param node: The node of this NodeRemote. # noqa: E501 - :type: Node - """ - if node is None: - raise ValueError("Invalid value for `node`, must not be `None`") # noqa: E501 - - self._node = node - - @property - def remote(self): - """Gets the remote of this NodeRemote. # noqa: E501 - - - :return: The remote of this NodeRemote. # noqa: E501 - :rtype: Node - """ - return self._remote - - @remote.setter - def remote(self, remote): - """Sets the remote of this NodeRemote. - - - :param remote: The remote of this NodeRemote. 
# noqa: E501 - :type: Node - """ - if remote is None: - raise ValueError("Invalid value for `remote`, must not be `None`") # noqa: E501 - - self._remote = remote - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeRemote, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeRemote): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_share.py b/edu_sharing_client/models/node_share.py deleted file mode 100644 index df27ad67..00000000 --- a/edu_sharing_client/models/node_share.py +++ /dev/null @@ -1,293 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeShare(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'password': 'bool', - 'token': 'str', - 'email': 'str', - 'expiry_date': 'int', - 'invited_at': 'int', - 'download_count': 'int', - 'url': 'str', - 'share_id': 'str' - } - - attribute_map = { - 'password': 'password', - 'token': 'token', - 'email': 'email', - 'expiry_date': 'expiryDate', - 'invited_at': 'invitedAt', - 'download_count': 'downloadCount', - 'url': 'url', - 'share_id': 'shareId' - } - - def __init__(self, password=False, token=None, email=None, expiry_date=None, invited_at=None, download_count=None, url=None, share_id=None): # noqa: E501 - """NodeShare - a model defined in Swagger""" # noqa: E501 - self._password = None - self._token = None - self._email = None - self._expiry_date = None - self._invited_at = None - self._download_count = None - self._url = None - self._share_id = None - self.discriminator = None - if password is not None: - self.password = password - if token is not None: - self.token = token - if email is not None: - self.email = email - if expiry_date is not None: - self.expiry_date = expiry_date - if invited_at is not None: - self.invited_at = invited_at - if download_count is not None: - self.download_count = download_count - if url is not None: - self.url = url - if share_id is not None: - self.share_id = share_id - - @property - def password(self): - """Gets the password of this NodeShare. 
# noqa: E501 - - - :return: The password of this NodeShare. # noqa: E501 - :rtype: bool - """ - return self._password - - @password.setter - def password(self, password): - """Sets the password of this NodeShare. - - - :param password: The password of this NodeShare. # noqa: E501 - :type: bool - """ - - self._password = password - - @property - def token(self): - """Gets the token of this NodeShare. # noqa: E501 - - - :return: The token of this NodeShare. # noqa: E501 - :rtype: str - """ - return self._token - - @token.setter - def token(self, token): - """Sets the token of this NodeShare. - - - :param token: The token of this NodeShare. # noqa: E501 - :type: str - """ - - self._token = token - - @property - def email(self): - """Gets the email of this NodeShare. # noqa: E501 - - - :return: The email of this NodeShare. # noqa: E501 - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this NodeShare. - - - :param email: The email of this NodeShare. # noqa: E501 - :type: str - """ - - self._email = email - - @property - def expiry_date(self): - """Gets the expiry_date of this NodeShare. # noqa: E501 - - - :return: The expiry_date of this NodeShare. # noqa: E501 - :rtype: int - """ - return self._expiry_date - - @expiry_date.setter - def expiry_date(self, expiry_date): - """Sets the expiry_date of this NodeShare. - - - :param expiry_date: The expiry_date of this NodeShare. # noqa: E501 - :type: int - """ - - self._expiry_date = expiry_date - - @property - def invited_at(self): - """Gets the invited_at of this NodeShare. # noqa: E501 - - - :return: The invited_at of this NodeShare. # noqa: E501 - :rtype: int - """ - return self._invited_at - - @invited_at.setter - def invited_at(self, invited_at): - """Sets the invited_at of this NodeShare. - - - :param invited_at: The invited_at of this NodeShare. # noqa: E501 - :type: int - """ - - self._invited_at = invited_at - - @property - def download_count(self): - """Gets the download_count of this NodeShare. # noqa: E501 - - - :return: The download_count of this NodeShare. # noqa: E501 - :rtype: int - """ - return self._download_count - - @download_count.setter - def download_count(self, download_count): - """Sets the download_count of this NodeShare. - - - :param download_count: The download_count of this NodeShare. # noqa: E501 - :type: int - """ - - self._download_count = download_count - - @property - def url(self): - """Gets the url of this NodeShare. # noqa: E501 - - - :return: The url of this NodeShare. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this NodeShare. - - - :param url: The url of this NodeShare. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def share_id(self): - """Gets the share_id of this NodeShare. # noqa: E501 - - - :return: The share_id of this NodeShare. # noqa: E501 - :rtype: str - """ - return self._share_id - - @share_id.setter - def share_id(self, share_id): - """Sets the share_id of this NodeShare. - - - :param share_id: The share_id of this NodeShare. 
# noqa: E501 - :type: str - """ - - self._share_id = share_id - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeShare, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeShare): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_text.py b/edu_sharing_client/models/node_text.py deleted file mode 100644 index 2dfea608..00000000 --- a/edu_sharing_client/models/node_text.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeText(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'text': 'str', - 'html': 'str', - 'raw': 'str' - } - - attribute_map = { - 'text': 'text', - 'html': 'html', - 'raw': 'raw' - } - - def __init__(self, text=None, html=None, raw=None): # noqa: E501 - """NodeText - a model defined in Swagger""" # noqa: E501 - self._text = None - self._html = None - self._raw = None - self.discriminator = None - if text is not None: - self.text = text - if html is not None: - self.html = html - if raw is not None: - self.raw = raw - - @property - def text(self): - """Gets the text of this NodeText. # noqa: E501 - - - :return: The text of this NodeText. # noqa: E501 - :rtype: str - """ - return self._text - - @text.setter - def text(self, text): - """Sets the text of this NodeText. - - - :param text: The text of this NodeText. # noqa: E501 - :type: str - """ - - self._text = text - - @property - def html(self): - """Gets the html of this NodeText. # noqa: E501 - - - :return: The html of this NodeText. # noqa: E501 - :rtype: str - """ - return self._html - - @html.setter - def html(self, html): - """Sets the html of this NodeText. - - - :param html: The html of this NodeText. # noqa: E501 - :type: str - """ - - self._html = html - - @property - def raw(self): - """Gets the raw of this NodeText. # noqa: E501 - - - :return: The raw of this NodeText. # noqa: E501 - :rtype: str - """ - return self._raw - - @raw.setter - def raw(self, raw): - """Sets the raw of this NodeText. 
- - - :param raw: The raw of this NodeText. # noqa: E501 - :type: str - """ - - self._raw = raw - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeText, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeText): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_version.py b/edu_sharing_client/models/node_version.py deleted file mode 100644 index bc2cf968..00000000 --- a/edu_sharing_client/models/node_version.py +++ /dev/null @@ -1,245 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeVersion(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'properties': 'dict(str, list[str])', - 'version': 'NodeVersionRef', - 'comment': 'str', - 'modified_at': 'str', - 'modified_by': 'Person', - 'content_url': 'str' - } - - attribute_map = { - 'properties': 'properties', - 'version': 'version', - 'comment': 'comment', - 'modified_at': 'modifiedAt', - 'modified_by': 'modifiedBy', - 'content_url': 'contentUrl' - } - - def __init__(self, properties=None, version=None, comment=None, modified_at=None, modified_by=None, content_url=None): # noqa: E501 - """NodeVersion - a model defined in Swagger""" # noqa: E501 - self._properties = None - self._version = None - self._comment = None - self._modified_at = None - self._modified_by = None - self._content_url = None - self.discriminator = None - if properties is not None: - self.properties = properties - self.version = version - self.comment = comment - self.modified_at = modified_at - self.modified_by = modified_by - if content_url is not None: - self.content_url = content_url - - @property - def properties(self): - """Gets the properties of this NodeVersion. # noqa: E501 - - - :return: The properties of this NodeVersion. # noqa: E501 - :rtype: dict(str, list[str]) - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this NodeVersion. - - - :param properties: The properties of this NodeVersion. 
# noqa: E501 - :type: dict(str, list[str]) - """ - - self._properties = properties - - @property - def version(self): - """Gets the version of this NodeVersion. # noqa: E501 - - - :return: The version of this NodeVersion. # noqa: E501 - :rtype: NodeVersionRef - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this NodeVersion. - - - :param version: The version of this NodeVersion. # noqa: E501 - :type: NodeVersionRef - """ - if version is None: - raise ValueError("Invalid value for `version`, must not be `None`") # noqa: E501 - - self._version = version - - @property - def comment(self): - """Gets the comment of this NodeVersion. # noqa: E501 - - - :return: The comment of this NodeVersion. # noqa: E501 - :rtype: str - """ - return self._comment - - @comment.setter - def comment(self, comment): - """Sets the comment of this NodeVersion. - - - :param comment: The comment of this NodeVersion. # noqa: E501 - :type: str - """ - if comment is None: - raise ValueError("Invalid value for `comment`, must not be `None`") # noqa: E501 - - self._comment = comment - - @property - def modified_at(self): - """Gets the modified_at of this NodeVersion. # noqa: E501 - - - :return: The modified_at of this NodeVersion. # noqa: E501 - :rtype: str - """ - return self._modified_at - - @modified_at.setter - def modified_at(self, modified_at): - """Sets the modified_at of this NodeVersion. - - - :param modified_at: The modified_at of this NodeVersion. # noqa: E501 - :type: str - """ - if modified_at is None: - raise ValueError("Invalid value for `modified_at`, must not be `None`") # noqa: E501 - - self._modified_at = modified_at - - @property - def modified_by(self): - """Gets the modified_by of this NodeVersion. # noqa: E501 - - - :return: The modified_by of this NodeVersion. # noqa: E501 - :rtype: Person - """ - return self._modified_by - - @modified_by.setter - def modified_by(self, modified_by): - """Sets the modified_by of this NodeVersion. - - - :param modified_by: The modified_by of this NodeVersion. # noqa: E501 - :type: Person - """ - if modified_by is None: - raise ValueError("Invalid value for `modified_by`, must not be `None`") # noqa: E501 - - self._modified_by = modified_by - - @property - def content_url(self): - """Gets the content_url of this NodeVersion. # noqa: E501 - - - :return: The content_url of this NodeVersion. # noqa: E501 - :rtype: str - """ - return self._content_url - - @content_url.setter - def content_url(self, content_url): - """Sets the content_url of this NodeVersion. - - - :param content_url: The content_url of this NodeVersion. 
# noqa: E501 - :type: str - """ - - self._content_url = content_url - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeVersion, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeVersion): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_version_entry.py b/edu_sharing_client/models/node_version_entry.py deleted file mode 100644 index 67a9ca15..00000000 --- a/edu_sharing_client/models/node_version_entry.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeVersionEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'version': 'NodeVersion' - } - - attribute_map = { - 'version': 'version' - } - - def __init__(self, version=None): # noqa: E501 - """NodeVersionEntry - a model defined in Swagger""" # noqa: E501 - self._version = None - self.discriminator = None - self.version = version - - @property - def version(self): - """Gets the version of this NodeVersionEntry. # noqa: E501 - - - :return: The version of this NodeVersionEntry. # noqa: E501 - :rtype: NodeVersion - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this NodeVersionEntry. - - - :param version: The version of this NodeVersionEntry. 
# noqa: E501 - :type: NodeVersion - """ - if version is None: - raise ValueError("Invalid value for `version`, must not be `None`") # noqa: E501 - - self._version = version - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeVersionEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeVersionEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_version_ref.py b/edu_sharing_client/models/node_version_ref.py deleted file mode 100644 index 7220bc45..00000000 --- a/edu_sharing_client/models/node_version_ref.py +++ /dev/null @@ -1,166 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeVersionRef(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'node': 'NodeRef', - 'major': 'int', - 'minor': 'int' - } - - attribute_map = { - 'node': 'node', - 'major': 'major', - 'minor': 'minor' - } - - def __init__(self, node=None, major=None, minor=None): # noqa: E501 - """NodeVersionRef - a model defined in Swagger""" # noqa: E501 - self._node = None - self._major = None - self._minor = None - self.discriminator = None - self.node = node - self.major = major - self.minor = minor - - @property - def node(self): - """Gets the node of this NodeVersionRef. # noqa: E501 - - - :return: The node of this NodeVersionRef. # noqa: E501 - :rtype: NodeRef - """ - return self._node - - @node.setter - def node(self, node): - """Sets the node of this NodeVersionRef. - - - :param node: The node of this NodeVersionRef. # noqa: E501 - :type: NodeRef - """ - if node is None: - raise ValueError("Invalid value for `node`, must not be `None`") # noqa: E501 - - self._node = node - - @property - def major(self): - """Gets the major of this NodeVersionRef. # noqa: E501 - - - :return: The major of this NodeVersionRef. # noqa: E501 - :rtype: int - """ - return self._major - - @major.setter - def major(self, major): - """Sets the major of this NodeVersionRef. - - - :param major: The major of this NodeVersionRef. 
# noqa: E501 - :type: int - """ - if major is None: - raise ValueError("Invalid value for `major`, must not be `None`") # noqa: E501 - - self._major = major - - @property - def minor(self): - """Gets the minor of this NodeVersionRef. # noqa: E501 - - - :return: The minor of this NodeVersionRef. # noqa: E501 - :rtype: int - """ - return self._minor - - @minor.setter - def minor(self, minor): - """Sets the minor of this NodeVersionRef. - - - :param minor: The minor of this NodeVersionRef. # noqa: E501 - :type: int - """ - if minor is None: - raise ValueError("Invalid value for `minor`, must not be `None`") # noqa: E501 - - self._minor = minor - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeVersionRef, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeVersionRef): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/node_version_ref_entries.py b/edu_sharing_client/models/node_version_ref_entries.py deleted file mode 100644 index b650f822..00000000 --- a/edu_sharing_client/models/node_version_ref_entries.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NodeVersionRefEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'versions': 'list[NodeVersionRef]' - } - - attribute_map = { - 'versions': 'versions' - } - - def __init__(self, versions=None): # noqa: E501 - """NodeVersionRefEntries - a model defined in Swagger""" # noqa: E501 - self._versions = None - self.discriminator = None - self.versions = versions - - @property - def versions(self): - """Gets the versions of this NodeVersionRefEntries. # noqa: E501 - - - :return: The versions of this NodeVersionRefEntries. # noqa: E501 - :rtype: list[NodeVersionRef] - """ - return self._versions - - @versions.setter - def versions(self, versions): - """Sets the versions of this NodeVersionRefEntries. - - - :param versions: The versions of this NodeVersionRefEntries. 
# noqa: E501 - :type: list[NodeVersionRef] - """ - if versions is None: - raise ValueError("Invalid value for `versions`, must not be `None`") # noqa: E501 - - self._versions = versions - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NodeVersionRefEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NodeVersionRefEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/notify_entry.py b/edu_sharing_client/models/notify_entry.py deleted file mode 100644 index 9ebfcd8e..00000000 --- a/edu_sharing_client/models/notify_entry.py +++ /dev/null @@ -1,193 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class NotifyEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - '_date': 'int', - 'permissions': 'ACL', - 'user': 'User', - 'action': 'str' - } - - attribute_map = { - '_date': 'date', - 'permissions': 'permissions', - 'user': 'user', - 'action': 'action' - } - - def __init__(self, _date=None, permissions=None, user=None, action=None): # noqa: E501 - """NotifyEntry - a model defined in Swagger""" # noqa: E501 - self.__date = None - self._permissions = None - self._user = None - self._action = None - self.discriminator = None - self._date = _date - self.permissions = permissions - self.user = user - self.action = action - - @property - def _date(self): - """Gets the _date of this NotifyEntry. # noqa: E501 - - - :return: The _date of this NotifyEntry. # noqa: E501 - :rtype: int - """ - return self.__date - - @_date.setter - def _date(self, _date): - """Sets the _date of this NotifyEntry. - - - :param _date: The _date of this NotifyEntry. # noqa: E501 - :type: int - """ - if _date is None: - raise ValueError("Invalid value for `_date`, must not be `None`") # noqa: E501 - - self.__date = _date - - @property - def permissions(self): - """Gets the permissions of this NotifyEntry. # noqa: E501 - - - :return: The permissions of this NotifyEntry. 
# noqa: E501 - :rtype: ACL - """ - return self._permissions - - @permissions.setter - def permissions(self, permissions): - """Sets the permissions of this NotifyEntry. - - - :param permissions: The permissions of this NotifyEntry. # noqa: E501 - :type: ACL - """ - if permissions is None: - raise ValueError("Invalid value for `permissions`, must not be `None`") # noqa: E501 - - self._permissions = permissions - - @property - def user(self): - """Gets the user of this NotifyEntry. # noqa: E501 - - - :return: The user of this NotifyEntry. # noqa: E501 - :rtype: User - """ - return self._user - - @user.setter - def user(self, user): - """Sets the user of this NotifyEntry. - - - :param user: The user of this NotifyEntry. # noqa: E501 - :type: User - """ - if user is None: - raise ValueError("Invalid value for `user`, must not be `None`") # noqa: E501 - - self._user = user - - @property - def action(self): - """Gets the action of this NotifyEntry. # noqa: E501 - - - :return: The action of this NotifyEntry. # noqa: E501 - :rtype: str - """ - return self._action - - @action.setter - def action(self, action): - """Sets the action of this NotifyEntry. - - - :param action: The action of this NotifyEntry. # noqa: E501 - :type: str - """ - if action is None: - raise ValueError("Invalid value for `action`, must not be `None`") # noqa: E501 - - self._action = action - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(NotifyEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, NotifyEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/organisations_import_result.py b/edu_sharing_client/models/organisations_import_result.py deleted file mode 100644 index d7c1661e..00000000 --- a/edu_sharing_client/models/organisations_import_result.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class OrganisationsImportResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'rows': 'int' - } - - attribute_map = { - 'rows': 'rows' - } - - def __init__(self, rows=None): # noqa: E501 - """OrganisationsImportResult - a model defined in Swagger""" # noqa: E501 - self._rows = None - self.discriminator = None - if rows is not None: - self.rows = rows - - @property - def rows(self): - """Gets the rows of this OrganisationsImportResult. # noqa: E501 - - - :return: The rows of this OrganisationsImportResult. # noqa: E501 - :rtype: int - """ - return self._rows - - @rows.setter - def rows(self, rows): - """Sets the rows of this OrganisationsImportResult. - - - :param rows: The rows of this OrganisationsImportResult. # noqa: E501 - :type: int - """ - - self._rows = rows - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(OrganisationsImportResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, OrganisationsImportResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/organization.py b/edu_sharing_client/models/organization.py deleted file mode 100644 index 38fb06d1..00000000 --- a/edu_sharing_client/models/organization.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Organization(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'group_types': 'list[str]' - } - - attribute_map = { - 'group_types': 'groupTypes' - } - - def __init__(self, group_types=None): # noqa: E501 - """Organization - a model defined in Swagger""" # noqa: E501 - self._group_types = None - self.discriminator = None - if group_types is not None: - self.group_types = group_types - - @property - def group_types(self): - """Gets the group_types of this Organization. # noqa: E501 - - - :return: The group_types of this Organization. # noqa: E501 - :rtype: list[str] - """ - return self._group_types - - @group_types.setter - def group_types(self, group_types): - """Sets the group_types of this Organization. - - - :param group_types: The group_types of this Organization. 
# noqa: E501 - :type: list[str] - """ - - self._group_types = group_types - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Organization, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Organization): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/organization_entries.py b/edu_sharing_client/models/organization_entries.py deleted file mode 100644 index 53b8b07c..00000000 --- a/edu_sharing_client/models/organization_entries.py +++ /dev/null @@ -1,165 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class OrganizationEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'organizations': 'list[Organization]', - 'pagination': 'Pagination', - 'can_create': 'bool' - } - - attribute_map = { - 'organizations': 'organizations', - 'pagination': 'pagination', - 'can_create': 'canCreate' - } - - def __init__(self, organizations=None, pagination=None, can_create=False): # noqa: E501 - """OrganizationEntries - a model defined in Swagger""" # noqa: E501 - self._organizations = None - self._pagination = None - self._can_create = None - self.discriminator = None - self.organizations = organizations - self.pagination = pagination - if can_create is not None: - self.can_create = can_create - - @property - def organizations(self): - """Gets the organizations of this OrganizationEntries. # noqa: E501 - - - :return: The organizations of this OrganizationEntries. # noqa: E501 - :rtype: list[Organization] - """ - return self._organizations - - @organizations.setter - def organizations(self, organizations): - """Sets the organizations of this OrganizationEntries. - - - :param organizations: The organizations of this OrganizationEntries. # noqa: E501 - :type: list[Organization] - """ - if organizations is None: - raise ValueError("Invalid value for `organizations`, must not be `None`") # noqa: E501 - - self._organizations = organizations - - @property - def pagination(self): - """Gets the pagination of this OrganizationEntries. 
# noqa: E501 - - - :return: The pagination of this OrganizationEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this OrganizationEntries. - - - :param pagination: The pagination of this OrganizationEntries. # noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - @property - def can_create(self): - """Gets the can_create of this OrganizationEntries. # noqa: E501 - - - :return: The can_create of this OrganizationEntries. # noqa: E501 - :rtype: bool - """ - return self._can_create - - @can_create.setter - def can_create(self, can_create): - """Sets the can_create of this OrganizationEntries. - - - :param can_create: The can_create of this OrganizationEntries. # noqa: E501 - :type: bool - """ - - self._can_create = can_create - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(OrganizationEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, OrganizationEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/pagination.py b/edu_sharing_client/models/pagination.py deleted file mode 100644 index 9481e57c..00000000 --- a/edu_sharing_client/models/pagination.py +++ /dev/null @@ -1,166 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Pagination(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'total': 'int', - '_from': 'int', - 'count': 'int' - } - - attribute_map = { - 'total': 'total', - '_from': 'from', - 'count': 'count' - } - - def __init__(self, total=None, _from=None, count=None): # noqa: E501 - """Pagination - a model defined in Swagger""" # noqa: E501 - self._total = None - self.__from = None - self._count = None - self.discriminator = None - self.total = total - self._from = _from - self.count = count - - @property - def total(self): - """Gets the total of this Pagination. 
# noqa: E501 - - - :return: The total of this Pagination. # noqa: E501 - :rtype: int - """ - return self._total - - @total.setter - def total(self, total): - """Sets the total of this Pagination. - - - :param total: The total of this Pagination. # noqa: E501 - :type: int - """ - if total is None: - raise ValueError("Invalid value for `total`, must not be `None`") # noqa: E501 - - self._total = total - - @property - def _from(self): - """Gets the _from of this Pagination. # noqa: E501 - - - :return: The _from of this Pagination. # noqa: E501 - :rtype: int - """ - return self.__from - - @_from.setter - def _from(self, _from): - """Sets the _from of this Pagination. - - - :param _from: The _from of this Pagination. # noqa: E501 - :type: int - """ - if _from is None: - raise ValueError("Invalid value for `_from`, must not be `None`") # noqa: E501 - - self.__from = _from - - @property - def count(self): - """Gets the count of this Pagination. # noqa: E501 - - - :return: The count of this Pagination. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this Pagination. - - - :param count: The count of this Pagination. # noqa: E501 - :type: int - """ - if count is None: - raise ValueError("Invalid value for `count`, must not be `None`") # noqa: E501 - - self._count = count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Pagination, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Pagination): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/parameters.py b/edu_sharing_client/models/parameters.py deleted file mode 100644 index 3a6d0d6c..00000000 --- a/edu_sharing_client/models/parameters.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Parameters(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'general': 'General' - } - - attribute_map = { - 'general': 'general' - } - - def __init__(self, general=None): # noqa: E501 - """Parameters - a model defined in Swagger""" # noqa: E501 - self._general = None - self.discriminator = None - if general is not None: - self.general = general - - @property - def general(self): - """Gets the general of this Parameters. # noqa: E501 - - - :return: The general of this Parameters. # noqa: E501 - :rtype: General - """ - return self._general - - @general.setter - def general(self, general): - """Sets the general of this Parameters. - - - :param general: The general of this Parameters. # noqa: E501 - :type: General - """ - - self._general = general - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Parameters, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Parameters): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/parent_entries.py b/edu_sharing_client/models/parent_entries.py deleted file mode 100644 index 954c0156..00000000 --- a/edu_sharing_client/models/parent_entries.py +++ /dev/null @@ -1,165 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ParentEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'scope': 'str', - 'nodes': 'list[Node]', - 'pagination': 'Pagination' - } - - attribute_map = { - 'scope': 'scope', - 'nodes': 'nodes', - 'pagination': 'pagination' - } - - def __init__(self, scope=None, nodes=None, pagination=None): # noqa: E501 - """ParentEntries - a model defined in Swagger""" # noqa: E501 - self._scope = None - self._nodes = None - self._pagination = None - self.discriminator = None - if scope is not None: - self.scope = scope - self.nodes = nodes - self.pagination = pagination - - @property - def scope(self): - """Gets the scope of this ParentEntries. # noqa: E501 - - - :return: The scope of this ParentEntries. 
# noqa: E501 - :rtype: str - """ - return self._scope - - @scope.setter - def scope(self, scope): - """Sets the scope of this ParentEntries. - - - :param scope: The scope of this ParentEntries. # noqa: E501 - :type: str - """ - - self._scope = scope - - @property - def nodes(self): - """Gets the nodes of this ParentEntries. # noqa: E501 - - - :return: The nodes of this ParentEntries. # noqa: E501 - :rtype: list[Node] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this ParentEntries. - - - :param nodes: The nodes of this ParentEntries. # noqa: E501 - :type: list[Node] - """ - if nodes is None: - raise ValueError("Invalid value for `nodes`, must not be `None`") # noqa: E501 - - self._nodes = nodes - - @property - def pagination(self): - """Gets the pagination of this ParentEntries. # noqa: E501 - - - :return: The pagination of this ParentEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this ParentEntries. - - - :param pagination: The pagination of this ParentEntries. # noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ParentEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ParentEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/person.py b/edu_sharing_client/models/person.py deleted file mode 100644 index 94432a40..00000000 --- a/edu_sharing_client/models/person.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Person(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'profile': 'UserProfile', - 'first_name': 'str', - 'last_name': 'str', - 'mailbox': 'str' - } - - attribute_map = { - 'profile': 'profile', - 'first_name': 'firstName', - 'last_name': 'lastName', - 'mailbox': 'mailbox' - } - - def __init__(self, profile=None, first_name=None, last_name=None, mailbox=None): # noqa: E501 - """Person - a model defined in Swagger""" # noqa: E501 - self._profile = None - self._first_name = None - self._last_name = None - self._mailbox = None - self.discriminator = None - if profile is not None: - self.profile = profile - if first_name is not None: - self.first_name = first_name - if last_name is not None: - self.last_name = last_name - if mailbox is not None: - self.mailbox = mailbox - - @property - def profile(self): - """Gets the profile of this Person. # noqa: E501 - - - :return: The profile of this Person. # noqa: E501 - :rtype: UserProfile - """ - return self._profile - - @profile.setter - def profile(self, profile): - """Sets the profile of this Person. - - - :param profile: The profile of this Person. # noqa: E501 - :type: UserProfile - """ - - self._profile = profile - - @property - def first_name(self): - """Gets the first_name of this Person. # noqa: E501 - - - :return: The first_name of this Person. # noqa: E501 - :rtype: str - """ - return self._first_name - - @first_name.setter - def first_name(self, first_name): - """Sets the first_name of this Person. - - - :param first_name: The first_name of this Person. # noqa: E501 - :type: str - """ - - self._first_name = first_name - - @property - def last_name(self): - """Gets the last_name of this Person. # noqa: E501 - - - :return: The last_name of this Person. # noqa: E501 - :rtype: str - """ - return self._last_name - - @last_name.setter - def last_name(self, last_name): - """Sets the last_name of this Person. - - - :param last_name: The last_name of this Person. # noqa: E501 - :type: str - """ - - self._last_name = last_name - - @property - def mailbox(self): - """Gets the mailbox of this Person. # noqa: E501 - - - :return: The mailbox of this Person. # noqa: E501 - :rtype: str - """ - return self._mailbox - - @mailbox.setter - def mailbox(self, mailbox): - """Sets the mailbox of this Person. - - - :param mailbox: The mailbox of this Person. 
# noqa: E501 - :type: str - """ - - self._mailbox = mailbox - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Person, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Person): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/person_delete_options.py b/edu_sharing_client/models/person_delete_options.py deleted file mode 100644 index 988e0f06..00000000 --- a/edu_sharing_client/models/person_delete_options.py +++ /dev/null @@ -1,371 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class PersonDeleteOptions(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'cleanup_metadata': 'bool', - 'home_folder': 'HomeFolderOptions', - 'shared_folders': 'SharedFolderOptions', - 'collections': 'CollectionOptions', - 'ratings': 'DeleteOption', - 'comments': 'DeleteOption', - 'collection_feedback': 'DeleteOption', - 'statistics': 'DeleteOption', - 'stream': 'DeleteOption', - 'receiver': 'str', - 'receiver_group': 'str' - } - - attribute_map = { - 'cleanup_metadata': 'cleanupMetadata', - 'home_folder': 'homeFolder', - 'shared_folders': 'sharedFolders', - 'collections': 'collections', - 'ratings': 'ratings', - 'comments': 'comments', - 'collection_feedback': 'collectionFeedback', - 'statistics': 'statistics', - 'stream': 'stream', - 'receiver': 'receiver', - 'receiver_group': 'receiverGroup' - } - - def __init__(self, cleanup_metadata=False, home_folder=None, shared_folders=None, collections=None, ratings=None, comments=None, collection_feedback=None, statistics=None, stream=None, receiver=None, receiver_group=None): # noqa: E501 - """PersonDeleteOptions - a model defined in Swagger""" # noqa: E501 - self._cleanup_metadata = None - self._home_folder = None - self._shared_folders = None - self._collections = None - self._ratings = None - self._comments = None - self._collection_feedback = None - self._statistics = None - self._stream = None - self._receiver = None - self._receiver_group = None - self.discriminator = None - if cleanup_metadata is not None: - self.cleanup_metadata = cleanup_metadata - if home_folder is not None: - self.home_folder = home_folder - if shared_folders is not None: - self.shared_folders = shared_folders - if collections is not None: - self.collections = collections - if ratings is not None: - self.ratings = ratings - if comments is not None: - self.comments = comments - if collection_feedback is not None: - self.collection_feedback = collection_feedback - if statistics is not None: - self.statistics = statistics - if stream is not None: - self.stream = stream - if receiver is not None: - self.receiver = receiver - if receiver_group is not None: - self.receiver_group = receiver_group - - @property - def cleanup_metadata(self): - """Gets the cleanup_metadata of this PersonDeleteOptions. # noqa: E501 - - - :return: The cleanup_metadata of this PersonDeleteOptions. # noqa: E501 - :rtype: bool - """ - return self._cleanup_metadata - - @cleanup_metadata.setter - def cleanup_metadata(self, cleanup_metadata): - """Sets the cleanup_metadata of this PersonDeleteOptions. - - - :param cleanup_metadata: The cleanup_metadata of this PersonDeleteOptions. # noqa: E501 - :type: bool - """ - - self._cleanup_metadata = cleanup_metadata - - @property - def home_folder(self): - """Gets the home_folder of this PersonDeleteOptions. # noqa: E501 - - - :return: The home_folder of this PersonDeleteOptions. # noqa: E501 - :rtype: HomeFolderOptions - """ - return self._home_folder - - @home_folder.setter - def home_folder(self, home_folder): - """Sets the home_folder of this PersonDeleteOptions. - - - :param home_folder: The home_folder of this PersonDeleteOptions. # noqa: E501 - :type: HomeFolderOptions - """ - - self._home_folder = home_folder - - @property - def shared_folders(self): - """Gets the shared_folders of this PersonDeleteOptions. # noqa: E501 - - - :return: The shared_folders of this PersonDeleteOptions. # noqa: E501 - :rtype: SharedFolderOptions - """ - return self._shared_folders - - @shared_folders.setter - def shared_folders(self, shared_folders): - """Sets the shared_folders of this PersonDeleteOptions. 
- - - :param shared_folders: The shared_folders of this PersonDeleteOptions. # noqa: E501 - :type: SharedFolderOptions - """ - - self._shared_folders = shared_folders - - @property - def collections(self): - """Gets the collections of this PersonDeleteOptions. # noqa: E501 - - - :return: The collections of this PersonDeleteOptions. # noqa: E501 - :rtype: CollectionOptions - """ - return self._collections - - @collections.setter - def collections(self, collections): - """Sets the collections of this PersonDeleteOptions. - - - :param collections: The collections of this PersonDeleteOptions. # noqa: E501 - :type: CollectionOptions - """ - - self._collections = collections - - @property - def ratings(self): - """Gets the ratings of this PersonDeleteOptions. # noqa: E501 - - - :return: The ratings of this PersonDeleteOptions. # noqa: E501 - :rtype: DeleteOption - """ - return self._ratings - - @ratings.setter - def ratings(self, ratings): - """Sets the ratings of this PersonDeleteOptions. - - - :param ratings: The ratings of this PersonDeleteOptions. # noqa: E501 - :type: DeleteOption - """ - - self._ratings = ratings - - @property - def comments(self): - """Gets the comments of this PersonDeleteOptions. # noqa: E501 - - - :return: The comments of this PersonDeleteOptions. # noqa: E501 - :rtype: DeleteOption - """ - return self._comments - - @comments.setter - def comments(self, comments): - """Sets the comments of this PersonDeleteOptions. - - - :param comments: The comments of this PersonDeleteOptions. # noqa: E501 - :type: DeleteOption - """ - - self._comments = comments - - @property - def collection_feedback(self): - """Gets the collection_feedback of this PersonDeleteOptions. # noqa: E501 - - - :return: The collection_feedback of this PersonDeleteOptions. # noqa: E501 - :rtype: DeleteOption - """ - return self._collection_feedback - - @collection_feedback.setter - def collection_feedback(self, collection_feedback): - """Sets the collection_feedback of this PersonDeleteOptions. - - - :param collection_feedback: The collection_feedback of this PersonDeleteOptions. # noqa: E501 - :type: DeleteOption - """ - - self._collection_feedback = collection_feedback - - @property - def statistics(self): - """Gets the statistics of this PersonDeleteOptions. # noqa: E501 - - - :return: The statistics of this PersonDeleteOptions. # noqa: E501 - :rtype: DeleteOption - """ - return self._statistics - - @statistics.setter - def statistics(self, statistics): - """Sets the statistics of this PersonDeleteOptions. - - - :param statistics: The statistics of this PersonDeleteOptions. # noqa: E501 - :type: DeleteOption - """ - - self._statistics = statistics - - @property - def stream(self): - """Gets the stream of this PersonDeleteOptions. # noqa: E501 - - - :return: The stream of this PersonDeleteOptions. # noqa: E501 - :rtype: DeleteOption - """ - return self._stream - - @stream.setter - def stream(self, stream): - """Sets the stream of this PersonDeleteOptions. - - - :param stream: The stream of this PersonDeleteOptions. # noqa: E501 - :type: DeleteOption - """ - - self._stream = stream - - @property - def receiver(self): - """Gets the receiver of this PersonDeleteOptions. # noqa: E501 - - - :return: The receiver of this PersonDeleteOptions. # noqa: E501 - :rtype: str - """ - return self._receiver - - @receiver.setter - def receiver(self, receiver): - """Sets the receiver of this PersonDeleteOptions. - - - :param receiver: The receiver of this PersonDeleteOptions. 
# noqa: E501 - :type: str - """ - - self._receiver = receiver - - @property - def receiver_group(self): - """Gets the receiver_group of this PersonDeleteOptions. # noqa: E501 - - - :return: The receiver_group of this PersonDeleteOptions. # noqa: E501 - :rtype: str - """ - return self._receiver_group - - @receiver_group.setter - def receiver_group(self, receiver_group): - """Sets the receiver_group of this PersonDeleteOptions. - - - :param receiver_group: The receiver_group of this PersonDeleteOptions. # noqa: E501 - :type: str - """ - - self._receiver_group = receiver_group - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(PersonDeleteOptions, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, PersonDeleteOptions): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/person_delete_result.py b/edu_sharing_client/models/person_delete_result.py deleted file mode 100644 index 99e7946a..00000000 --- a/edu_sharing_client/models/person_delete_result.py +++ /dev/null @@ -1,319 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class PersonDeleteResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'authority_name': 'str', - 'deleted_name': 'str', - 'home_folder': 'dict(str, Counts)', - 'shared_folders': 'dict(str, Counts)', - 'collections': 'CollectionCounts', - 'comments': 'int', - 'ratings': 'int', - 'collection_feedback': 'int', - 'stream': 'int' - } - - attribute_map = { - 'authority_name': 'authorityName', - 'deleted_name': 'deletedName', - 'home_folder': 'homeFolder', - 'shared_folders': 'sharedFolders', - 'collections': 'collections', - 'comments': 'comments', - 'ratings': 'ratings', - 'collection_feedback': 'collectionFeedback', - 'stream': 'stream' - } - - def __init__(self, authority_name=None, deleted_name=None, home_folder=None, shared_folders=None, collections=None, comments=None, ratings=None, collection_feedback=None, stream=None): # noqa: E501 - """PersonDeleteResult - a model defined in Swagger""" # noqa: E501 - self._authority_name = None - self._deleted_name = None - self._home_folder = None - self._shared_folders = None - self._collections = None - self._comments = None - self._ratings = None - self._collection_feedback = None - self._stream = None - self.discriminator = None - if authority_name is not None: - self.authority_name = authority_name - if deleted_name is not None: - self.deleted_name = deleted_name - if home_folder is not None: - self.home_folder = home_folder - if shared_folders is not None: - self.shared_folders = shared_folders - if collections is not None: - self.collections = collections - if comments is not None: - self.comments = comments - if ratings is not None: - self.ratings = ratings - if collection_feedback is not None: - self.collection_feedback = collection_feedback - if stream is not None: - self.stream = stream - - @property - def authority_name(self): - """Gets the authority_name of this PersonDeleteResult. # noqa: E501 - - - :return: The authority_name of this PersonDeleteResult. # noqa: E501 - :rtype: str - """ - return self._authority_name - - @authority_name.setter - def authority_name(self, authority_name): - """Sets the authority_name of this PersonDeleteResult. - - - :param authority_name: The authority_name of this PersonDeleteResult. # noqa: E501 - :type: str - """ - - self._authority_name = authority_name - - @property - def deleted_name(self): - """Gets the deleted_name of this PersonDeleteResult. # noqa: E501 - - - :return: The deleted_name of this PersonDeleteResult. # noqa: E501 - :rtype: str - """ - return self._deleted_name - - @deleted_name.setter - def deleted_name(self, deleted_name): - """Sets the deleted_name of this PersonDeleteResult. - - - :param deleted_name: The deleted_name of this PersonDeleteResult. # noqa: E501 - :type: str - """ - - self._deleted_name = deleted_name - - @property - def home_folder(self): - """Gets the home_folder of this PersonDeleteResult. # noqa: E501 - - - :return: The home_folder of this PersonDeleteResult. # noqa: E501 - :rtype: dict(str, Counts) - """ - return self._home_folder - - @home_folder.setter - def home_folder(self, home_folder): - """Sets the home_folder of this PersonDeleteResult. - - - :param home_folder: The home_folder of this PersonDeleteResult. # noqa: E501 - :type: dict(str, Counts) - """ - - self._home_folder = home_folder - - @property - def shared_folders(self): - """Gets the shared_folders of this PersonDeleteResult. # noqa: E501 - - - :return: The shared_folders of this PersonDeleteResult. 
# noqa: E501 - :rtype: dict(str, Counts) - """ - return self._shared_folders - - @shared_folders.setter - def shared_folders(self, shared_folders): - """Sets the shared_folders of this PersonDeleteResult. - - - :param shared_folders: The shared_folders of this PersonDeleteResult. # noqa: E501 - :type: dict(str, Counts) - """ - - self._shared_folders = shared_folders - - @property - def collections(self): - """Gets the collections of this PersonDeleteResult. # noqa: E501 - - - :return: The collections of this PersonDeleteResult. # noqa: E501 - :rtype: CollectionCounts - """ - return self._collections - - @collections.setter - def collections(self, collections): - """Sets the collections of this PersonDeleteResult. - - - :param collections: The collections of this PersonDeleteResult. # noqa: E501 - :type: CollectionCounts - """ - - self._collections = collections - - @property - def comments(self): - """Gets the comments of this PersonDeleteResult. # noqa: E501 - - - :return: The comments of this PersonDeleteResult. # noqa: E501 - :rtype: int - """ - return self._comments - - @comments.setter - def comments(self, comments): - """Sets the comments of this PersonDeleteResult. - - - :param comments: The comments of this PersonDeleteResult. # noqa: E501 - :type: int - """ - - self._comments = comments - - @property - def ratings(self): - """Gets the ratings of this PersonDeleteResult. # noqa: E501 - - - :return: The ratings of this PersonDeleteResult. # noqa: E501 - :rtype: int - """ - return self._ratings - - @ratings.setter - def ratings(self, ratings): - """Sets the ratings of this PersonDeleteResult. - - - :param ratings: The ratings of this PersonDeleteResult. # noqa: E501 - :type: int - """ - - self._ratings = ratings - - @property - def collection_feedback(self): - """Gets the collection_feedback of this PersonDeleteResult. # noqa: E501 - - - :return: The collection_feedback of this PersonDeleteResult. # noqa: E501 - :rtype: int - """ - return self._collection_feedback - - @collection_feedback.setter - def collection_feedback(self, collection_feedback): - """Sets the collection_feedback of this PersonDeleteResult. - - - :param collection_feedback: The collection_feedback of this PersonDeleteResult. # noqa: E501 - :type: int - """ - - self._collection_feedback = collection_feedback - - @property - def stream(self): - """Gets the stream of this PersonDeleteResult. # noqa: E501 - - - :return: The stream of this PersonDeleteResult. # noqa: E501 - :rtype: int - """ - return self._stream - - @stream.setter - def stream(self, stream): - """Sets the stream of this PersonDeleteResult. - - - :param stream: The stream of this PersonDeleteResult. 
# noqa: E501 - :type: int - """ - - self._stream = stream - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(PersonDeleteResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, PersonDeleteResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/person_report.py b/edu_sharing_client/models/person_report.py deleted file mode 100644 index f53bfdfb..00000000 --- a/edu_sharing_client/models/person_report.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class PersonReport(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'options': 'PersonDeleteOptions', - 'results': 'list[PersonDeleteResult]' - } - - attribute_map = { - 'options': 'options', - 'results': 'results' - } - - def __init__(self, options=None, results=None): # noqa: E501 - """PersonReport - a model defined in Swagger""" # noqa: E501 - self._options = None - self._results = None - self.discriminator = None - if options is not None: - self.options = options - if results is not None: - self.results = results - - @property - def options(self): - """Gets the options of this PersonReport. # noqa: E501 - - - :return: The options of this PersonReport. # noqa: E501 - :rtype: PersonDeleteOptions - """ - return self._options - - @options.setter - def options(self, options): - """Sets the options of this PersonReport. - - - :param options: The options of this PersonReport. # noqa: E501 - :type: PersonDeleteOptions - """ - - self._options = options - - @property - def results(self): - """Gets the results of this PersonReport. # noqa: E501 - - - :return: The results of this PersonReport. # noqa: E501 - :rtype: list[PersonDeleteResult] - """ - return self._results - - @results.setter - def results(self, results): - """Sets the results of this PersonReport. - - - :param results: The results of this PersonReport. 
# noqa: E501 - :type: list[PersonDeleteResult] - """ - - self._results = results - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(PersonReport, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, PersonReport): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/preferences.py b/edu_sharing_client/models/preferences.py deleted file mode 100644 index b9477689..00000000 --- a/edu_sharing_client/models/preferences.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Preferences(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'preferences': 'str' - } - - attribute_map = { - 'preferences': 'preferences' - } - - def __init__(self, preferences=None): # noqa: E501 - """Preferences - a model defined in Swagger""" # noqa: E501 - self._preferences = None - self.discriminator = None - if preferences is not None: - self.preferences = preferences - - @property - def preferences(self): - """Gets the preferences of this Preferences. # noqa: E501 - - - :return: The preferences of this Preferences. # noqa: E501 - :rtype: str - """ - return self._preferences - - @preferences.setter - def preferences(self, preferences): - """Sets the preferences of this Preferences. - - - :param preferences: The preferences of this Preferences. 
# noqa: E501 - :type: str - """ - - self._preferences = preferences - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Preferences, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Preferences): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/preview.py b/edu_sharing_client/models/preview.py deleted file mode 100644 index 9aaabde6..00000000 --- a/edu_sharing_client/models/preview.py +++ /dev/null @@ -1,219 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Preview(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'is_icon': 'bool', - 'is_generated': 'bool', - 'url': 'str', - 'width': 'int', - 'height': 'int' - } - - attribute_map = { - 'is_icon': 'isIcon', - 'is_generated': 'isGenerated', - 'url': 'url', - 'width': 'width', - 'height': 'height' - } - - def __init__(self, is_icon=False, is_generated=False, url=None, width=None, height=None): # noqa: E501 - """Preview - a model defined in Swagger""" # noqa: E501 - self._is_icon = None - self._is_generated = None - self._url = None - self._width = None - self._height = None - self.discriminator = None - self.is_icon = is_icon - if is_generated is not None: - self.is_generated = is_generated - self.url = url - self.width = width - self.height = height - - @property - def is_icon(self): - """Gets the is_icon of this Preview. # noqa: E501 - - - :return: The is_icon of this Preview. # noqa: E501 - :rtype: bool - """ - return self._is_icon - - @is_icon.setter - def is_icon(self, is_icon): - """Sets the is_icon of this Preview. - - - :param is_icon: The is_icon of this Preview. # noqa: E501 - :type: bool - """ - if is_icon is None: - raise ValueError("Invalid value for `is_icon`, must not be `None`") # noqa: E501 - - self._is_icon = is_icon - - @property - def is_generated(self): - """Gets the is_generated of this Preview. # noqa: E501 - - - :return: The is_generated of this Preview. 
# noqa: E501 - :rtype: bool - """ - return self._is_generated - - @is_generated.setter - def is_generated(self, is_generated): - """Sets the is_generated of this Preview. - - - :param is_generated: The is_generated of this Preview. # noqa: E501 - :type: bool - """ - - self._is_generated = is_generated - - @property - def url(self): - """Gets the url of this Preview. # noqa: E501 - - - :return: The url of this Preview. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Preview. - - - :param url: The url of this Preview. # noqa: E501 - :type: str - """ - if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 - - self._url = url - - @property - def width(self): - """Gets the width of this Preview. # noqa: E501 - - - :return: The width of this Preview. # noqa: E501 - :rtype: int - """ - return self._width - - @width.setter - def width(self, width): - """Sets the width of this Preview. - - - :param width: The width of this Preview. # noqa: E501 - :type: int - """ - if width is None: - raise ValueError("Invalid value for `width`, must not be `None`") # noqa: E501 - - self._width = width - - @property - def height(self): - """Gets the height of this Preview. # noqa: E501 - - - :return: The height of this Preview. # noqa: E501 - :rtype: int - """ - return self._height - - @height.setter - def height(self, height): - """Sets the height of this Preview. - - - :param height: The height of this Preview. # noqa: E501 - :type: int - """ - if height is None: - raise ValueError("Invalid value for `height`, must not be `None`") # noqa: E501 - - self._height = height - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Preview, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Preview): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/profile.py b/edu_sharing_client/models/profile.py deleted file mode 100644 index 3b89db84..00000000 --- a/edu_sharing_client/models/profile.py +++ /dev/null @@ -1,215 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Profile(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'group_email': 'str', - 'mediacenter': 'MediacenterProfileExtension', - 'display_name': 'str', - 'group_type': 'str', - 'scope_type': 'str' - } - - attribute_map = { - 'group_email': 'groupEmail', - 'mediacenter': 'mediacenter', - 'display_name': 'displayName', - 'group_type': 'groupType', - 'scope_type': 'scopeType' - } - - def __init__(self, group_email=None, mediacenter=None, display_name=None, group_type=None, scope_type=None): # noqa: E501 - """Profile - a model defined in Swagger""" # noqa: E501 - self._group_email = None - self._mediacenter = None - self._display_name = None - self._group_type = None - self._scope_type = None - self.discriminator = None - if group_email is not None: - self.group_email = group_email - if mediacenter is not None: - self.mediacenter = mediacenter - if display_name is not None: - self.display_name = display_name - if group_type is not None: - self.group_type = group_type - if scope_type is not None: - self.scope_type = scope_type - - @property - def group_email(self): - """Gets the group_email of this Profile. # noqa: E501 - - - :return: The group_email of this Profile. # noqa: E501 - :rtype: str - """ - return self._group_email - - @group_email.setter - def group_email(self, group_email): - """Sets the group_email of this Profile. - - - :param group_email: The group_email of this Profile. # noqa: E501 - :type: str - """ - - self._group_email = group_email - - @property - def mediacenter(self): - """Gets the mediacenter of this Profile. # noqa: E501 - - - :return: The mediacenter of this Profile. # noqa: E501 - :rtype: MediacenterProfileExtension - """ - return self._mediacenter - - @mediacenter.setter - def mediacenter(self, mediacenter): - """Sets the mediacenter of this Profile. - - - :param mediacenter: The mediacenter of this Profile. # noqa: E501 - :type: MediacenterProfileExtension - """ - - self._mediacenter = mediacenter - - @property - def display_name(self): - """Gets the display_name of this Profile. # noqa: E501 - - - :return: The display_name of this Profile. # noqa: E501 - :rtype: str - """ - return self._display_name - - @display_name.setter - def display_name(self, display_name): - """Sets the display_name of this Profile. - - - :param display_name: The display_name of this Profile. # noqa: E501 - :type: str - """ - - self._display_name = display_name - - @property - def group_type(self): - """Gets the group_type of this Profile. # noqa: E501 - - - :return: The group_type of this Profile. # noqa: E501 - :rtype: str - """ - return self._group_type - - @group_type.setter - def group_type(self, group_type): - """Sets the group_type of this Profile. - - - :param group_type: The group_type of this Profile. # noqa: E501 - :type: str - """ - - self._group_type = group_type - - @property - def scope_type(self): - """Gets the scope_type of this Profile. # noqa: E501 - - - :return: The scope_type of this Profile. # noqa: E501 - :rtype: str - """ - return self._scope_type - - @scope_type.setter - def scope_type(self, scope_type): - """Sets the scope_type of this Profile. - - - :param scope_type: The scope_type of this Profile. 
# noqa: E501 - :type: str - """ - - self._scope_type = scope_type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Profile, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Profile): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/provider.py b/edu_sharing_client/models/provider.py deleted file mode 100644 index 90367073..00000000 --- a/edu_sharing_client/models/provider.py +++ /dev/null @@ -1,221 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Provider(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'legal_name': 'str', - 'url': 'str', - 'email': 'str', - 'area_served': 'str', - 'location': 'Location' - } - - attribute_map = { - 'legal_name': 'legalName', - 'url': 'url', - 'email': 'email', - 'area_served': 'areaServed', - 'location': 'location' - } - - def __init__(self, legal_name=None, url=None, email=None, area_served=None, location=None): # noqa: E501 - """Provider - a model defined in Swagger""" # noqa: E501 - self._legal_name = None - self._url = None - self._email = None - self._area_served = None - self._location = None - self.discriminator = None - if legal_name is not None: - self.legal_name = legal_name - if url is not None: - self.url = url - if email is not None: - self.email = email - if area_served is not None: - self.area_served = area_served - if location is not None: - self.location = location - - @property - def legal_name(self): - """Gets the legal_name of this Provider. # noqa: E501 - - - :return: The legal_name of this Provider. # noqa: E501 - :rtype: str - """ - return self._legal_name - - @legal_name.setter - def legal_name(self, legal_name): - """Sets the legal_name of this Provider. - - - :param legal_name: The legal_name of this Provider. # noqa: E501 - :type: str - """ - - self._legal_name = legal_name - - @property - def url(self): - """Gets the url of this Provider. # noqa: E501 - - - :return: The url of this Provider. 
# noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this Provider. - - - :param url: The url of this Provider. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def email(self): - """Gets the email of this Provider. # noqa: E501 - - - :return: The email of this Provider. # noqa: E501 - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this Provider. - - - :param email: The email of this Provider. # noqa: E501 - :type: str - """ - - self._email = email - - @property - def area_served(self): - """Gets the area_served of this Provider. # noqa: E501 - - - :return: The area_served of this Provider. # noqa: E501 - :rtype: str - """ - return self._area_served - - @area_served.setter - def area_served(self, area_served): - """Sets the area_served of this Provider. - - - :param area_served: The area_served of this Provider. # noqa: E501 - :type: str - """ - allowed_values = ["Organization", "City", "State", "Country", "Continent", "World"] # noqa: E501 - if area_served not in allowed_values: - raise ValueError( - "Invalid value for `area_served` ({0}), must be one of {1}" # noqa: E501 - .format(area_served, allowed_values) - ) - - self._area_served = area_served - - @property - def location(self): - """Gets the location of this Provider. # noqa: E501 - - - :return: The location of this Provider. # noqa: E501 - :rtype: Location - """ - return self._location - - @location.setter - def location(self, location): - """Sets the location of this Provider. - - - :param location: The location of this Provider. # noqa: E501 - :type: Location - """ - - self._location = location - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Provider, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Provider): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/query.py b/edu_sharing_client/models/query.py deleted file mode 100644 index 83c4780e..00000000 --- a/edu_sharing_client/models/query.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Query(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'condition': 'Condition', - 'query': 'str' - } - - attribute_map = { - 'condition': 'condition', - 'query': 'query' - } - - def __init__(self, condition=None, query=None): # noqa: E501 - """Query - a model defined in Swagger""" # noqa: E501 - self._condition = None - self._query = None - self.discriminator = None - if condition is not None: - self.condition = condition - if query is not None: - self.query = query - - @property - def condition(self): - """Gets the condition of this Query. # noqa: E501 - - - :return: The condition of this Query. # noqa: E501 - :rtype: Condition - """ - return self._condition - - @condition.setter - def condition(self, condition): - """Sets the condition of this Query. - - - :param condition: The condition of this Query. # noqa: E501 - :type: Condition - """ - - self._condition = condition - - @property - def query(self): - """Gets the query of this Query. # noqa: E501 - - - :return: The query of this Query. # noqa: E501 - :rtype: str - """ - return self._query - - @query.setter - def query(self, query): - """Sets the query of this Query. - - - :param query: The query of this Query. # noqa: E501 - :type: str - """ - - self._query = query - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Query, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Query): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/rating_data.py b/edu_sharing_client/models/rating_data.py deleted file mode 100644 index ce2b1221..00000000 --- a/edu_sharing_client/models/rating_data.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RatingData(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'sum': 'float', - 'count': 'int', - 'rating': 'float' - } - - attribute_map = { - 'sum': 'sum', - 'count': 'count', - 'rating': 'rating' - } - - def __init__(self, sum=None, count=None, rating=None): # noqa: E501 - """RatingData - a model defined in Swagger""" # noqa: E501 - self._sum = None - self._count = None - self._rating = None - self.discriminator = None - if sum is not None: - self.sum = sum - if count is not None: - self.count = count - if rating is not None: - self.rating = rating - - @property - def sum(self): - """Gets the sum of this RatingData. # noqa: E501 - - - :return: The sum of this RatingData. # noqa: E501 - :rtype: float - """ - return self._sum - - @sum.setter - def sum(self, sum): - """Sets the sum of this RatingData. - - - :param sum: The sum of this RatingData. # noqa: E501 - :type: float - """ - - self._sum = sum - - @property - def count(self): - """Gets the count of this RatingData. # noqa: E501 - - - :return: The count of this RatingData. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this RatingData. - - - :param count: The count of this RatingData. # noqa: E501 - :type: int - """ - - self._count = count - - @property - def rating(self): - """Gets the rating of this RatingData. # noqa: E501 - - - :return: The rating of this RatingData. # noqa: E501 - :rtype: float - """ - return self._rating - - @rating.setter - def rating(self, rating): - """Sets the rating of this RatingData. - - - :param rating: The rating of this RatingData. # noqa: E501 - :type: float - """ - - self._rating = rating - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RatingData, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RatingData): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/reference_entries.py b/edu_sharing_client/models/reference_entries.py deleted file mode 100644 index 8dd34601..00000000 --- a/edu_sharing_client/models/reference_entries.py +++ /dev/null @@ -1,138 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ReferenceEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'pagination': 'Pagination', - 'references': 'list[CollectionReference]' - } - - attribute_map = { - 'pagination': 'pagination', - 'references': 'references' - } - - def __init__(self, pagination=None, references=None): # noqa: E501 - """ReferenceEntries - a model defined in Swagger""" # noqa: E501 - self._pagination = None - self._references = None - self.discriminator = None - if pagination is not None: - self.pagination = pagination - self.references = references - - @property - def pagination(self): - """Gets the pagination of this ReferenceEntries. # noqa: E501 - - - :return: The pagination of this ReferenceEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this ReferenceEntries. - - - :param pagination: The pagination of this ReferenceEntries. # noqa: E501 - :type: Pagination - """ - - self._pagination = pagination - - @property - def references(self): - """Gets the references of this ReferenceEntries. # noqa: E501 - - - :return: The references of this ReferenceEntries. # noqa: E501 - :rtype: list[CollectionReference] - """ - return self._references - - @references.setter - def references(self, references): - """Sets the references of this ReferenceEntries. - - - :param references: The references of this ReferenceEntries. # noqa: E501 - :type: list[CollectionReference] - """ - if references is None: - raise ValueError("Invalid value for `references`, must not be `None`") # noqa: E501 - - self._references = references - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ReferenceEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ReferenceEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/register.py b/edu_sharing_client/models/register.py deleted file mode 100644 index 4f229f9a..00000000 --- a/edu_sharing_client/models/register.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Register(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'local': 'bool', - 'login_url': 'str', - 'recover_url': 'str', - 'required_fields': 'list[str]' - } - - attribute_map = { - 'local': 'local', - 'login_url': 'loginUrl', - 'recover_url': 'recoverUrl', - 'required_fields': 'requiredFields' - } - - def __init__(self, local=False, login_url=None, recover_url=None, required_fields=None): # noqa: E501 - """Register - a model defined in Swagger""" # noqa: E501 - self._local = None - self._login_url = None - self._recover_url = None - self._required_fields = None - self.discriminator = None - if local is not None: - self.local = local - if login_url is not None: - self.login_url = login_url - if recover_url is not None: - self.recover_url = recover_url - if required_fields is not None: - self.required_fields = required_fields - - @property - def local(self): - """Gets the local of this Register. # noqa: E501 - - - :return: The local of this Register. # noqa: E501 - :rtype: bool - """ - return self._local - - @local.setter - def local(self, local): - """Sets the local of this Register. - - - :param local: The local of this Register. # noqa: E501 - :type: bool - """ - - self._local = local - - @property - def login_url(self): - """Gets the login_url of this Register. # noqa: E501 - - - :return: The login_url of this Register. # noqa: E501 - :rtype: str - """ - return self._login_url - - @login_url.setter - def login_url(self, login_url): - """Sets the login_url of this Register. - - - :param login_url: The login_url of this Register. # noqa: E501 - :type: str - """ - - self._login_url = login_url - - @property - def recover_url(self): - """Gets the recover_url of this Register. # noqa: E501 - - - :return: The recover_url of this Register. # noqa: E501 - :rtype: str - """ - return self._recover_url - - @recover_url.setter - def recover_url(self, recover_url): - """Sets the recover_url of this Register. - - - :param recover_url: The recover_url of this Register. # noqa: E501 - :type: str - """ - - self._recover_url = recover_url - - @property - def required_fields(self): - """Gets the required_fields of this Register. # noqa: E501 - - - :return: The required_fields of this Register. # noqa: E501 - :rtype: list[str] - """ - return self._required_fields - - @required_fields.setter - def required_fields(self, required_fields): - """Sets the required_fields of this Register. - - - :param required_fields: The required_fields of this Register. 
# noqa: E501 - :type: list[str] - """ - - self._required_fields = required_fields - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Register, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Register): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/register_exists.py b/edu_sharing_client/models/register_exists.py deleted file mode 100644 index 2f25967e..00000000 --- a/edu_sharing_client/models/register_exists.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RegisterExists(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'exists': 'bool' - } - - attribute_map = { - 'exists': 'exists' - } - - def __init__(self, exists=False): # noqa: E501 - """RegisterExists - a model defined in Swagger""" # noqa: E501 - self._exists = None - self.discriminator = None - if exists is not None: - self.exists = exists - - @property - def exists(self): - """Gets the exists of this RegisterExists. # noqa: E501 - - - :return: The exists of this RegisterExists. # noqa: E501 - :rtype: bool - """ - return self._exists - - @exists.setter - def exists(self, exists): - """Sets the exists of this RegisterExists. - - - :param exists: The exists of this RegisterExists. 
# noqa: E501 - :type: bool - """ - - self._exists = exists - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RegisterExists, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RegisterExists): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/register_information.py b/edu_sharing_client/models/register_information.py deleted file mode 100644 index f9557c8c..00000000 --- a/edu_sharing_client/models/register_information.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RegisterInformation(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'first_name': 'str', - 'last_name': 'str', - 'email': 'str', - 'password': 'str', - 'organization': 'str', - 'allow_notifications': 'bool' - } - - attribute_map = { - 'first_name': 'firstName', - 'last_name': 'lastName', - 'email': 'email', - 'password': 'password', - 'organization': 'organization', - 'allow_notifications': 'allowNotifications' - } - - def __init__(self, first_name=None, last_name=None, email=None, password=None, organization=None, allow_notifications=False): # noqa: E501 - """RegisterInformation - a model defined in Swagger""" # noqa: E501 - self._first_name = None - self._last_name = None - self._email = None - self._password = None - self._organization = None - self._allow_notifications = None - self.discriminator = None - if first_name is not None: - self.first_name = first_name - if last_name is not None: - self.last_name = last_name - if email is not None: - self.email = email - if password is not None: - self.password = password - if organization is not None: - self.organization = organization - if allow_notifications is not None: - self.allow_notifications = allow_notifications - - @property - def first_name(self): - """Gets the first_name of this RegisterInformation. # noqa: E501 - - - :return: The first_name of this RegisterInformation. 
# noqa: E501 - :rtype: str - """ - return self._first_name - - @first_name.setter - def first_name(self, first_name): - """Sets the first_name of this RegisterInformation. - - - :param first_name: The first_name of this RegisterInformation. # noqa: E501 - :type: str - """ - - self._first_name = first_name - - @property - def last_name(self): - """Gets the last_name of this RegisterInformation. # noqa: E501 - - - :return: The last_name of this RegisterInformation. # noqa: E501 - :rtype: str - """ - return self._last_name - - @last_name.setter - def last_name(self, last_name): - """Sets the last_name of this RegisterInformation. - - - :param last_name: The last_name of this RegisterInformation. # noqa: E501 - :type: str - """ - - self._last_name = last_name - - @property - def email(self): - """Gets the email of this RegisterInformation. # noqa: E501 - - - :return: The email of this RegisterInformation. # noqa: E501 - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this RegisterInformation. - - - :param email: The email of this RegisterInformation. # noqa: E501 - :type: str - """ - - self._email = email - - @property - def password(self): - """Gets the password of this RegisterInformation. # noqa: E501 - - - :return: The password of this RegisterInformation. # noqa: E501 - :rtype: str - """ - return self._password - - @password.setter - def password(self, password): - """Sets the password of this RegisterInformation. - - - :param password: The password of this RegisterInformation. # noqa: E501 - :type: str - """ - - self._password = password - - @property - def organization(self): - """Gets the organization of this RegisterInformation. # noqa: E501 - - - :return: The organization of this RegisterInformation. # noqa: E501 - :rtype: str - """ - return self._organization - - @organization.setter - def organization(self, organization): - """Sets the organization of this RegisterInformation. - - - :param organization: The organization of this RegisterInformation. # noqa: E501 - :type: str - """ - - self._organization = organization - - @property - def allow_notifications(self): - """Gets the allow_notifications of this RegisterInformation. # noqa: E501 - - - :return: The allow_notifications of this RegisterInformation. # noqa: E501 - :rtype: bool - """ - return self._allow_notifications - - @allow_notifications.setter - def allow_notifications(self, allow_notifications): - """Sets the allow_notifications of this RegisterInformation. - - - :param allow_notifications: The allow_notifications of this RegisterInformation. 
# noqa: E501 - :type: bool - """ - - self._allow_notifications = allow_notifications - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RegisterInformation, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RegisterInformation): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/remote.py b/edu_sharing_client/models/remote.py deleted file mode 100644 index 9b706116..00000000 --- a/edu_sharing_client/models/remote.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Remote(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'repository': 'Repo', - 'id': 'str' - } - - attribute_map = { - 'repository': 'repository', - 'id': 'id' - } - - def __init__(self, repository=None, id=None): # noqa: E501 - """Remote - a model defined in Swagger""" # noqa: E501 - self._repository = None - self._id = None - self.discriminator = None - if repository is not None: - self.repository = repository - if id is not None: - self.id = id - - @property - def repository(self): - """Gets the repository of this Remote. # noqa: E501 - - - :return: The repository of this Remote. # noqa: E501 - :rtype: Repo - """ - return self._repository - - @repository.setter - def repository(self, repository): - """Sets the repository of this Remote. - - - :param repository: The repository of this Remote. # noqa: E501 - :type: Repo - """ - - self._repository = repository - - @property - def id(self): - """Gets the id of this Remote. # noqa: E501 - - - :return: The id of this Remote. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this Remote. - - - :param id: The id of this Remote. 
# noqa: E501 - :type: str - """ - - self._id = id - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Remote, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Remote): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/remote_auth_description.py b/edu_sharing_client/models/remote_auth_description.py deleted file mode 100644 index a577906a..00000000 --- a/edu_sharing_client/models/remote_auth_description.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RemoteAuthDescription(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'url': 'str', - 'token': 'str' - } - - attribute_map = { - 'url': 'url', - 'token': 'token' - } - - def __init__(self, url=None, token=None): # noqa: E501 - """RemoteAuthDescription - a model defined in Swagger""" # noqa: E501 - self._url = None - self._token = None - self.discriminator = None - if url is not None: - self.url = url - if token is not None: - self.token = token - - @property - def url(self): - """Gets the url of this RemoteAuthDescription. # noqa: E501 - - - :return: The url of this RemoteAuthDescription. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this RemoteAuthDescription. - - - :param url: The url of this RemoteAuthDescription. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def token(self): - """Gets the token of this RemoteAuthDescription. # noqa: E501 - - - :return: The token of this RemoteAuthDescription. # noqa: E501 - :rtype: str - """ - return self._token - - @token.setter - def token(self, token): - """Sets the token of this RemoteAuthDescription. - - - :param token: The token of this RemoteAuthDescription. 
# noqa: E501 - :type: str - """ - - self._token = token - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RemoteAuthDescription, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RemoteAuthDescription): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/rendering.py b/edu_sharing_client/models/rendering.py deleted file mode 100644 index 44bf1871..00000000 --- a/edu_sharing_client/models/rendering.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Rendering(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - } - - attribute_map = { - } - - def __init__(self): # noqa: E501 - """Rendering - a model defined in Swagger""" # noqa: E501 - self.discriminator = None - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Rendering, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Rendering): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/rendering_details_entry.py b/edu_sharing_client/models/rendering_details_entry.py deleted file mode 100644 index 480b7193..00000000 --- a/edu_sharing_client/models/rendering_details_entry.py +++ /dev/null @@ -1,166 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RenderingDetailsEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'details_snippet': 'str', - 'mime_type': 'str', - 'node': 'Node' - } - - attribute_map = { - 'details_snippet': 'detailsSnippet', - 'mime_type': 'mimeType', - 'node': 'node' - } - - def __init__(self, details_snippet=None, mime_type=None, node=None): # noqa: E501 - """RenderingDetailsEntry - a model defined in Swagger""" # noqa: E501 - self._details_snippet = None - self._mime_type = None - self._node = None - self.discriminator = None - self.details_snippet = details_snippet - self.mime_type = mime_type - self.node = node - - @property - def details_snippet(self): - """Gets the details_snippet of this RenderingDetailsEntry. # noqa: E501 - - - :return: The details_snippet of this RenderingDetailsEntry. # noqa: E501 - :rtype: str - """ - return self._details_snippet - - @details_snippet.setter - def details_snippet(self, details_snippet): - """Sets the details_snippet of this RenderingDetailsEntry. - - - :param details_snippet: The details_snippet of this RenderingDetailsEntry. # noqa: E501 - :type: str - """ - if details_snippet is None: - raise ValueError("Invalid value for `details_snippet`, must not be `None`") # noqa: E501 - - self._details_snippet = details_snippet - - @property - def mime_type(self): - """Gets the mime_type of this RenderingDetailsEntry. 
# noqa: E501 - - - :return: The mime_type of this RenderingDetailsEntry. # noqa: E501 - :rtype: str - """ - return self._mime_type - - @mime_type.setter - def mime_type(self, mime_type): - """Sets the mime_type of this RenderingDetailsEntry. - - - :param mime_type: The mime_type of this RenderingDetailsEntry. # noqa: E501 - :type: str - """ - if mime_type is None: - raise ValueError("Invalid value for `mime_type`, must not be `None`") # noqa: E501 - - self._mime_type = mime_type - - @property - def node(self): - """Gets the node of this RenderingDetailsEntry. # noqa: E501 - - - :return: The node of this RenderingDetailsEntry. # noqa: E501 - :rtype: Node - """ - return self._node - - @node.setter - def node(self, node): - """Sets the node of this RenderingDetailsEntry. - - - :param node: The node of this RenderingDetailsEntry. # noqa: E501 - :type: Node - """ - if node is None: - raise ValueError("Invalid value for `node`, must not be `None`") # noqa: E501 - - self._node = node - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RenderingDetailsEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RenderingDetailsEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/repo.py b/edu_sharing_client/models/repo.py deleted file mode 100644 index d712ae88..00000000 --- a/edu_sharing_client/models/repo.py +++ /dev/null @@ -1,267 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Repo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'repository_type': 'str', - 'rendering_supported': 'bool', - 'id': 'str', - 'title': 'str', - 'icon': 'str', - 'logo': 'str', - 'is_home_repo': 'bool' - } - - attribute_map = { - 'repository_type': 'repositoryType', - 'rendering_supported': 'renderingSupported', - 'id': 'id', - 'title': 'title', - 'icon': 'icon', - 'logo': 'logo', - 'is_home_repo': 'isHomeRepo' - } - - def __init__(self, repository_type=None, rendering_supported=False, id=None, title=None, icon=None, logo=None, is_home_repo=False): # noqa: E501 - """Repo - a model defined in Swagger""" # noqa: E501 - self._repository_type = None - self._rendering_supported = None - self._id = None - self._title = None - self._icon = None - self._logo = None - self._is_home_repo = None - self.discriminator = None - if repository_type is not None: - self.repository_type = repository_type - if rendering_supported is not None: - self.rendering_supported = rendering_supported - if id is not None: - self.id = id - if title is not None: - self.title = title - if icon is not None: - self.icon = icon - if logo is not None: - self.logo = logo - if is_home_repo is not None: - self.is_home_repo = is_home_repo - - @property - def repository_type(self): - """Gets the repository_type of this Repo. # noqa: E501 - - - :return: The repository_type of this Repo. # noqa: E501 - :rtype: str - """ - return self._repository_type - - @repository_type.setter - def repository_type(self, repository_type): - """Sets the repository_type of this Repo. - - - :param repository_type: The repository_type of this Repo. # noqa: E501 - :type: str - """ - - self._repository_type = repository_type - - @property - def rendering_supported(self): - """Gets the rendering_supported of this Repo. # noqa: E501 - - - :return: The rendering_supported of this Repo. # noqa: E501 - :rtype: bool - """ - return self._rendering_supported - - @rendering_supported.setter - def rendering_supported(self, rendering_supported): - """Sets the rendering_supported of this Repo. - - - :param rendering_supported: The rendering_supported of this Repo. # noqa: E501 - :type: bool - """ - - self._rendering_supported = rendering_supported - - @property - def id(self): - """Gets the id of this Repo. # noqa: E501 - - - :return: The id of this Repo. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this Repo. - - - :param id: The id of this Repo. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def title(self): - """Gets the title of this Repo. # noqa: E501 - - - :return: The title of this Repo. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this Repo. - - - :param title: The title of this Repo. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def icon(self): - """Gets the icon of this Repo. # noqa: E501 - - - :return: The icon of this Repo. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this Repo. - - - :param icon: The icon of this Repo. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def logo(self): - """Gets the logo of this Repo. # noqa: E501 - - - :return: The logo of this Repo. # noqa: E501 - :rtype: str - """ - return self._logo - - @logo.setter - def logo(self, logo): - """Sets the logo of this Repo. - - - :param logo: The logo of this Repo. 
# noqa: E501 - :type: str - """ - - self._logo = logo - - @property - def is_home_repo(self): - """Gets the is_home_repo of this Repo. # noqa: E501 - - - :return: The is_home_repo of this Repo. # noqa: E501 - :rtype: bool - """ - return self._is_home_repo - - @is_home_repo.setter - def is_home_repo(self, is_home_repo): - """Sets the is_home_repo of this Repo. - - - :param is_home_repo: The is_home_repo of this Repo. # noqa: E501 - :type: bool - """ - - self._is_home_repo = is_home_repo - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Repo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Repo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/repo_entries.py b/edu_sharing_client/models/repo_entries.py deleted file mode 100644 index e04a22c8..00000000 --- a/edu_sharing_client/models/repo_entries.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RepoEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'repositories': 'list[Repo]' - } - - attribute_map = { - 'repositories': 'repositories' - } - - def __init__(self, repositories=None): # noqa: E501 - """RepoEntries - a model defined in Swagger""" # noqa: E501 - self._repositories = None - self.discriminator = None - self.repositories = repositories - - @property - def repositories(self): - """Gets the repositories of this RepoEntries. # noqa: E501 - - - :return: The repositories of this RepoEntries. # noqa: E501 - :rtype: list[Repo] - """ - return self._repositories - - @repositories.setter - def repositories(self, repositories): - """Sets the repositories of this RepoEntries. - - - :param repositories: The repositories of this RepoEntries. 
# noqa: E501 - :type: list[Repo] - """ - if repositories is None: - raise ValueError("Invalid value for `repositories`, must not be `None`") # noqa: E501 - - self._repositories = repositories - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RepoEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RepoEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/repository_config.py b/edu_sharing_client/models/repository_config.py deleted file mode 100644 index 40b9a0be..00000000 --- a/edu_sharing_client/models/repository_config.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RepositoryConfig(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'frontpage': 'Frontpage' - } - - attribute_map = { - 'frontpage': 'frontpage' - } - - def __init__(self, frontpage=None): # noqa: E501 - """RepositoryConfig - a model defined in Swagger""" # noqa: E501 - self._frontpage = None - self.discriminator = None - if frontpage is not None: - self.frontpage = frontpage - - @property - def frontpage(self): - """Gets the frontpage of this RepositoryConfig. # noqa: E501 - - - :return: The frontpage of this RepositoryConfig. # noqa: E501 - :rtype: Frontpage - """ - return self._frontpage - - @frontpage.setter - def frontpage(self, frontpage): - """Sets the frontpage of this RepositoryConfig. - - - :param frontpage: The frontpage of this RepositoryConfig. 
# noqa: E501 - :type: Frontpage - """ - - self._frontpage = frontpage - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RepositoryConfig, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RepositoryConfig): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/restore_result.py b/edu_sharing_client/models/restore_result.py deleted file mode 100644 index cc95c1e2..00000000 --- a/edu_sharing_client/models/restore_result.py +++ /dev/null @@ -1,247 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RestoreResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'archive_node_id': 'str', - 'node_id': 'str', - 'parent': 'str', - 'path': 'str', - 'name': 'str', - 'restore_status': 'str' - } - - attribute_map = { - 'archive_node_id': 'archiveNodeId', - 'node_id': 'nodeId', - 'parent': 'parent', - 'path': 'path', - 'name': 'name', - 'restore_status': 'restoreStatus' - } - - def __init__(self, archive_node_id=None, node_id=None, parent=None, path=None, name=None, restore_status=None): # noqa: E501 - """RestoreResult - a model defined in Swagger""" # noqa: E501 - self._archive_node_id = None - self._node_id = None - self._parent = None - self._path = None - self._name = None - self._restore_status = None - self.discriminator = None - self.archive_node_id = archive_node_id - self.node_id = node_id - self.parent = parent - self.path = path - self.name = name - self.restore_status = restore_status - - @property - def archive_node_id(self): - """Gets the archive_node_id of this RestoreResult. # noqa: E501 - - - :return: The archive_node_id of this RestoreResult. # noqa: E501 - :rtype: str - """ - return self._archive_node_id - - @archive_node_id.setter - def archive_node_id(self, archive_node_id): - """Sets the archive_node_id of this RestoreResult. - - - :param archive_node_id: The archive_node_id of this RestoreResult. 
# noqa: E501 - :type: str - """ - if archive_node_id is None: - raise ValueError("Invalid value for `archive_node_id`, must not be `None`") # noqa: E501 - - self._archive_node_id = archive_node_id - - @property - def node_id(self): - """Gets the node_id of this RestoreResult. # noqa: E501 - - - :return: The node_id of this RestoreResult. # noqa: E501 - :rtype: str - """ - return self._node_id - - @node_id.setter - def node_id(self, node_id): - """Sets the node_id of this RestoreResult. - - - :param node_id: The node_id of this RestoreResult. # noqa: E501 - :type: str - """ - if node_id is None: - raise ValueError("Invalid value for `node_id`, must not be `None`") # noqa: E501 - - self._node_id = node_id - - @property - def parent(self): - """Gets the parent of this RestoreResult. # noqa: E501 - - - :return: The parent of this RestoreResult. # noqa: E501 - :rtype: str - """ - return self._parent - - @parent.setter - def parent(self, parent): - """Sets the parent of this RestoreResult. - - - :param parent: The parent of this RestoreResult. # noqa: E501 - :type: str - """ - if parent is None: - raise ValueError("Invalid value for `parent`, must not be `None`") # noqa: E501 - - self._parent = parent - - @property - def path(self): - """Gets the path of this RestoreResult. # noqa: E501 - - - :return: The path of this RestoreResult. # noqa: E501 - :rtype: str - """ - return self._path - - @path.setter - def path(self, path): - """Sets the path of this RestoreResult. - - - :param path: The path of this RestoreResult. # noqa: E501 - :type: str - """ - if path is None: - raise ValueError("Invalid value for `path`, must not be `None`") # noqa: E501 - - self._path = path - - @property - def name(self): - """Gets the name of this RestoreResult. # noqa: E501 - - - :return: The name of this RestoreResult. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this RestoreResult. - - - :param name: The name of this RestoreResult. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def restore_status(self): - """Gets the restore_status of this RestoreResult. # noqa: E501 - - - :return: The restore_status of this RestoreResult. # noqa: E501 - :rtype: str - """ - return self._restore_status - - @restore_status.setter - def restore_status(self, restore_status): - """Sets the restore_status of this RestoreResult. - - - :param restore_status: The restore_status of this RestoreResult. 
# noqa: E501 - :type: str - """ - if restore_status is None: - raise ValueError("Invalid value for `restore_status`, must not be `None`") # noqa: E501 - - self._restore_status = restore_status - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RestoreResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RestoreResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/restore_results.py b/edu_sharing_client/models/restore_results.py deleted file mode 100644 index f3553c5b..00000000 --- a/edu_sharing_client/models/restore_results.py +++ /dev/null @@ -1,112 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class RestoreResults(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'results': 'list[RestoreResult]' - } - - attribute_map = { - 'results': 'results' - } - - def __init__(self, results=None): # noqa: E501 - """RestoreResults - a model defined in Swagger""" # noqa: E501 - self._results = None - self.discriminator = None - self.results = results - - @property - def results(self): - """Gets the results of this RestoreResults. # noqa: E501 - - - :return: The results of this RestoreResults. # noqa: E501 - :rtype: list[RestoreResult] - """ - return self._results - - @results.setter - def results(self, results): - """Sets the results of this RestoreResults. - - - :param results: The results of this RestoreResults. 
# noqa: E501 - :type: list[RestoreResult] - """ - if results is None: - raise ValueError("Invalid value for `results`, must not be `None`") # noqa: E501 - - self._results = results - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(RestoreResults, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, RestoreResults): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/search_parameters.py b/edu_sharing_client/models/search_parameters.py deleted file mode 100644 index 9ed15639..00000000 --- a/edu_sharing_client/models/search_parameters.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SearchParameters(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'criterias': 'list[MdsQueryCriteria]', - 'facettes': 'list[str]' - } - - attribute_map = { - 'criterias': 'criterias', - 'facettes': 'facettes' - } - - def __init__(self, criterias=None, facettes=None): # noqa: E501 - """SearchParameters - a model defined in Swagger""" # noqa: E501 - self._criterias = None - self._facettes = None - self.discriminator = None - self.criterias = criterias - self.facettes = facettes - - @property - def criterias(self): - """Gets the criterias of this SearchParameters. # noqa: E501 - - - :return: The criterias of this SearchParameters. # noqa: E501 - :rtype: list[MdsQueryCriteria] - """ - return self._criterias - - @criterias.setter - def criterias(self, criterias): - """Sets the criterias of this SearchParameters. - - - :param criterias: The criterias of this SearchParameters. # noqa: E501 - :type: list[MdsQueryCriteria] - """ - if criterias is None: - raise ValueError("Invalid value for `criterias`, must not be `None`") # noqa: E501 - - self._criterias = criterias - - @property - def facettes(self): - """Gets the facettes of this SearchParameters. # noqa: E501 - - - :return: The facettes of this SearchParameters. 
# noqa: E501 - :rtype: list[str] - """ - return self._facettes - - @facettes.setter - def facettes(self, facettes): - """Sets the facettes of this SearchParameters. - - - :param facettes: The facettes of this SearchParameters. # noqa: E501 - :type: list[str] - """ - if facettes is None: - raise ValueError("Invalid value for `facettes`, must not be `None`") # noqa: E501 - - self._facettes = facettes - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SearchParameters, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SearchParameters): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/search_result.py b/edu_sharing_client/models/search_result.py deleted file mode 100644 index 2a244a1c..00000000 --- a/edu_sharing_client/models/search_result.py +++ /dev/null @@ -1,192 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SearchResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'nodes': 'list[object]', - 'pagination': 'Pagination', - 'facettes': 'list[Facette]', - 'ignored': 'list[str]' - } - - attribute_map = { - 'nodes': 'nodes', - 'pagination': 'pagination', - 'facettes': 'facettes', - 'ignored': 'ignored' - } - - def __init__(self, nodes=None, pagination=None, facettes=None, ignored=None): # noqa: E501 - """SearchResult - a model defined in Swagger""" # noqa: E501 - self._nodes = None - self._pagination = None - self._facettes = None - self._ignored = None - self.discriminator = None - self.nodes = nodes - self.pagination = pagination - self.facettes = facettes - if ignored is not None: - self.ignored = ignored - - @property - def nodes(self): - """Gets the nodes of this SearchResult. # noqa: E501 - - - :return: The nodes of this SearchResult. # noqa: E501 - :rtype: list[object] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this SearchResult. - - - :param nodes: The nodes of this SearchResult. 
# noqa: E501 - :type: list[object] - """ - if nodes is None: - raise ValueError("Invalid value for `nodes`, must not be `None`") # noqa: E501 - - self._nodes = nodes - - @property - def pagination(self): - """Gets the pagination of this SearchResult. # noqa: E501 - - - :return: The pagination of this SearchResult. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this SearchResult. - - - :param pagination: The pagination of this SearchResult. # noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - @property - def facettes(self): - """Gets the facettes of this SearchResult. # noqa: E501 - - - :return: The facettes of this SearchResult. # noqa: E501 - :rtype: list[Facette] - """ - return self._facettes - - @facettes.setter - def facettes(self, facettes): - """Sets the facettes of this SearchResult. - - - :param facettes: The facettes of this SearchResult. # noqa: E501 - :type: list[Facette] - """ - if facettes is None: - raise ValueError("Invalid value for `facettes`, must not be `None`") # noqa: E501 - - self._facettes = facettes - - @property - def ignored(self): - """Gets the ignored of this SearchResult. # noqa: E501 - - - :return: The ignored of this SearchResult. # noqa: E501 - :rtype: list[str] - """ - return self._ignored - - @ignored.setter - def ignored(self, ignored): - """Sets the ignored of this SearchResult. - - - :param ignored: The ignored of this SearchResult. # noqa: E501 - :type: list[str] - """ - - self._ignored = ignored - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SearchResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SearchResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/search_result_node.py b/edu_sharing_client/models/search_result_node.py deleted file mode 100644 index 0b5ce477..00000000 --- a/edu_sharing_client/models/search_result_node.py +++ /dev/null @@ -1,192 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SearchResultNode(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'nodes': 'list[Node]', - 'pagination': 'Pagination', - 'facettes': 'list[Facette]', - 'ignored': 'list[str]' - } - - attribute_map = { - 'nodes': 'nodes', - 'pagination': 'pagination', - 'facettes': 'facettes', - 'ignored': 'ignored' - } - - def __init__(self, nodes=None, pagination=None, facettes=None, ignored=None): # noqa: E501 - """SearchResultNode - a model defined in Swagger""" # noqa: E501 - self._nodes = None - self._pagination = None - self._facettes = None - self._ignored = None - self.discriminator = None - self.nodes = nodes - self.pagination = pagination - self.facettes = facettes - if ignored is not None: - self.ignored = ignored - - @property - def nodes(self): - """Gets the nodes of this SearchResultNode. # noqa: E501 - - - :return: The nodes of this SearchResultNode. # noqa: E501 - :rtype: list[Node] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this SearchResultNode. - - - :param nodes: The nodes of this SearchResultNode. # noqa: E501 - :type: list[Node] - """ - if nodes is None: - raise ValueError("Invalid value for `nodes`, must not be `None`") # noqa: E501 - - self._nodes = nodes - - @property - def pagination(self): - """Gets the pagination of this SearchResultNode. # noqa: E501 - - - :return: The pagination of this SearchResultNode. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this SearchResultNode. - - - :param pagination: The pagination of this SearchResultNode. # noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - @property - def facettes(self): - """Gets the facettes of this SearchResultNode. # noqa: E501 - - - :return: The facettes of this SearchResultNode. # noqa: E501 - :rtype: list[Facette] - """ - return self._facettes - - @facettes.setter - def facettes(self, facettes): - """Sets the facettes of this SearchResultNode. - - - :param facettes: The facettes of this SearchResultNode. # noqa: E501 - :type: list[Facette] - """ - if facettes is None: - raise ValueError("Invalid value for `facettes`, must not be `None`") # noqa: E501 - - self._facettes = facettes - - @property - def ignored(self): - """Gets the ignored of this SearchResultNode. # noqa: E501 - - - :return: The ignored of this SearchResultNode. # noqa: E501 - :rtype: list[str] - """ - return self._ignored - - @ignored.setter - def ignored(self, ignored): - """Sets the ignored of this SearchResultNode. - - - :param ignored: The ignored of this SearchResultNode. 
# noqa: E501 - :type: list[str] - """ - - self._ignored = ignored - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SearchResultNode, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SearchResultNode): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/serializable.py b/edu_sharing_client/models/serializable.py deleted file mode 100644 index fa045745..00000000 --- a/edu_sharing_client/models/serializable.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Serializable(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - } - - attribute_map = { - } - - def __init__(self): # noqa: E501 - """Serializable - a model defined in Swagger""" # noqa: E501 - self.discriminator = None - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Serializable, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Serializable): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/server_update_info.py b/edu_sharing_client/models/server_update_info.py deleted file mode 100644 index 259004a9..00000000 --- a/edu_sharing_client/models/server_update_info.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ServerUpdateInfo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'description': 'str', - 'executed_at': 'int' - } - - attribute_map = { - 'id': 'id', - 'description': 'description', - 'executed_at': 'executedAt' - } - - def __init__(self, id=None, description=None, executed_at=None): # noqa: E501 - """ServerUpdateInfo - a model defined in Swagger""" # noqa: E501 - self._id = None - self._description = None - self._executed_at = None - self.discriminator = None - if id is not None: - self.id = id - if description is not None: - self.description = description - if executed_at is not None: - self.executed_at = executed_at - - @property - def id(self): - """Gets the id of this ServerUpdateInfo. # noqa: E501 - - - :return: The id of this ServerUpdateInfo. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this ServerUpdateInfo. - - - :param id: The id of this ServerUpdateInfo. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def description(self): - """Gets the description of this ServerUpdateInfo. # noqa: E501 - - - :return: The description of this ServerUpdateInfo. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this ServerUpdateInfo. 
- - - :param description: The description of this ServerUpdateInfo. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def executed_at(self): - """Gets the executed_at of this ServerUpdateInfo. # noqa: E501 - - - :return: The executed_at of this ServerUpdateInfo. # noqa: E501 - :rtype: int - """ - return self._executed_at - - @executed_at.setter - def executed_at(self, executed_at): - """Sets the executed_at of this ServerUpdateInfo. - - - :param executed_at: The executed_at of this ServerUpdateInfo. # noqa: E501 - :type: int - """ - - self._executed_at = executed_at - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ServerUpdateInfo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ServerUpdateInfo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/service.py b/edu_sharing_client/models/service.py deleted file mode 100644 index 83d2bc67..00000000 --- a/edu_sharing_client/models/service.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Service(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'name': 'str', - 'instances': 'list[ServiceInstance]' - } - - attribute_map = { - 'name': 'name', - 'instances': 'instances' - } - - def __init__(self, name=None, instances=None): # noqa: E501 - """Service - a model defined in Swagger""" # noqa: E501 - self._name = None - self._instances = None - self.discriminator = None - self.name = name - self.instances = instances - - @property - def name(self): - """Gets the name of this Service. # noqa: E501 - - - :return: The name of this Service. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this Service. - - - :param name: The name of this Service. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - - self._name = name - - @property - def instances(self): - """Gets the instances of this Service. 
# noqa: E501 - - - :return: The instances of this Service. # noqa: E501 - :rtype: list[ServiceInstance] - """ - return self._instances - - @instances.setter - def instances(self, instances): - """Sets the instances of this Service. - - - :param instances: The instances of this Service. # noqa: E501 - :type: list[ServiceInstance] - """ - if instances is None: - raise ValueError("Invalid value for `instances`, must not be `None`") # noqa: E501 - - self._instances = instances - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Service, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Service): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/service_instance.py b/edu_sharing_client/models/service_instance.py deleted file mode 100644 index c26c8287..00000000 --- a/edu_sharing_client/models/service_instance.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ServiceInstance(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'version': 'ServiceVersion', - 'endpoint': 'str' - } - - attribute_map = { - 'version': 'version', - 'endpoint': 'endpoint' - } - - def __init__(self, version=None, endpoint=None): # noqa: E501 - """ServiceInstance - a model defined in Swagger""" # noqa: E501 - self._version = None - self._endpoint = None - self.discriminator = None - self.version = version - self.endpoint = endpoint - - @property - def version(self): - """Gets the version of this ServiceInstance. # noqa: E501 - - - :return: The version of this ServiceInstance. # noqa: E501 - :rtype: ServiceVersion - """ - return self._version - - @version.setter - def version(self, version): - """Sets the version of this ServiceInstance. - - - :param version: The version of this ServiceInstance. # noqa: E501 - :type: ServiceVersion - """ - if version is None: - raise ValueError("Invalid value for `version`, must not be `None`") # noqa: E501 - - self._version = version - - @property - def endpoint(self): - """Gets the endpoint of this ServiceInstance. 
# noqa: E501 - - - :return: The endpoint of this ServiceInstance. # noqa: E501 - :rtype: str - """ - return self._endpoint - - @endpoint.setter - def endpoint(self, endpoint): - """Sets the endpoint of this ServiceInstance. - - - :param endpoint: The endpoint of this ServiceInstance. # noqa: E501 - :type: str - """ - if endpoint is None: - raise ValueError("Invalid value for `endpoint`, must not be `None`") # noqa: E501 - - self._endpoint = endpoint - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ServiceInstance, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ServiceInstance): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/service_version.py b/edu_sharing_client/models/service_version.py deleted file mode 100644 index d50a0869..00000000 --- a/edu_sharing_client/models/service_version.py +++ /dev/null @@ -1,191 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ServiceVersion(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'repository': 'str', - 'renderservice': 'str', - 'major': 'int', - 'minor': 'int' - } - - attribute_map = { - 'repository': 'repository', - 'renderservice': 'renderservice', - 'major': 'major', - 'minor': 'minor' - } - - def __init__(self, repository=None, renderservice=None, major=None, minor=None): # noqa: E501 - """ServiceVersion - a model defined in Swagger""" # noqa: E501 - self._repository = None - self._renderservice = None - self._major = None - self._minor = None - self.discriminator = None - if repository is not None: - self.repository = repository - if renderservice is not None: - self.renderservice = renderservice - self.major = major - self.minor = minor - - @property - def repository(self): - """Gets the repository of this ServiceVersion. # noqa: E501 - - - :return: The repository of this ServiceVersion. # noqa: E501 - :rtype: str - """ - return self._repository - - @repository.setter - def repository(self, repository): - """Sets the repository of this ServiceVersion. 
- - - :param repository: The repository of this ServiceVersion. # noqa: E501 - :type: str - """ - - self._repository = repository - - @property - def renderservice(self): - """Gets the renderservice of this ServiceVersion. # noqa: E501 - - - :return: The renderservice of this ServiceVersion. # noqa: E501 - :rtype: str - """ - return self._renderservice - - @renderservice.setter - def renderservice(self, renderservice): - """Sets the renderservice of this ServiceVersion. - - - :param renderservice: The renderservice of this ServiceVersion. # noqa: E501 - :type: str - """ - - self._renderservice = renderservice - - @property - def major(self): - """Gets the major of this ServiceVersion. # noqa: E501 - - - :return: The major of this ServiceVersion. # noqa: E501 - :rtype: int - """ - return self._major - - @major.setter - def major(self, major): - """Sets the major of this ServiceVersion. - - - :param major: The major of this ServiceVersion. # noqa: E501 - :type: int - """ - if major is None: - raise ValueError("Invalid value for `major`, must not be `None`") # noqa: E501 - - self._major = major - - @property - def minor(self): - """Gets the minor of this ServiceVersion. # noqa: E501 - - - :return: The minor of this ServiceVersion. # noqa: E501 - :rtype: int - """ - return self._minor - - @minor.setter - def minor(self, minor): - """Sets the minor of this ServiceVersion. - - - :param minor: The minor of this ServiceVersion. # noqa: E501 - :type: int - """ - if minor is None: - raise ValueError("Invalid value for `minor`, must not be `None`") # noqa: E501 - - self._minor = minor - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ServiceVersion, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ServiceVersion): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/services.py b/edu_sharing_client/models/services.py deleted file mode 100644 index 01b5a353..00000000 --- a/edu_sharing_client/models/services.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Services(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'visualization': 'str' - } - - attribute_map = { - 'visualization': 'visualization' - } - - def __init__(self, visualization=None): # noqa: E501 - """Services - a model defined in Swagger""" # noqa: E501 - self._visualization = None - self.discriminator = None - if visualization is not None: - self.visualization = visualization - - @property - def visualization(self): - """Gets the visualization of this Services. # noqa: E501 - - - :return: The visualization of this Services. # noqa: E501 - :rtype: str - """ - return self._visualization - - @visualization.setter - def visualization(self, visualization): - """Sets the visualization of this Services. - - - :param visualization: The visualization of this Services. # noqa: E501 - :type: str - """ - - self._visualization = visualization - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Services, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Services): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/session_expired_dialog.py b/edu_sharing_client/models/session_expired_dialog.py deleted file mode 100644 index ba93d985..00000000 --- a/edu_sharing_client/models/session_expired_dialog.py +++ /dev/null @@ -1,85 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SessionExpiredDialog(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - } - - attribute_map = { - } - - def __init__(self): # noqa: E501 - """SessionExpiredDialog - a model defined in Swagger""" # noqa: E501 - self.discriminator = None - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SessionExpiredDialog, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SessionExpiredDialog): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/shared_folder_options.py b/edu_sharing_client/models/shared_folder_options.py deleted file mode 100644 index e5b3adf3..00000000 --- a/edu_sharing_client/models/shared_folder_options.py +++ /dev/null @@ -1,207 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SharedFolderOptions(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'folders': 'str', - 'private_files': 'str', - 'cc_files': 'str', - 'move': 'bool' - } - - attribute_map = { - 'folders': 'folders', - 'private_files': 'privateFiles', - 'cc_files': 'ccFiles', - 'move': 'move' - } - - def __init__(self, folders=None, private_files=None, cc_files=None, move=False): # noqa: E501 - """SharedFolderOptions - a model defined in Swagger""" # noqa: E501 - self._folders = None - self._private_files = None - self._cc_files = None - self._move = None - self.discriminator = None - if folders is not None: - self.folders = folders - if private_files is not None: - self.private_files = private_files - if cc_files is not None: - self.cc_files = cc_files - if move is not None: - self.move = move - - @property - def folders(self): - """Gets the folders of this SharedFolderOptions. # noqa: E501 - - - :return: The folders of this SharedFolderOptions. # noqa: E501 - :rtype: str - """ - return self._folders - - @folders.setter - def folders(self, folders): - """Sets the folders of this SharedFolderOptions. - - - :param folders: The folders of this SharedFolderOptions. 
# noqa: E501 - :type: str - """ - allowed_values = ["none", "assign"] # noqa: E501 - if folders not in allowed_values: - raise ValueError( - "Invalid value for `folders` ({0}), must be one of {1}" # noqa: E501 - .format(folders, allowed_values) - ) - - self._folders = folders - - @property - def private_files(self): - """Gets the private_files of this SharedFolderOptions. # noqa: E501 - - - :return: The private_files of this SharedFolderOptions. # noqa: E501 - :rtype: str - """ - return self._private_files - - @private_files.setter - def private_files(self, private_files): - """Sets the private_files of this SharedFolderOptions. - - - :param private_files: The private_files of this SharedFolderOptions. # noqa: E501 - :type: str - """ - allowed_values = ["none", "assign", "delete"] # noqa: E501 - if private_files not in allowed_values: - raise ValueError( - "Invalid value for `private_files` ({0}), must be one of {1}" # noqa: E501 - .format(private_files, allowed_values) - ) - - self._private_files = private_files - - @property - def cc_files(self): - """Gets the cc_files of this SharedFolderOptions. # noqa: E501 - - - :return: The cc_files of this SharedFolderOptions. # noqa: E501 - :rtype: str - """ - return self._cc_files - - @cc_files.setter - def cc_files(self, cc_files): - """Sets the cc_files of this SharedFolderOptions. - - - :param cc_files: The cc_files of this SharedFolderOptions. # noqa: E501 - :type: str - """ - allowed_values = ["none", "assign", "delete"] # noqa: E501 - if cc_files not in allowed_values: - raise ValueError( - "Invalid value for `cc_files` ({0}), must be one of {1}" # noqa: E501 - .format(cc_files, allowed_values) - ) - - self._cc_files = cc_files - - @property - def move(self): - """Gets the move of this SharedFolderOptions. # noqa: E501 - - - :return: The move of this SharedFolderOptions. # noqa: E501 - :rtype: bool - """ - return self._move - - @move.setter - def move(self, move): - """Sets the move of this SharedFolderOptions. - - - :param move: The move of this SharedFolderOptions. 
# noqa: E501 - :type: bool - """ - - self._move = move - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SharedFolderOptions, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SharedFolderOptions): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/sharing_info.py b/edu_sharing_client/models/sharing_info.py deleted file mode 100644 index 635e705f..00000000 --- a/edu_sharing_client/models/sharing_info.py +++ /dev/null @@ -1,215 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SharingInfo(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'password_matches': 'bool', - 'password': 'bool', - 'expired': 'bool', - 'invited_by': 'Person', - 'node': 'Node' - } - - attribute_map = { - 'password_matches': 'passwordMatches', - 'password': 'password', - 'expired': 'expired', - 'invited_by': 'invitedBy', - 'node': 'node' - } - - def __init__(self, password_matches=False, password=False, expired=False, invited_by=None, node=None): # noqa: E501 - """SharingInfo - a model defined in Swagger""" # noqa: E501 - self._password_matches = None - self._password = None - self._expired = None - self._invited_by = None - self._node = None - self.discriminator = None - if password_matches is not None: - self.password_matches = password_matches - if password is not None: - self.password = password - if expired is not None: - self.expired = expired - if invited_by is not None: - self.invited_by = invited_by - if node is not None: - self.node = node - - @property - def password_matches(self): - """Gets the password_matches of this SharingInfo. # noqa: E501 - - - :return: The password_matches of this SharingInfo. # noqa: E501 - :rtype: bool - """ - return self._password_matches - - @password_matches.setter - def password_matches(self, password_matches): - """Sets the password_matches of this SharingInfo. - - - :param password_matches: The password_matches of this SharingInfo. 
# noqa: E501 - :type: bool - """ - - self._password_matches = password_matches - - @property - def password(self): - """Gets the password of this SharingInfo. # noqa: E501 - - - :return: The password of this SharingInfo. # noqa: E501 - :rtype: bool - """ - return self._password - - @password.setter - def password(self, password): - """Sets the password of this SharingInfo. - - - :param password: The password of this SharingInfo. # noqa: E501 - :type: bool - """ - - self._password = password - - @property - def expired(self): - """Gets the expired of this SharingInfo. # noqa: E501 - - - :return: The expired of this SharingInfo. # noqa: E501 - :rtype: bool - """ - return self._expired - - @expired.setter - def expired(self, expired): - """Sets the expired of this SharingInfo. - - - :param expired: The expired of this SharingInfo. # noqa: E501 - :type: bool - """ - - self._expired = expired - - @property - def invited_by(self): - """Gets the invited_by of this SharingInfo. # noqa: E501 - - - :return: The invited_by of this SharingInfo. # noqa: E501 - :rtype: Person - """ - return self._invited_by - - @invited_by.setter - def invited_by(self, invited_by): - """Sets the invited_by of this SharingInfo. - - - :param invited_by: The invited_by of this SharingInfo. # noqa: E501 - :type: Person - """ - - self._invited_by = invited_by - - @property - def node(self): - """Gets the node of this SharingInfo. # noqa: E501 - - - :return: The node of this SharingInfo. # noqa: E501 - :rtype: Node - """ - return self._node - - @node.setter - def node(self, node): - """Sets the node of this SharingInfo. - - - :param node: The node of this SharingInfo. # noqa: E501 - :type: Node - """ - - self._node = node - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SharingInfo, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SharingInfo): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/simple_edit.py b/edu_sharing_client/models/simple_edit.py deleted file mode 100644 index d9d1b4e8..00000000 --- a/edu_sharing_client/models/simple_edit.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SimpleEdit(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'global_groups': 'list[str]', - 'organization': 'Organization' - } - - attribute_map = { - 'global_groups': 'globalGroups', - 'organization': 'organization' - } - - def __init__(self, global_groups=None, organization=None): # noqa: E501 - """SimpleEdit - a model defined in Swagger""" # noqa: E501 - self._global_groups = None - self._organization = None - self.discriminator = None - if global_groups is not None: - self.global_groups = global_groups - if organization is not None: - self.organization = organization - - @property - def global_groups(self): - """Gets the global_groups of this SimpleEdit. # noqa: E501 - - - :return: The global_groups of this SimpleEdit. # noqa: E501 - :rtype: list[str] - """ - return self._global_groups - - @global_groups.setter - def global_groups(self, global_groups): - """Sets the global_groups of this SimpleEdit. - - - :param global_groups: The global_groups of this SimpleEdit. # noqa: E501 - :type: list[str] - """ - - self._global_groups = global_groups - - @property - def organization(self): - """Gets the organization of this SimpleEdit. # noqa: E501 - - - :return: The organization of this SimpleEdit. # noqa: E501 - :rtype: Organization - """ - return self._organization - - @organization.setter - def organization(self, organization): - """Sets the organization of this SimpleEdit. - - - :param organization: The organization of this SimpleEdit. # noqa: E501 - :type: Organization - """ - - self._organization = organization - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SimpleEdit, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SimpleEdit): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/sort_column_v2.py b/edu_sharing_client/models/sort_column_v2.py deleted file mode 100644 index 3090ee9c..00000000 --- a/edu_sharing_client/models/sort_column_v2.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SortColumnV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'mode': 'str' - } - - attribute_map = { - 'id': 'id', - 'mode': 'mode' - } - - def __init__(self, id=None, mode=None): # noqa: E501 - """SortColumnV2 - a model defined in Swagger""" # noqa: E501 - self._id = None - self._mode = None - self.discriminator = None - if id is not None: - self.id = id - if mode is not None: - self.mode = mode - - @property - def id(self): - """Gets the id of this SortColumnV2. # noqa: E501 - - - :return: The id of this SortColumnV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this SortColumnV2. - - - :param id: The id of this SortColumnV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def mode(self): - """Gets the mode of this SortColumnV2. # noqa: E501 - - - :return: The mode of this SortColumnV2. # noqa: E501 - :rtype: str - """ - return self._mode - - @mode.setter - def mode(self, mode): - """Sets the mode of this SortColumnV2. - - - :param mode: The mode of this SortColumnV2. # noqa: E501 - :type: str - """ - - self._mode = mode - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SortColumnV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SortColumnV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/sort_v2.py b/edu_sharing_client/models/sort_v2.py deleted file mode 100644 index 4317e89c..00000000 --- a/edu_sharing_client/models/sort_v2.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SortV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'str', - 'columns': 'list[SortColumnV2]', - 'default': 'SortV2Default' - } - - attribute_map = { - 'id': 'id', - 'columns': 'columns', - 'default': 'default' - } - - def __init__(self, id=None, columns=None, default=None): # noqa: E501 - """SortV2 - a model defined in Swagger""" # noqa: E501 - self._id = None - self._columns = None - self._default = None - self.discriminator = None - if id is not None: - self.id = id - if columns is not None: - self.columns = columns - if default is not None: - self.default = default - - @property - def id(self): - """Gets the id of this SortV2. # noqa: E501 - - - :return: The id of this SortV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this SortV2. - - - :param id: The id of this SortV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def columns(self): - """Gets the columns of this SortV2. # noqa: E501 - - - :return: The columns of this SortV2. # noqa: E501 - :rtype: list[SortColumnV2] - """ - return self._columns - - @columns.setter - def columns(self, columns): - """Sets the columns of this SortV2. - - - :param columns: The columns of this SortV2. # noqa: E501 - :type: list[SortColumnV2] - """ - - self._columns = columns - - @property - def default(self): - """Gets the default of this SortV2. # noqa: E501 - - - :return: The default of this SortV2. # noqa: E501 - :rtype: SortV2Default - """ - return self._default - - @default.setter - def default(self, default): - """Sets the default of this SortV2. - - - :param default: The default of this SortV2. # noqa: E501 - :type: SortV2Default - """ - - self._default = default - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SortV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SortV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/sort_v2_default.py b/edu_sharing_client/models/sort_v2_default.py deleted file mode 100644 index e42d0254..00000000 --- a/edu_sharing_client/models/sort_v2_default.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SortV2Default(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'sort_by': 'str', - 'sort_ascending': 'bool' - } - - attribute_map = { - 'sort_by': 'sortBy', - 'sort_ascending': 'sortAscending' - } - - def __init__(self, sort_by=None, sort_ascending=False): # noqa: E501 - """SortV2Default - a model defined in Swagger""" # noqa: E501 - self._sort_by = None - self._sort_ascending = None - self.discriminator = None - if sort_by is not None: - self.sort_by = sort_by - if sort_ascending is not None: - self.sort_ascending = sort_ascending - - @property - def sort_by(self): - """Gets the sort_by of this SortV2Default. # noqa: E501 - - - :return: The sort_by of this SortV2Default. # noqa: E501 - :rtype: str - """ - return self._sort_by - - @sort_by.setter - def sort_by(self, sort_by): - """Sets the sort_by of this SortV2Default. - - - :param sort_by: The sort_by of this SortV2Default. # noqa: E501 - :type: str - """ - - self._sort_by = sort_by - - @property - def sort_ascending(self): - """Gets the sort_ascending of this SortV2Default. # noqa: E501 - - - :return: The sort_ascending of this SortV2Default. # noqa: E501 - :rtype: bool - """ - return self._sort_ascending - - @sort_ascending.setter - def sort_ascending(self, sort_ascending): - """Sets the sort_ascending of this SortV2Default. - - - :param sort_ascending: The sort_ascending of this SortV2Default. # noqa: E501 - :type: bool - """ - - self._sort_ascending = sort_ascending - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SortV2Default, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SortV2Default): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/statistic_entity.py b/edu_sharing_client/models/statistic_entity.py deleted file mode 100644 index 929faa44..00000000 --- a/edu_sharing_client/models/statistic_entity.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StatisticEntity(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'value': 'str', - 'count': 'int' - } - - attribute_map = { - 'value': 'value', - 'count': 'count' - } - - def __init__(self, value=None, count=None): # noqa: E501 - """StatisticEntity - a model defined in Swagger""" # noqa: E501 - self._value = None - self._count = None - self.discriminator = None - self.value = value - self.count = count - - @property - def value(self): - """Gets the value of this StatisticEntity. # noqa: E501 - - - :return: The value of this StatisticEntity. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this StatisticEntity. - - - :param value: The value of this StatisticEntity. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - @property - def count(self): - """Gets the count of this StatisticEntity. # noqa: E501 - - - :return: The count of this StatisticEntity. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this StatisticEntity. - - - :param count: The count of this StatisticEntity. # noqa: E501 - :type: int - """ - if count is None: - raise ValueError("Invalid value for `count`, must not be `None`") # noqa: E501 - - self._count = count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StatisticEntity, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StatisticEntity): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/statistic_entry.py b/edu_sharing_client/models/statistic_entry.py deleted file mode 100644 index afcf291e..00000000 --- a/edu_sharing_client/models/statistic_entry.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StatisticEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - '_property': 'str', - 'entities': 'list[StatisticEntity]' - } - - attribute_map = { - '_property': 'property', - 'entities': 'entities' - } - - def __init__(self, _property=None, entities=None): # noqa: E501 - """StatisticEntry - a model defined in Swagger""" # noqa: E501 - self.__property = None - self._entities = None - self.discriminator = None - self._property = _property - self.entities = entities - - @property - def _property(self): - """Gets the _property of this StatisticEntry. # noqa: E501 - - - :return: The _property of this StatisticEntry. # noqa: E501 - :rtype: str - """ - return self.__property - - @_property.setter - def _property(self, _property): - """Sets the _property of this StatisticEntry. - - - :param _property: The _property of this StatisticEntry. # noqa: E501 - :type: str - """ - if _property is None: - raise ValueError("Invalid value for `_property`, must not be `None`") # noqa: E501 - - self.__property = _property - - @property - def entities(self): - """Gets the entities of this StatisticEntry. # noqa: E501 - - - :return: The entities of this StatisticEntry. # noqa: E501 - :rtype: list[StatisticEntity] - """ - return self._entities - - @entities.setter - def entities(self, entities): - """Sets the entities of this StatisticEntry. - - - :param entities: The entities of this StatisticEntry. # noqa: E501 - :type: list[StatisticEntity] - """ - if entities is None: - raise ValueError("Invalid value for `entities`, must not be `None`") # noqa: E501 - - self._entities = entities - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StatisticEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StatisticEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/statistics.py b/edu_sharing_client/models/statistics.py deleted file mode 100644 index 9c66850a..00000000 --- a/edu_sharing_client/models/statistics.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Statistics(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. 
- """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'groups': 'list[str]' - } - - attribute_map = { - 'groups': 'groups' - } - - def __init__(self, groups=None): # noqa: E501 - """Statistics - a model defined in Swagger""" # noqa: E501 - self._groups = None - self.discriminator = None - if groups is not None: - self.groups = groups - - @property - def groups(self): - """Gets the groups of this Statistics. # noqa: E501 - - - :return: The groups of this Statistics. # noqa: E501 - :rtype: list[str] - """ - return self._groups - - @groups.setter - def groups(self, groups): - """Sets the groups of this Statistics. - - - :param groups: The groups of this Statistics. # noqa: E501 - :type: list[str] - """ - - self._groups = groups - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Statistics, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Statistics): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/statistics_global.py b/edu_sharing_client/models/statistics_global.py deleted file mode 100644 index 871de6f4..00000000 --- a/edu_sharing_client/models/statistics_global.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StatisticsGlobal(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'overall': 'StatisticsGroup', - 'groups': 'list[StatisticsKeyGroup]', - 'user': 'User' - } - - attribute_map = { - 'overall': 'overall', - 'groups': 'groups', - 'user': 'user' - } - - def __init__(self, overall=None, groups=None, user=None): # noqa: E501 - """StatisticsGlobal - a model defined in Swagger""" # noqa: E501 - self._overall = None - self._groups = None - self._user = None - self.discriminator = None - if overall is not None: - self.overall = overall - if groups is not None: - self.groups = groups - if user is not None: - self.user = user - - @property - def overall(self): - """Gets the overall of this StatisticsGlobal. # noqa: E501 - - - :return: The overall of this StatisticsGlobal. # noqa: E501 - :rtype: StatisticsGroup - """ - return self._overall - - @overall.setter - def overall(self, overall): - """Sets the overall of this StatisticsGlobal. - - - :param overall: The overall of this StatisticsGlobal. # noqa: E501 - :type: StatisticsGroup - """ - - self._overall = overall - - @property - def groups(self): - """Gets the groups of this StatisticsGlobal. # noqa: E501 - - - :return: The groups of this StatisticsGlobal. # noqa: E501 - :rtype: list[StatisticsKeyGroup] - """ - return self._groups - - @groups.setter - def groups(self, groups): - """Sets the groups of this StatisticsGlobal. - - - :param groups: The groups of this StatisticsGlobal. # noqa: E501 - :type: list[StatisticsKeyGroup] - """ - - self._groups = groups - - @property - def user(self): - """Gets the user of this StatisticsGlobal. # noqa: E501 - - - :return: The user of this StatisticsGlobal. # noqa: E501 - :rtype: User - """ - return self._user - - @user.setter - def user(self, user): - """Sets the user of this StatisticsGlobal. - - - :param user: The user of this StatisticsGlobal. # noqa: E501 - :type: User - """ - - self._user = user - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StatisticsGlobal, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StatisticsGlobal): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/statistics_group.py b/edu_sharing_client/models/statistics_group.py deleted file mode 100644 index 5f4bd6ac..00000000 --- a/edu_sharing_client/models/statistics_group.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StatisticsGroup(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'count': 'int', - 'sub_groups': 'list[StatisticsSubGroup]' - } - - attribute_map = { - 'count': 'count', - 'sub_groups': 'subGroups' - } - - def __init__(self, count=None, sub_groups=None): # noqa: E501 - """StatisticsGroup - a model defined in Swagger""" # noqa: E501 - self._count = None - self._sub_groups = None - self.discriminator = None - if count is not None: - self.count = count - if sub_groups is not None: - self.sub_groups = sub_groups - - @property - def count(self): - """Gets the count of this StatisticsGroup. # noqa: E501 - - - :return: The count of this StatisticsGroup. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this StatisticsGroup. - - - :param count: The count of this StatisticsGroup. # noqa: E501 - :type: int - """ - - self._count = count - - @property - def sub_groups(self): - """Gets the sub_groups of this StatisticsGroup. # noqa: E501 - - - :return: The sub_groups of this StatisticsGroup. # noqa: E501 - :rtype: list[StatisticsSubGroup] - """ - return self._sub_groups - - @sub_groups.setter - def sub_groups(self, sub_groups): - """Sets the sub_groups of this StatisticsGroup. - - - :param sub_groups: The sub_groups of this StatisticsGroup. # noqa: E501 - :type: list[StatisticsSubGroup] - """ - - self._sub_groups = sub_groups - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StatisticsGroup, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StatisticsGroup): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/statistics_key_group.py b/edu_sharing_client/models/statistics_key_group.py deleted file mode 100644 index 8bc0374f..00000000 --- a/edu_sharing_client/models/statistics_key_group.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StatisticsKeyGroup(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'key': 'str', - 'display_name': 'str', - 'count': 'int', - 'sub_groups': 'list[StatisticsSubGroup]' - } - - attribute_map = { - 'key': 'key', - 'display_name': 'displayName', - 'count': 'count', - 'sub_groups': 'subGroups' - } - - def __init__(self, key=None, display_name=None, count=None, sub_groups=None): # noqa: E501 - """StatisticsKeyGroup - a model defined in Swagger""" # noqa: E501 - self._key = None - self._display_name = None - self._count = None - self._sub_groups = None - self.discriminator = None - if key is not None: - self.key = key - if display_name is not None: - self.display_name = display_name - if count is not None: - self.count = count - if sub_groups is not None: - self.sub_groups = sub_groups - - @property - def key(self): - """Gets the key of this StatisticsKeyGroup. # noqa: E501 - - - :return: The key of this StatisticsKeyGroup. # noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this StatisticsKeyGroup. - - - :param key: The key of this StatisticsKeyGroup. # noqa: E501 - :type: str - """ - - self._key = key - - @property - def display_name(self): - """Gets the display_name of this StatisticsKeyGroup. # noqa: E501 - - - :return: The display_name of this StatisticsKeyGroup. # noqa: E501 - :rtype: str - """ - return self._display_name - - @display_name.setter - def display_name(self, display_name): - """Sets the display_name of this StatisticsKeyGroup. - - - :param display_name: The display_name of this StatisticsKeyGroup. # noqa: E501 - :type: str - """ - - self._display_name = display_name - - @property - def count(self): - """Gets the count of this StatisticsKeyGroup. # noqa: E501 - - - :return: The count of this StatisticsKeyGroup. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this StatisticsKeyGroup. - - - :param count: The count of this StatisticsKeyGroup. # noqa: E501 - :type: int - """ - - self._count = count - - @property - def sub_groups(self): - """Gets the sub_groups of this StatisticsKeyGroup. # noqa: E501 - - - :return: The sub_groups of this StatisticsKeyGroup. # noqa: E501 - :rtype: list[StatisticsSubGroup] - """ - return self._sub_groups - - @sub_groups.setter - def sub_groups(self, sub_groups): - """Sets the sub_groups of this StatisticsKeyGroup. - - - :param sub_groups: The sub_groups of this StatisticsKeyGroup. 
# noqa: E501 - :type: list[StatisticsSubGroup] - """ - - self._sub_groups = sub_groups - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StatisticsKeyGroup, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StatisticsKeyGroup): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/statistics_sub_group.py b/edu_sharing_client/models/statistics_sub_group.py deleted file mode 100644 index 99d1ab28..00000000 --- a/edu_sharing_client/models/statistics_sub_group.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StatisticsSubGroup(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'count': 'list[SubGroupItem]' - } - - attribute_map = { - 'id': 'id', - 'count': 'count' - } - - def __init__(self, id=None, count=None): # noqa: E501 - """StatisticsSubGroup - a model defined in Swagger""" # noqa: E501 - self._id = None - self._count = None - self.discriminator = None - if id is not None: - self.id = id - if count is not None: - self.count = count - - @property - def id(self): - """Gets the id of this StatisticsSubGroup. # noqa: E501 - - - :return: The id of this StatisticsSubGroup. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this StatisticsSubGroup. - - - :param id: The id of this StatisticsSubGroup. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def count(self): - """Gets the count of this StatisticsSubGroup. # noqa: E501 - - - :return: The count of this StatisticsSubGroup. # noqa: E501 - :rtype: list[SubGroupItem] - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this StatisticsSubGroup. - - - :param count: The count of this StatisticsSubGroup. 
# noqa: E501 - :type: list[SubGroupItem] - """ - - self._count = count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StatisticsSubGroup, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StatisticsSubGroup): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/stored_service.py b/edu_sharing_client/models/stored_service.py deleted file mode 100644 index 3f467478..00000000 --- a/edu_sharing_client/models/stored_service.py +++ /dev/null @@ -1,449 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StoredService(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str', - 'url': 'str', - 'icon': 'str', - 'logo': 'str', - 'in_language': 'str', - 'type': 'str', - 'description': 'str', - 'audience': 'list[Audience]', - 'provider': 'Provider', - 'start_date': 'str', - 'interfaces': 'list[Interface]', - 'about': 'list[str]', - 'id': 'str', - 'is_accessible_for_free': 'bool' - } - - attribute_map = { - 'name': 'name', - 'url': 'url', - 'icon': 'icon', - 'logo': 'logo', - 'in_language': 'inLanguage', - 'type': 'type', - 'description': 'description', - 'audience': 'audience', - 'provider': 'provider', - 'start_date': 'startDate', - 'interfaces': 'interfaces', - 'about': 'about', - 'id': 'id', - 'is_accessible_for_free': 'isAccessibleForFree' - } - - def __init__(self, name=None, url=None, icon=None, logo=None, in_language=None, type=None, description=None, audience=None, provider=None, start_date=None, interfaces=None, about=None, id=None, is_accessible_for_free=False): # noqa: E501 - """StoredService - a model defined in Swagger""" # noqa: E501 - self._name = None - self._url = None - self._icon = None - self._logo = None - self._in_language = None - self._type = None - self._description = None - self._audience = None - self._provider = None - self._start_date = None - self._interfaces = None - self._about = None - self._id = None - self._is_accessible_for_free = None - self.discriminator = None - if name is not None: - self.name = name - if url is not None: - self.url = url - if icon is not None: - self.icon = icon - if logo is not None: - self.logo = logo - if in_language is not None: - self.in_language = in_language - if type is not None: - self.type = type - if description is not None: - self.description = description - if audience is not None: - self.audience = audience - if provider is not None: - self.provider = provider - if start_date is not None: - self.start_date = start_date - if interfaces is not None: - self.interfaces = interfaces - if about is not None: - self.about = about - if id is not None: - self.id = id - if is_accessible_for_free is not None: - self.is_accessible_for_free = is_accessible_for_free - - @property - def name(self): - """Gets the name of this StoredService. # noqa: E501 - - - :return: The name of this StoredService. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this StoredService. - - - :param name: The name of this StoredService. # noqa: E501 - :type: str - """ - - self._name = name - - @property - def url(self): - """Gets the url of this StoredService. # noqa: E501 - - - :return: The url of this StoredService. # noqa: E501 - :rtype: str - """ - return self._url - - @url.setter - def url(self, url): - """Sets the url of this StoredService. - - - :param url: The url of this StoredService. # noqa: E501 - :type: str - """ - - self._url = url - - @property - def icon(self): - """Gets the icon of this StoredService. # noqa: E501 - - - :return: The icon of this StoredService. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this StoredService. - - - :param icon: The icon of this StoredService. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def logo(self): - """Gets the logo of this StoredService. # noqa: E501 - - - :return: The logo of this StoredService. # noqa: E501 - :rtype: str - """ - return self._logo - - @logo.setter - def logo(self, logo): - """Sets the logo of this StoredService. 
- - - :param logo: The logo of this StoredService. # noqa: E501 - :type: str - """ - - self._logo = logo - - @property - def in_language(self): - """Gets the in_language of this StoredService. # noqa: E501 - - - :return: The in_language of this StoredService. # noqa: E501 - :rtype: str - """ - return self._in_language - - @in_language.setter - def in_language(self, in_language): - """Sets the in_language of this StoredService. - - - :param in_language: The in_language of this StoredService. # noqa: E501 - :type: str - """ - - self._in_language = in_language - - @property - def type(self): - """Gets the type of this StoredService. # noqa: E501 - - - :return: The type of this StoredService. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this StoredService. - - - :param type: The type of this StoredService. # noqa: E501 - :type: str - """ - - self._type = type - - @property - def description(self): - """Gets the description of this StoredService. # noqa: E501 - - - :return: The description of this StoredService. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this StoredService. - - - :param description: The description of this StoredService. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def audience(self): - """Gets the audience of this StoredService. # noqa: E501 - - - :return: The audience of this StoredService. # noqa: E501 - :rtype: list[Audience] - """ - return self._audience - - @audience.setter - def audience(self, audience): - """Sets the audience of this StoredService. - - - :param audience: The audience of this StoredService. # noqa: E501 - :type: list[Audience] - """ - - self._audience = audience - - @property - def provider(self): - """Gets the provider of this StoredService. # noqa: E501 - - - :return: The provider of this StoredService. # noqa: E501 - :rtype: Provider - """ - return self._provider - - @provider.setter - def provider(self, provider): - """Sets the provider of this StoredService. - - - :param provider: The provider of this StoredService. # noqa: E501 - :type: Provider - """ - - self._provider = provider - - @property - def start_date(self): - """Gets the start_date of this StoredService. # noqa: E501 - - - :return: The start_date of this StoredService. # noqa: E501 - :rtype: str - """ - return self._start_date - - @start_date.setter - def start_date(self, start_date): - """Sets the start_date of this StoredService. - - - :param start_date: The start_date of this StoredService. # noqa: E501 - :type: str - """ - - self._start_date = start_date - - @property - def interfaces(self): - """Gets the interfaces of this StoredService. # noqa: E501 - - - :return: The interfaces of this StoredService. # noqa: E501 - :rtype: list[Interface] - """ - return self._interfaces - - @interfaces.setter - def interfaces(self, interfaces): - """Sets the interfaces of this StoredService. - - - :param interfaces: The interfaces of this StoredService. # noqa: E501 - :type: list[Interface] - """ - - self._interfaces = interfaces - - @property - def about(self): - """Gets the about of this StoredService. # noqa: E501 - - - :return: The about of this StoredService. # noqa: E501 - :rtype: list[str] - """ - return self._about - - @about.setter - def about(self, about): - """Sets the about of this StoredService. - - - :param about: The about of this StoredService. 
# noqa: E501 - :type: list[str] - """ - - self._about = about - - @property - def id(self): - """Gets the id of this StoredService. # noqa: E501 - - - :return: The id of this StoredService. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this StoredService. - - - :param id: The id of this StoredService. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def is_accessible_for_free(self): - """Gets the is_accessible_for_free of this StoredService. # noqa: E501 - - - :return: The is_accessible_for_free of this StoredService. # noqa: E501 - :rtype: bool - """ - return self._is_accessible_for_free - - @is_accessible_for_free.setter - def is_accessible_for_free(self, is_accessible_for_free): - """Sets the is_accessible_for_free of this StoredService. - - - :param is_accessible_for_free: The is_accessible_for_free of this StoredService. # noqa: E501 - :type: bool - """ - - self._is_accessible_for_free = is_accessible_for_free - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StoredService, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StoredService): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/stream.py b/edu_sharing_client/models/stream.py deleted file mode 100644 index b0412e16..00000000 --- a/edu_sharing_client/models/stream.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Stream(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'enabled': 'bool' - } - - attribute_map = { - 'enabled': 'enabled' - } - - def __init__(self, enabled=False): # noqa: E501 - """Stream - a model defined in Swagger""" # noqa: E501 - self._enabled = None - self.discriminator = None - if enabled is not None: - self.enabled = enabled - - @property - def enabled(self): - """Gets the enabled of this Stream. # noqa: E501 - - - :return: The enabled of this Stream. 
# noqa: E501 - :rtype: bool - """ - return self._enabled - - @enabled.setter - def enabled(self, enabled): - """Sets the enabled of this Stream. - - - :param enabled: The enabled of this Stream. # noqa: E501 - :type: bool - """ - - self._enabled = enabled - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Stream, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Stream): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/stream_entry.py b/edu_sharing_client/models/stream_entry.py deleted file mode 100644 index e1ae1f9a..00000000 --- a/edu_sharing_client/models/stream_entry.py +++ /dev/null @@ -1,293 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StreamEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'str', - 'description': 'str', - 'nodes': 'list[Node]', - 'properties': 'dict(str, object)', - 'priority': 'int', - 'author': 'UserSimple', - 'created': 'int', - 'modified': 'int' - } - - attribute_map = { - 'id': 'id', - 'description': 'description', - 'nodes': 'nodes', - 'properties': 'properties', - 'priority': 'priority', - 'author': 'author', - 'created': 'created', - 'modified': 'modified' - } - - def __init__(self, id=None, description=None, nodes=None, properties=None, priority=None, author=None, created=None, modified=None): # noqa: E501 - """StreamEntry - a model defined in Swagger""" # noqa: E501 - self._id = None - self._description = None - self._nodes = None - self._properties = None - self._priority = None - self._author = None - self._created = None - self._modified = None - self.discriminator = None - if id is not None: - self.id = id - if description is not None: - self.description = description - if nodes is not None: - self.nodes = nodes - if properties is not None: - self.properties = properties - if priority is not None: - self.priority = priority - if author is not None: - self.author = author - if created is not None: - self.created = created - if modified is not None: - self.modified = modified - - @property - def id(self): - """Gets the id of this StreamEntry. # noqa: E501 - - - :return: The id of this StreamEntry. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this StreamEntry. - - - :param id: The id of this StreamEntry. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def description(self): - """Gets the description of this StreamEntry. # noqa: E501 - - - :return: The description of this StreamEntry. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this StreamEntry. - - - :param description: The description of this StreamEntry. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def nodes(self): - """Gets the nodes of this StreamEntry. # noqa: E501 - - - :return: The nodes of this StreamEntry. # noqa: E501 - :rtype: list[Node] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this StreamEntry. - - - :param nodes: The nodes of this StreamEntry. # noqa: E501 - :type: list[Node] - """ - - self._nodes = nodes - - @property - def properties(self): - """Gets the properties of this StreamEntry. # noqa: E501 - - - :return: The properties of this StreamEntry. # noqa: E501 - :rtype: dict(str, object) - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this StreamEntry. - - - :param properties: The properties of this StreamEntry. # noqa: E501 - :type: dict(str, object) - """ - - self._properties = properties - - @property - def priority(self): - """Gets the priority of this StreamEntry. # noqa: E501 - - - :return: The priority of this StreamEntry. # noqa: E501 - :rtype: int - """ - return self._priority - - @priority.setter - def priority(self, priority): - """Sets the priority of this StreamEntry. - - - :param priority: The priority of this StreamEntry. # noqa: E501 - :type: int - """ - - self._priority = priority - - @property - def author(self): - """Gets the author of this StreamEntry. # noqa: E501 - - - :return: The author of this StreamEntry. 
# noqa: E501 - :rtype: UserSimple - """ - return self._author - - @author.setter - def author(self, author): - """Sets the author of this StreamEntry. - - - :param author: The author of this StreamEntry. # noqa: E501 - :type: UserSimple - """ - - self._author = author - - @property - def created(self): - """Gets the created of this StreamEntry. # noqa: E501 - - - :return: The created of this StreamEntry. # noqa: E501 - :rtype: int - """ - return self._created - - @created.setter - def created(self, created): - """Sets the created of this StreamEntry. - - - :param created: The created of this StreamEntry. # noqa: E501 - :type: int - """ - - self._created = created - - @property - def modified(self): - """Gets the modified of this StreamEntry. # noqa: E501 - - - :return: The modified of this StreamEntry. # noqa: E501 - :rtype: int - """ - return self._modified - - @modified.setter - def modified(self, modified): - """Sets the modified of this StreamEntry. - - - :param modified: The modified of this StreamEntry. # noqa: E501 - :type: int - """ - - self._modified = modified - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StreamEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StreamEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/stream_entry_input.py b/edu_sharing_client/models/stream_entry_input.py deleted file mode 100644 index 00a7b13f..00000000 --- a/edu_sharing_client/models/stream_entry_input.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StreamEntryInput(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'str', - 'title': 'str', - 'description': 'str', - 'nodes': 'list[str]', - 'properties': 'dict(str, object)', - 'priority': 'int' - } - - attribute_map = { - 'id': 'id', - 'title': 'title', - 'description': 'description', - 'nodes': 'nodes', - 'properties': 'properties', - 'priority': 'priority' - } - - def __init__(self, id=None, title=None, description=None, nodes=None, properties=None, priority=None): # noqa: E501 - """StreamEntryInput - a model defined in Swagger""" # noqa: E501 - self._id = None - self._title = None - self._description = None - self._nodes = None - self._properties = None - self._priority = None - self.discriminator = None - if id is not None: - self.id = id - if title is not None: - self.title = title - if description is not None: - self.description = description - if nodes is not None: - self.nodes = nodes - if properties is not None: - self.properties = properties - if priority is not None: - self.priority = priority - - @property - def id(self): - """Gets the id of this StreamEntryInput. # noqa: E501 - - - :return: The id of this StreamEntryInput. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this StreamEntryInput. - - - :param id: The id of this StreamEntryInput. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def title(self): - """Gets the title of this StreamEntryInput. # noqa: E501 - - - :return: The title of this StreamEntryInput. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this StreamEntryInput. - - - :param title: The title of this StreamEntryInput. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def description(self): - """Gets the description of this StreamEntryInput. # noqa: E501 - - - :return: The description of this StreamEntryInput. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this StreamEntryInput. - - - :param description: The description of this StreamEntryInput. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def nodes(self): - """Gets the nodes of this StreamEntryInput. # noqa: E501 - - - :return: The nodes of this StreamEntryInput. # noqa: E501 - :rtype: list[str] - """ - return self._nodes - - @nodes.setter - def nodes(self, nodes): - """Sets the nodes of this StreamEntryInput. - - - :param nodes: The nodes of this StreamEntryInput. # noqa: E501 - :type: list[str] - """ - - self._nodes = nodes - - @property - def properties(self): - """Gets the properties of this StreamEntryInput. # noqa: E501 - - - :return: The properties of this StreamEntryInput. # noqa: E501 - :rtype: dict(str, object) - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this StreamEntryInput. - - - :param properties: The properties of this StreamEntryInput. # noqa: E501 - :type: dict(str, object) - """ - - self._properties = properties - - @property - def priority(self): - """Gets the priority of this StreamEntryInput. # noqa: E501 - - - :return: The priority of this StreamEntryInput. # noqa: E501 - :rtype: int - """ - return self._priority - - @priority.setter - def priority(self, priority): - """Sets the priority of this StreamEntryInput. - - - :param priority: The priority of this StreamEntryInput. 
# noqa: E501 - :type: int - """ - - self._priority = priority - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StreamEntryInput, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StreamEntryInput): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/stream_list.py b/edu_sharing_client/models/stream_list.py deleted file mode 100644 index ce3cc47b..00000000 --- a/edu_sharing_client/models/stream_list.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class StreamList(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'stream': 'list[StreamEntry]', - 'pagination': 'Pagination' - } - - attribute_map = { - 'stream': 'stream', - 'pagination': 'pagination' - } - - def __init__(self, stream=None, pagination=None): # noqa: E501 - """StreamList - a model defined in Swagger""" # noqa: E501 - self._stream = None - self._pagination = None - self.discriminator = None - if stream is not None: - self.stream = stream - if pagination is not None: - self.pagination = pagination - - @property - def stream(self): - """Gets the stream of this StreamList. # noqa: E501 - - - :return: The stream of this StreamList. # noqa: E501 - :rtype: list[StreamEntry] - """ - return self._stream - - @stream.setter - def stream(self, stream): - """Sets the stream of this StreamList. - - - :param stream: The stream of this StreamList. # noqa: E501 - :type: list[StreamEntry] - """ - - self._stream = stream - - @property - def pagination(self): - """Gets the pagination of this StreamList. # noqa: E501 - - - :return: The pagination of this StreamList. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this StreamList. - - - :param pagination: The pagination of this StreamList. 
# noqa: E501 - :type: Pagination - """ - - self._pagination = pagination - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(StreamList, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, StreamList): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/sub_group_item.py b/edu_sharing_client/models/sub_group_item.py deleted file mode 100644 index 53131c2c..00000000 --- a/edu_sharing_client/models/sub_group_item.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SubGroupItem(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'key': 'str', - 'display_name': 'str', - 'count': 'int' - } - - attribute_map = { - 'key': 'key', - 'display_name': 'displayName', - 'count': 'count' - } - - def __init__(self, key=None, display_name=None, count=None): # noqa: E501 - """SubGroupItem - a model defined in Swagger""" # noqa: E501 - self._key = None - self._display_name = None - self._count = None - self.discriminator = None - if key is not None: - self.key = key - if display_name is not None: - self.display_name = display_name - if count is not None: - self.count = count - - @property - def key(self): - """Gets the key of this SubGroupItem. # noqa: E501 - - - :return: The key of this SubGroupItem. # noqa: E501 - :rtype: str - """ - return self._key - - @key.setter - def key(self, key): - """Sets the key of this SubGroupItem. - - - :param key: The key of this SubGroupItem. # noqa: E501 - :type: str - """ - - self._key = key - - @property - def display_name(self): - """Gets the display_name of this SubGroupItem. # noqa: E501 - - - :return: The display_name of this SubGroupItem. # noqa: E501 - :rtype: str - """ - return self._display_name - - @display_name.setter - def display_name(self, display_name): - """Sets the display_name of this SubGroupItem. - - - :param display_name: The display_name of this SubGroupItem. 
# noqa: E501 - :type: str - """ - - self._display_name = display_name - - @property - def count(self): - """Gets the count of this SubGroupItem. # noqa: E501 - - - :return: The count of this SubGroupItem. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this SubGroupItem. - - - :param count: The count of this SubGroupItem. # noqa: E501 - :type: int - """ - - self._count = count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SubGroupItem, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SubGroupItem): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/subwidget.py b/edu_sharing_client/models/subwidget.py deleted file mode 100644 index 0233b238..00000000 --- a/edu_sharing_client/models/subwidget.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Subwidget(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str' - } - - attribute_map = { - 'id': 'id' - } - - def __init__(self, id=None): # noqa: E501 - """Subwidget - a model defined in Swagger""" # noqa: E501 - self._id = None - self.discriminator = None - if id is not None: - self.id = id - - @property - def id(self): - """Gets the id of this Subwidget. # noqa: E501 - - - :return: The id of this Subwidget. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this Subwidget. - - - :param id: The id of this Subwidget. 
# noqa: E501 - :type: str - """ - - self._id = id - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Subwidget, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Subwidget): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/suggestion_param.py b/edu_sharing_client/models/suggestion_param.py deleted file mode 100644 index bb6ef495..00000000 --- a/edu_sharing_client/models/suggestion_param.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class SuggestionParam(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'value_parameters': 'ValueParameters', - 'criterias': 'list[MdsQueryCriteria]' - } - - attribute_map = { - 'value_parameters': 'valueParameters', - 'criterias': 'criterias' - } - - def __init__(self, value_parameters=None, criterias=None): # noqa: E501 - """SuggestionParam - a model defined in Swagger""" # noqa: E501 - self._value_parameters = None - self._criterias = None - self.discriminator = None - if value_parameters is not None: - self.value_parameters = value_parameters - if criterias is not None: - self.criterias = criterias - - @property - def value_parameters(self): - """Gets the value_parameters of this SuggestionParam. # noqa: E501 - - - :return: The value_parameters of this SuggestionParam. # noqa: E501 - :rtype: ValueParameters - """ - return self._value_parameters - - @value_parameters.setter - def value_parameters(self, value_parameters): - """Sets the value_parameters of this SuggestionParam. - - - :param value_parameters: The value_parameters of this SuggestionParam. # noqa: E501 - :type: ValueParameters - """ - - self._value_parameters = value_parameters - - @property - def criterias(self): - """Gets the criterias of this SuggestionParam. # noqa: E501 - - - :return: The criterias of this SuggestionParam. # noqa: E501 - :rtype: list[MdsQueryCriteria] - """ - return self._criterias - - @criterias.setter - def criterias(self, criterias): - """Sets the criterias of this SuggestionParam. 
- - - :param criterias: The criterias of this SuggestionParam. # noqa: E501 - :type: list[MdsQueryCriteria] - """ - - self._criterias = criterias - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(SuggestionParam, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, SuggestionParam): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/tracking.py b/edu_sharing_client/models/tracking.py deleted file mode 100644 index 8b0c8425..00000000 --- a/edu_sharing_client/models/tracking.py +++ /dev/null @@ -1,215 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Tracking(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'counts': 'dict(str, int)', - '_date': 'str', - 'authority': 'Authority', - 'fields': 'dict(str, Serializable)', - 'groups': 'dict(str, dict(str, dict(str, int)))' - } - - attribute_map = { - 'counts': 'counts', - '_date': 'date', - 'authority': 'authority', - 'fields': 'fields', - 'groups': 'groups' - } - - def __init__(self, counts=None, _date=None, authority=None, fields=None, groups=None): # noqa: E501 - """Tracking - a model defined in Swagger""" # noqa: E501 - self._counts = None - self.__date = None - self._authority = None - self._fields = None - self._groups = None - self.discriminator = None - if counts is not None: - self.counts = counts - if _date is not None: - self._date = _date - if authority is not None: - self.authority = authority - if fields is not None: - self.fields = fields - if groups is not None: - self.groups = groups - - @property - def counts(self): - """Gets the counts of this Tracking. # noqa: E501 - - - :return: The counts of this Tracking. # noqa: E501 - :rtype: dict(str, int) - """ - return self._counts - - @counts.setter - def counts(self, counts): - """Sets the counts of this Tracking. - - - :param counts: The counts of this Tracking. # noqa: E501 - :type: dict(str, int) - """ - - self._counts = counts - - @property - def _date(self): - """Gets the _date of this Tracking. 
# noqa: E501 - - - :return: The _date of this Tracking. # noqa: E501 - :rtype: str - """ - return self.__date - - @_date.setter - def _date(self, _date): - """Sets the _date of this Tracking. - - - :param _date: The _date of this Tracking. # noqa: E501 - :type: str - """ - - self.__date = _date - - @property - def authority(self): - """Gets the authority of this Tracking. # noqa: E501 - - - :return: The authority of this Tracking. # noqa: E501 - :rtype: Authority - """ - return self._authority - - @authority.setter - def authority(self, authority): - """Sets the authority of this Tracking. - - - :param authority: The authority of this Tracking. # noqa: E501 - :type: Authority - """ - - self._authority = authority - - @property - def fields(self): - """Gets the fields of this Tracking. # noqa: E501 - - - :return: The fields of this Tracking. # noqa: E501 - :rtype: dict(str, Serializable) - """ - return self._fields - - @fields.setter - def fields(self, fields): - """Sets the fields of this Tracking. - - - :param fields: The fields of this Tracking. # noqa: E501 - :type: dict(str, Serializable) - """ - - self._fields = fields - - @property - def groups(self): - """Gets the groups of this Tracking. # noqa: E501 - - - :return: The groups of this Tracking. # noqa: E501 - :rtype: dict(str, dict(str, dict(str, int))) - """ - return self._groups - - @groups.setter - def groups(self, groups): - """Sets the groups of this Tracking. - - - :param groups: The groups of this Tracking. # noqa: E501 - :type: dict(str, dict(str, dict(str, int))) - """ - - self._groups = groups - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Tracking, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Tracking): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/tracking_node.py b/edu_sharing_client/models/tracking_node.py deleted file mode 100644 index 1b1caadf..00000000 --- a/edu_sharing_client/models/tracking_node.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class TrackingNode(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. 
- attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'counts': 'dict(str, int)', - '_date': 'str', - 'authority': 'Authority', - 'fields': 'dict(str, Serializable)', - 'groups': 'dict(str, dict(str, dict(str, int)))', - 'node': 'Node' - } - - attribute_map = { - 'counts': 'counts', - '_date': 'date', - 'authority': 'authority', - 'fields': 'fields', - 'groups': 'groups', - 'node': 'node' - } - - def __init__(self, counts=None, _date=None, authority=None, fields=None, groups=None, node=None): # noqa: E501 - """TrackingNode - a model defined in Swagger""" # noqa: E501 - self._counts = None - self.__date = None - self._authority = None - self._fields = None - self._groups = None - self._node = None - self.discriminator = None - if counts is not None: - self.counts = counts - if _date is not None: - self._date = _date - if authority is not None: - self.authority = authority - if fields is not None: - self.fields = fields - if groups is not None: - self.groups = groups - if node is not None: - self.node = node - - @property - def counts(self): - """Gets the counts of this TrackingNode. # noqa: E501 - - - :return: The counts of this TrackingNode. # noqa: E501 - :rtype: dict(str, int) - """ - return self._counts - - @counts.setter - def counts(self, counts): - """Sets the counts of this TrackingNode. - - - :param counts: The counts of this TrackingNode. # noqa: E501 - :type: dict(str, int) - """ - - self._counts = counts - - @property - def _date(self): - """Gets the _date of this TrackingNode. # noqa: E501 - - - :return: The _date of this TrackingNode. # noqa: E501 - :rtype: str - """ - return self.__date - - @_date.setter - def _date(self, _date): - """Sets the _date of this TrackingNode. - - - :param _date: The _date of this TrackingNode. # noqa: E501 - :type: str - """ - - self.__date = _date - - @property - def authority(self): - """Gets the authority of this TrackingNode. # noqa: E501 - - - :return: The authority of this TrackingNode. # noqa: E501 - :rtype: Authority - """ - return self._authority - - @authority.setter - def authority(self, authority): - """Sets the authority of this TrackingNode. - - - :param authority: The authority of this TrackingNode. # noqa: E501 - :type: Authority - """ - - self._authority = authority - - @property - def fields(self): - """Gets the fields of this TrackingNode. # noqa: E501 - - - :return: The fields of this TrackingNode. # noqa: E501 - :rtype: dict(str, Serializable) - """ - return self._fields - - @fields.setter - def fields(self, fields): - """Sets the fields of this TrackingNode. - - - :param fields: The fields of this TrackingNode. # noqa: E501 - :type: dict(str, Serializable) - """ - - self._fields = fields - - @property - def groups(self): - """Gets the groups of this TrackingNode. # noqa: E501 - - - :return: The groups of this TrackingNode. # noqa: E501 - :rtype: dict(str, dict(str, dict(str, int))) - """ - return self._groups - - @groups.setter - def groups(self, groups): - """Sets the groups of this TrackingNode. - - - :param groups: The groups of this TrackingNode. # noqa: E501 - :type: dict(str, dict(str, dict(str, int))) - """ - - self._groups = groups - - @property - def node(self): - """Gets the node of this TrackingNode. # noqa: E501 - - - :return: The node of this TrackingNode. # noqa: E501 - :rtype: Node - """ - return self._node - - @node.setter - def node(self, node): - """Sets the node of this TrackingNode. - - - :param node: The node of this TrackingNode. 
# noqa: E501 - :type: Node - """ - - self._node = node - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(TrackingNode, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, TrackingNode): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/upload_result.py b/edu_sharing_client/models/upload_result.py deleted file mode 100644 index 0237d137..00000000 --- a/edu_sharing_client/models/upload_result.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UploadResult(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'file': 'str' - } - - attribute_map = { - 'file': 'file' - } - - def __init__(self, file=None): # noqa: E501 - """UploadResult - a model defined in Swagger""" # noqa: E501 - self._file = None - self.discriminator = None - if file is not None: - self.file = file - - @property - def file(self): - """Gets the file of this UploadResult. # noqa: E501 - - - :return: The file of this UploadResult. # noqa: E501 - :rtype: str - """ - return self._file - - @file.setter - def file(self, file): - """Sets the file of this UploadResult. - - - :param file: The file of this UploadResult. 
# noqa: E501 - :type: str - """ - - self._file = file - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UploadResult, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UploadResult): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/usage.py b/edu_sharing_client/models/usage.py deleted file mode 100644 index 8263fd8c..00000000 --- a/edu_sharing_client/models/usage.py +++ /dev/null @@ -1,613 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Usage(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'from_used': 'datetime', - 'to_used': 'datetime', - 'usage_counter': 'int', - 'app_subtype': 'str', - 'app_type': 'str', - 'type': 'str', - 'created': 'datetime', - 'modified': 'datetime', - 'app_user': 'str', - 'app_user_mail': 'str', - 'course_id': 'str', - 'distinct_persons': 'int', - 'app_id': 'str', - 'node_id': 'str', - 'parent_node_id': 'str', - 'usage_version': 'str', - 'usage_xml_params': 'Parameters', - 'usage_xml_params_raw': 'str', - 'resource_id': 'str', - 'guid': 'str' - } - - attribute_map = { - 'from_used': 'fromUsed', - 'to_used': 'toUsed', - 'usage_counter': 'usageCounter', - 'app_subtype': 'appSubtype', - 'app_type': 'appType', - 'type': 'type', - 'created': 'created', - 'modified': 'modified', - 'app_user': 'appUser', - 'app_user_mail': 'appUserMail', - 'course_id': 'courseId', - 'distinct_persons': 'distinctPersons', - 'app_id': 'appId', - 'node_id': 'nodeId', - 'parent_node_id': 'parentNodeId', - 'usage_version': 'usageVersion', - 'usage_xml_params': 'usageXmlParams', - 'usage_xml_params_raw': 'usageXmlParamsRaw', - 'resource_id': 'resourceId', - 'guid': 'guid' - } - - def __init__(self, from_used=None, to_used=None, usage_counter=None, app_subtype=None, app_type=None, type=None, created=None, modified=None, app_user=None, app_user_mail=None, course_id=None, distinct_persons=None, app_id=None, node_id=None, parent_node_id=None, usage_version=None, usage_xml_params=None, usage_xml_params_raw=None, resource_id=None, guid=None): # noqa: E501 - """Usage - a model defined in Swagger""" # noqa: E501 - self._from_used = None - self._to_used = None - self._usage_counter = None - self._app_subtype = None - self._app_type = None - self._type = None - self._created = None - self._modified = None - self._app_user = None - self._app_user_mail = None - self._course_id = None - self._distinct_persons = None - self._app_id = None - self._node_id = None - self._parent_node_id = None - self._usage_version = None - self._usage_xml_params = None - self._usage_xml_params_raw = None - self._resource_id = None - self._guid = None - self.discriminator = None - if from_used is not None: - self.from_used = from_used - if to_used is not None: - self.to_used = to_used - if usage_counter is not None: - self.usage_counter = usage_counter - if app_subtype is not None: - self.app_subtype = app_subtype - if app_type is not None: - self.app_type = app_type - if type is not None: - self.type = type - if created is not None: - self.created = created - if modified is not None: - self.modified = modified - self.app_user = app_user - self.app_user_mail = app_user_mail - self.course_id = course_id - if distinct_persons is not None: - self.distinct_persons = distinct_persons - self.app_id = app_id - self.node_id = node_id - self.parent_node_id = parent_node_id - self.usage_version = usage_version - if usage_xml_params is not None: - self.usage_xml_params = usage_xml_params - if usage_xml_params_raw is not None: - self.usage_xml_params_raw = usage_xml_params_raw - self.resource_id = resource_id - if guid is not None: - self.guid = guid - - @property - def from_used(self): - """Gets the from_used of this Usage. # noqa: E501 - - - :return: The from_used of this Usage. # noqa: E501 - :rtype: datetime - """ - return self._from_used - - @from_used.setter - def from_used(self, from_used): - """Sets the from_used of this Usage. - - - :param from_used: The from_used of this Usage. 
# noqa: E501 - :type: datetime - """ - - self._from_used = from_used - - @property - def to_used(self): - """Gets the to_used of this Usage. # noqa: E501 - - - :return: The to_used of this Usage. # noqa: E501 - :rtype: datetime - """ - return self._to_used - - @to_used.setter - def to_used(self, to_used): - """Sets the to_used of this Usage. - - - :param to_used: The to_used of this Usage. # noqa: E501 - :type: datetime - """ - - self._to_used = to_used - - @property - def usage_counter(self): - """Gets the usage_counter of this Usage. # noqa: E501 - - - :return: The usage_counter of this Usage. # noqa: E501 - :rtype: int - """ - return self._usage_counter - - @usage_counter.setter - def usage_counter(self, usage_counter): - """Sets the usage_counter of this Usage. - - - :param usage_counter: The usage_counter of this Usage. # noqa: E501 - :type: int - """ - - self._usage_counter = usage_counter - - @property - def app_subtype(self): - """Gets the app_subtype of this Usage. # noqa: E501 - - - :return: The app_subtype of this Usage. # noqa: E501 - :rtype: str - """ - return self._app_subtype - - @app_subtype.setter - def app_subtype(self, app_subtype): - """Sets the app_subtype of this Usage. - - - :param app_subtype: The app_subtype of this Usage. # noqa: E501 - :type: str - """ - - self._app_subtype = app_subtype - - @property - def app_type(self): - """Gets the app_type of this Usage. # noqa: E501 - - - :return: The app_type of this Usage. # noqa: E501 - :rtype: str - """ - return self._app_type - - @app_type.setter - def app_type(self, app_type): - """Sets the app_type of this Usage. - - - :param app_type: The app_type of this Usage. # noqa: E501 - :type: str - """ - - self._app_type = app_type - - @property - def type(self): - """Gets the type of this Usage. # noqa: E501 - - - :return: The type of this Usage. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this Usage. - - - :param type: The type of this Usage. # noqa: E501 - :type: str - """ - - self._type = type - - @property - def created(self): - """Gets the created of this Usage. # noqa: E501 - - - :return: The created of this Usage. # noqa: E501 - :rtype: datetime - """ - return self._created - - @created.setter - def created(self, created): - """Sets the created of this Usage. - - - :param created: The created of this Usage. # noqa: E501 - :type: datetime - """ - - self._created = created - - @property - def modified(self): - """Gets the modified of this Usage. # noqa: E501 - - - :return: The modified of this Usage. # noqa: E501 - :rtype: datetime - """ - return self._modified - - @modified.setter - def modified(self, modified): - """Sets the modified of this Usage. - - - :param modified: The modified of this Usage. # noqa: E501 - :type: datetime - """ - - self._modified = modified - - @property - def app_user(self): - """Gets the app_user of this Usage. # noqa: E501 - - - :return: The app_user of this Usage. # noqa: E501 - :rtype: str - """ - return self._app_user - - @app_user.setter - def app_user(self, app_user): - """Sets the app_user of this Usage. - - - :param app_user: The app_user of this Usage. # noqa: E501 - :type: str - """ - if app_user is None: - raise ValueError("Invalid value for `app_user`, must not be `None`") # noqa: E501 - - self._app_user = app_user - - @property - def app_user_mail(self): - """Gets the app_user_mail of this Usage. # noqa: E501 - - - :return: The app_user_mail of this Usage. 
# noqa: E501 - :rtype: str - """ - return self._app_user_mail - - @app_user_mail.setter - def app_user_mail(self, app_user_mail): - """Sets the app_user_mail of this Usage. - - - :param app_user_mail: The app_user_mail of this Usage. # noqa: E501 - :type: str - """ - if app_user_mail is None: - raise ValueError("Invalid value for `app_user_mail`, must not be `None`") # noqa: E501 - - self._app_user_mail = app_user_mail - - @property - def course_id(self): - """Gets the course_id of this Usage. # noqa: E501 - - - :return: The course_id of this Usage. # noqa: E501 - :rtype: str - """ - return self._course_id - - @course_id.setter - def course_id(self, course_id): - """Sets the course_id of this Usage. - - - :param course_id: The course_id of this Usage. # noqa: E501 - :type: str - """ - if course_id is None: - raise ValueError("Invalid value for `course_id`, must not be `None`") # noqa: E501 - - self._course_id = course_id - - @property - def distinct_persons(self): - """Gets the distinct_persons of this Usage. # noqa: E501 - - - :return: The distinct_persons of this Usage. # noqa: E501 - :rtype: int - """ - return self._distinct_persons - - @distinct_persons.setter - def distinct_persons(self, distinct_persons): - """Sets the distinct_persons of this Usage. - - - :param distinct_persons: The distinct_persons of this Usage. # noqa: E501 - :type: int - """ - - self._distinct_persons = distinct_persons - - @property - def app_id(self): - """Gets the app_id of this Usage. # noqa: E501 - - - :return: The app_id of this Usage. # noqa: E501 - :rtype: str - """ - return self._app_id - - @app_id.setter - def app_id(self, app_id): - """Sets the app_id of this Usage. - - - :param app_id: The app_id of this Usage. # noqa: E501 - :type: str - """ - if app_id is None: - raise ValueError("Invalid value for `app_id`, must not be `None`") # noqa: E501 - - self._app_id = app_id - - @property - def node_id(self): - """Gets the node_id of this Usage. # noqa: E501 - - - :return: The node_id of this Usage. # noqa: E501 - :rtype: str - """ - return self._node_id - - @node_id.setter - def node_id(self, node_id): - """Sets the node_id of this Usage. - - - :param node_id: The node_id of this Usage. # noqa: E501 - :type: str - """ - if node_id is None: - raise ValueError("Invalid value for `node_id`, must not be `None`") # noqa: E501 - - self._node_id = node_id - - @property - def parent_node_id(self): - """Gets the parent_node_id of this Usage. # noqa: E501 - - - :return: The parent_node_id of this Usage. # noqa: E501 - :rtype: str - """ - return self._parent_node_id - - @parent_node_id.setter - def parent_node_id(self, parent_node_id): - """Sets the parent_node_id of this Usage. - - - :param parent_node_id: The parent_node_id of this Usage. # noqa: E501 - :type: str - """ - if parent_node_id is None: - raise ValueError("Invalid value for `parent_node_id`, must not be `None`") # noqa: E501 - - self._parent_node_id = parent_node_id - - @property - def usage_version(self): - """Gets the usage_version of this Usage. # noqa: E501 - - - :return: The usage_version of this Usage. # noqa: E501 - :rtype: str - """ - return self._usage_version - - @usage_version.setter - def usage_version(self, usage_version): - """Sets the usage_version of this Usage. - - - :param usage_version: The usage_version of this Usage. 
# noqa: E501 - :type: str - """ - if usage_version is None: - raise ValueError("Invalid value for `usage_version`, must not be `None`") # noqa: E501 - - self._usage_version = usage_version - - @property - def usage_xml_params(self): - """Gets the usage_xml_params of this Usage. # noqa: E501 - - - :return: The usage_xml_params of this Usage. # noqa: E501 - :rtype: Parameters - """ - return self._usage_xml_params - - @usage_xml_params.setter - def usage_xml_params(self, usage_xml_params): - """Sets the usage_xml_params of this Usage. - - - :param usage_xml_params: The usage_xml_params of this Usage. # noqa: E501 - :type: Parameters - """ - - self._usage_xml_params = usage_xml_params - - @property - def usage_xml_params_raw(self): - """Gets the usage_xml_params_raw of this Usage. # noqa: E501 - - - :return: The usage_xml_params_raw of this Usage. # noqa: E501 - :rtype: str - """ - return self._usage_xml_params_raw - - @usage_xml_params_raw.setter - def usage_xml_params_raw(self, usage_xml_params_raw): - """Sets the usage_xml_params_raw of this Usage. - - - :param usage_xml_params_raw: The usage_xml_params_raw of this Usage. # noqa: E501 - :type: str - """ - - self._usage_xml_params_raw = usage_xml_params_raw - - @property - def resource_id(self): - """Gets the resource_id of this Usage. # noqa: E501 - - - :return: The resource_id of this Usage. # noqa: E501 - :rtype: str - """ - return self._resource_id - - @resource_id.setter - def resource_id(self, resource_id): - """Sets the resource_id of this Usage. - - - :param resource_id: The resource_id of this Usage. # noqa: E501 - :type: str - """ - if resource_id is None: - raise ValueError("Invalid value for `resource_id`, must not be `None`") # noqa: E501 - - self._resource_id = resource_id - - @property - def guid(self): - """Gets the guid of this Usage. # noqa: E501 - - - :return: The guid of this Usage. # noqa: E501 - :rtype: str - """ - return self._guid - - @guid.setter - def guid(self, guid): - """Sets the guid of this Usage. - - - :param guid: The guid of this Usage. 
# noqa: E501 - :type: str - """ - - self._guid = guid - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Usage, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Usage): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/usages.py b/edu_sharing_client/models/usages.py deleted file mode 100644 index 36e0bf5e..00000000 --- a/edu_sharing_client/models/usages.py +++ /dev/null @@ -1,111 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Usages(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'usages': 'list[Usage]' - } - - attribute_map = { - 'usages': 'usages' - } - - def __init__(self, usages=None): # noqa: E501 - """Usages - a model defined in Swagger""" # noqa: E501 - self._usages = None - self.discriminator = None - if usages is not None: - self.usages = usages - - @property - def usages(self): - """Gets the usages of this Usages. # noqa: E501 - - - :return: The usages of this Usages. # noqa: E501 - :rtype: list[Usage] - """ - return self._usages - - @usages.setter - def usages(self, usages): - """Sets the usages of this Usages. - - - :param usages: The usages of this Usages. 
# noqa: E501 - :type: list[Usage] - """ - - self._usages = usages - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Usages, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Usages): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user.py b/edu_sharing_client/models/user.py deleted file mode 100644 index 0ca27366..00000000 --- a/edu_sharing_client/models/user.py +++ /dev/null @@ -1,353 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class User(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'status': 'UserStatus', - 'quota': 'UserQuota', - 'properties': 'dict(str, list[str])', - 'authority_name': 'str', - 'authority_type': 'str', - 'user_name': 'str', - 'profile': 'UserProfile', - 'stats': 'UserStats', - 'home_folder': 'NodeRef', - 'shared_folders': 'list[NodeRef]' - } - - attribute_map = { - 'status': 'status', - 'quota': 'quota', - 'properties': 'properties', - 'authority_name': 'authorityName', - 'authority_type': 'authorityType', - 'user_name': 'userName', - 'profile': 'profile', - 'stats': 'stats', - 'home_folder': 'homeFolder', - 'shared_folders': 'sharedFolders' - } - - def __init__(self, status=None, quota=None, properties=None, authority_name=None, authority_type=None, user_name=None, profile=None, stats=None, home_folder=None, shared_folders=None): # noqa: E501 - """User - a model defined in Swagger""" # noqa: E501 - self._status = None - self._quota = None - self._properties = None - self._authority_name = None - self._authority_type = None - self._user_name = None - self._profile = None - self._stats = None - self._home_folder = None - self._shared_folders = None - self.discriminator = None - if status is not None: - self.status = status - if quota is not None: - self.quota = quota - if properties is not None: - self.properties = properties - self.authority_name = authority_name - if authority_type is not None: - self.authority_type = authority_type - if user_name is not None: - self.user_name = user_name - if profile is not None: - self.profile = profile - if stats is not None: - self.stats = stats - self.home_folder = home_folder - if shared_folders is not None: - self.shared_folders = shared_folders - - @property - def status(self): - """Gets the status of this User. # noqa: E501 - - - :return: The status of this User. # noqa: E501 - :rtype: UserStatus - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this User. - - - :param status: The status of this User. # noqa: E501 - :type: UserStatus - """ - - self._status = status - - @property - def quota(self): - """Gets the quota of this User. # noqa: E501 - - - :return: The quota of this User. # noqa: E501 - :rtype: UserQuota - """ - return self._quota - - @quota.setter - def quota(self, quota): - """Sets the quota of this User. - - - :param quota: The quota of this User. # noqa: E501 - :type: UserQuota - """ - - self._quota = quota - - @property - def properties(self): - """Gets the properties of this User. # noqa: E501 - - - :return: The properties of this User. # noqa: E501 - :rtype: dict(str, list[str]) - """ - return self._properties - - @properties.setter - def properties(self, properties): - """Sets the properties of this User. - - - :param properties: The properties of this User. # noqa: E501 - :type: dict(str, list[str]) - """ - - self._properties = properties - - @property - def authority_name(self): - """Gets the authority_name of this User. # noqa: E501 - - - :return: The authority_name of this User. # noqa: E501 - :rtype: str - """ - return self._authority_name - - @authority_name.setter - def authority_name(self, authority_name): - """Sets the authority_name of this User. - - - :param authority_name: The authority_name of this User. # noqa: E501 - :type: str - """ - if authority_name is None: - raise ValueError("Invalid value for `authority_name`, must not be `None`") # noqa: E501 - - self._authority_name = authority_name - - @property - def authority_type(self): - """Gets the authority_type of this User. 
# noqa: E501 - - - :return: The authority_type of this User. # noqa: E501 - :rtype: str - """ - return self._authority_type - - @authority_type.setter - def authority_type(self, authority_type): - """Sets the authority_type of this User. - - - :param authority_type: The authority_type of this User. # noqa: E501 - :type: str - """ - allowed_values = ["USER", "GROUP", "OWNER", "EVERYONE", "GUEST"] # noqa: E501 - if authority_type not in allowed_values: - raise ValueError( - "Invalid value for `authority_type` ({0}), must be one of {1}" # noqa: E501 - .format(authority_type, allowed_values) - ) - - self._authority_type = authority_type - - @property - def user_name(self): - """Gets the user_name of this User. # noqa: E501 - - - :return: The user_name of this User. # noqa: E501 - :rtype: str - """ - return self._user_name - - @user_name.setter - def user_name(self, user_name): - """Sets the user_name of this User. - - - :param user_name: The user_name of this User. # noqa: E501 - :type: str - """ - - self._user_name = user_name - - @property - def profile(self): - """Gets the profile of this User. # noqa: E501 - - - :return: The profile of this User. # noqa: E501 - :rtype: UserProfile - """ - return self._profile - - @profile.setter - def profile(self, profile): - """Sets the profile of this User. - - - :param profile: The profile of this User. # noqa: E501 - :type: UserProfile - """ - - self._profile = profile - - @property - def stats(self): - """Gets the stats of this User. # noqa: E501 - - - :return: The stats of this User. # noqa: E501 - :rtype: UserStats - """ - return self._stats - - @stats.setter - def stats(self, stats): - """Sets the stats of this User. - - - :param stats: The stats of this User. # noqa: E501 - :type: UserStats - """ - - self._stats = stats - - @property - def home_folder(self): - """Gets the home_folder of this User. # noqa: E501 - - - :return: The home_folder of this User. # noqa: E501 - :rtype: NodeRef - """ - return self._home_folder - - @home_folder.setter - def home_folder(self, home_folder): - """Sets the home_folder of this User. - - - :param home_folder: The home_folder of this User. # noqa: E501 - :type: NodeRef - """ - if home_folder is None: - raise ValueError("Invalid value for `home_folder`, must not be `None`") # noqa: E501 - - self._home_folder = home_folder - - @property - def shared_folders(self): - """Gets the shared_folders of this User. # noqa: E501 - - - :return: The shared_folders of this User. # noqa: E501 - :rtype: list[NodeRef] - """ - return self._shared_folders - - @shared_folders.setter - def shared_folders(self, shared_folders): - """Sets the shared_folders of this User. - - - :param shared_folders: The shared_folders of this User. 
# noqa: E501 - :type: list[NodeRef] - """ - - self._shared_folders = shared_folders - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(User, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, User): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_credential.py b/edu_sharing_client/models/user_credential.py deleted file mode 100644 index 97331259..00000000 --- a/edu_sharing_client/models/user_credential.py +++ /dev/null @@ -1,138 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserCredential(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'old_password': 'str', - 'new_password': 'str' - } - - attribute_map = { - 'old_password': 'oldPassword', - 'new_password': 'newPassword' - } - - def __init__(self, old_password=None, new_password=None): # noqa: E501 - """UserCredential - a model defined in Swagger""" # noqa: E501 - self._old_password = None - self._new_password = None - self.discriminator = None - if old_password is not None: - self.old_password = old_password - self.new_password = new_password - - @property - def old_password(self): - """Gets the old_password of this UserCredential. # noqa: E501 - - - :return: The old_password of this UserCredential. # noqa: E501 - :rtype: str - """ - return self._old_password - - @old_password.setter - def old_password(self, old_password): - """Sets the old_password of this UserCredential. - - - :param old_password: The old_password of this UserCredential. # noqa: E501 - :type: str - """ - - self._old_password = old_password - - @property - def new_password(self): - """Gets the new_password of this UserCredential. # noqa: E501 - - - :return: The new_password of this UserCredential. # noqa: E501 - :rtype: str - """ - return self._new_password - - @new_password.setter - def new_password(self, new_password): - """Sets the new_password of this UserCredential. - - - :param new_password: The new_password of this UserCredential. 
# noqa: E501 - :type: str - """ - if new_password is None: - raise ValueError("Invalid value for `new_password`, must not be `None`") # noqa: E501 - - self._new_password = new_password - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserCredential, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserCredential): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_entries.py b/edu_sharing_client/models/user_entries.py deleted file mode 100644 index abeb7024..00000000 --- a/edu_sharing_client/models/user_entries.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserEntries(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'users': 'list[UserSimple]', - 'pagination': 'Pagination' - } - - attribute_map = { - 'users': 'users', - 'pagination': 'pagination' - } - - def __init__(self, users=None, pagination=None): # noqa: E501 - """UserEntries - a model defined in Swagger""" # noqa: E501 - self._users = None - self._pagination = None - self.discriminator = None - self.users = users - self.pagination = pagination - - @property - def users(self): - """Gets the users of this UserEntries. # noqa: E501 - - - :return: The users of this UserEntries. # noqa: E501 - :rtype: list[UserSimple] - """ - return self._users - - @users.setter - def users(self, users): - """Sets the users of this UserEntries. - - - :param users: The users of this UserEntries. # noqa: E501 - :type: list[UserSimple] - """ - if users is None: - raise ValueError("Invalid value for `users`, must not be `None`") # noqa: E501 - - self._users = users - - @property - def pagination(self): - """Gets the pagination of this UserEntries. # noqa: E501 - - - :return: The pagination of this UserEntries. # noqa: E501 - :rtype: Pagination - """ - return self._pagination - - @pagination.setter - def pagination(self, pagination): - """Sets the pagination of this UserEntries. - - - :param pagination: The pagination of this UserEntries. 
# noqa: E501 - :type: Pagination - """ - if pagination is None: - raise ValueError("Invalid value for `pagination`, must not be `None`") # noqa: E501 - - self._pagination = pagination - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserEntries, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserEntries): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_entry.py b/edu_sharing_client/models/user_entry.py deleted file mode 100644 index 653cd3a4..00000000 --- a/edu_sharing_client/models/user_entry.py +++ /dev/null @@ -1,138 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserEntry(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'edit_profile': 'bool', - 'person': 'User' - } - - attribute_map = { - 'edit_profile': 'editProfile', - 'person': 'person' - } - - def __init__(self, edit_profile=False, person=None): # noqa: E501 - """UserEntry - a model defined in Swagger""" # noqa: E501 - self._edit_profile = None - self._person = None - self.discriminator = None - if edit_profile is not None: - self.edit_profile = edit_profile - self.person = person - - @property - def edit_profile(self): - """Gets the edit_profile of this UserEntry. # noqa: E501 - - - :return: The edit_profile of this UserEntry. # noqa: E501 - :rtype: bool - """ - return self._edit_profile - - @edit_profile.setter - def edit_profile(self, edit_profile): - """Sets the edit_profile of this UserEntry. - - - :param edit_profile: The edit_profile of this UserEntry. # noqa: E501 - :type: bool - """ - - self._edit_profile = edit_profile - - @property - def person(self): - """Gets the person of this UserEntry. # noqa: E501 - - - :return: The person of this UserEntry. # noqa: E501 - :rtype: User - """ - return self._person - - @person.setter - def person(self, person): - """Sets the person of this UserEntry. - - - :param person: The person of this UserEntry. 
# noqa: E501 - :type: User - """ - if person is None: - raise ValueError("Invalid value for `person`, must not be `None`") # noqa: E501 - - self._person = person - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserEntry, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserEntry): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_profile.py b/edu_sharing_client/models/user_profile.py deleted file mode 100644 index 2e686d77..00000000 --- a/edu_sharing_client/models/user_profile.py +++ /dev/null @@ -1,293 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserProfile(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'primary_affiliation': 'str', - 'skills': 'list[str]', - 'types': 'list[str]', - 'first_name': 'str', - 'last_name': 'str', - 'email': 'str', - 'avatar': 'str', - 'about': 'str' - } - - attribute_map = { - 'primary_affiliation': 'primaryAffiliation', - 'skills': 'skills', - 'types': 'types', - 'first_name': 'firstName', - 'last_name': 'lastName', - 'email': 'email', - 'avatar': 'avatar', - 'about': 'about' - } - - def __init__(self, primary_affiliation=None, skills=None, types=None, first_name=None, last_name=None, email=None, avatar=None, about=None): # noqa: E501 - """UserProfile - a model defined in Swagger""" # noqa: E501 - self._primary_affiliation = None - self._skills = None - self._types = None - self._first_name = None - self._last_name = None - self._email = None - self._avatar = None - self._about = None - self.discriminator = None - if primary_affiliation is not None: - self.primary_affiliation = primary_affiliation - if skills is not None: - self.skills = skills - if types is not None: - self.types = types - if first_name is not None: - self.first_name = first_name - if last_name is not None: - self.last_name = last_name - if email is not None: - self.email = email - if avatar is not None: - self.avatar = avatar - if about is not None: - self.about = about - - @property - def primary_affiliation(self): - """Gets the primary_affiliation of this UserProfile. # noqa: E501 - - - :return: The primary_affiliation of this UserProfile. # noqa: E501 - :rtype: str - """ - return self._primary_affiliation - - @primary_affiliation.setter - def primary_affiliation(self, primary_affiliation): - """Sets the primary_affiliation of this UserProfile. - - - :param primary_affiliation: The primary_affiliation of this UserProfile. # noqa: E501 - :type: str - """ - - self._primary_affiliation = primary_affiliation - - @property - def skills(self): - """Gets the skills of this UserProfile. # noqa: E501 - - - :return: The skills of this UserProfile. # noqa: E501 - :rtype: list[str] - """ - return self._skills - - @skills.setter - def skills(self, skills): - """Sets the skills of this UserProfile. - - - :param skills: The skills of this UserProfile. # noqa: E501 - :type: list[str] - """ - - self._skills = skills - - @property - def types(self): - """Gets the types of this UserProfile. # noqa: E501 - - - :return: The types of this UserProfile. # noqa: E501 - :rtype: list[str] - """ - return self._types - - @types.setter - def types(self, types): - """Sets the types of this UserProfile. - - - :param types: The types of this UserProfile. # noqa: E501 - :type: list[str] - """ - - self._types = types - - @property - def first_name(self): - """Gets the first_name of this UserProfile. # noqa: E501 - - - :return: The first_name of this UserProfile. # noqa: E501 - :rtype: str - """ - return self._first_name - - @first_name.setter - def first_name(self, first_name): - """Sets the first_name of this UserProfile. - - - :param first_name: The first_name of this UserProfile. # noqa: E501 - :type: str - """ - - self._first_name = first_name - - @property - def last_name(self): - """Gets the last_name of this UserProfile. # noqa: E501 - - - :return: The last_name of this UserProfile. # noqa: E501 - :rtype: str - """ - return self._last_name - - @last_name.setter - def last_name(self, last_name): - """Sets the last_name of this UserProfile. - - - :param last_name: The last_name of this UserProfile. 
# noqa: E501 - :type: str - """ - - self._last_name = last_name - - @property - def email(self): - """Gets the email of this UserProfile. # noqa: E501 - - - :return: The email of this UserProfile. # noqa: E501 - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this UserProfile. - - - :param email: The email of this UserProfile. # noqa: E501 - :type: str - """ - - self._email = email - - @property - def avatar(self): - """Gets the avatar of this UserProfile. # noqa: E501 - - - :return: The avatar of this UserProfile. # noqa: E501 - :rtype: str - """ - return self._avatar - - @avatar.setter - def avatar(self, avatar): - """Sets the avatar of this UserProfile. - - - :param avatar: The avatar of this UserProfile. # noqa: E501 - :type: str - """ - - self._avatar = avatar - - @property - def about(self): - """Gets the about of this UserProfile. # noqa: E501 - - - :return: The about of this UserProfile. # noqa: E501 - :rtype: str - """ - return self._about - - @about.setter - def about(self, about): - """Sets the about of this UserProfile. - - - :param about: The about of this UserProfile. # noqa: E501 - :type: str - """ - - self._about = about - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserProfile, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserProfile): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_profile_edit.py b/edu_sharing_client/models/user_profile_edit.py deleted file mode 100644 index f86395dc..00000000 --- a/edu_sharing_client/models/user_profile_edit.py +++ /dev/null @@ -1,319 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserProfileEdit(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'primary_affiliation': 'str', - 'skills': 'list[str]', - 'types': 'list[str]', - 'size_quota': 'int', - 'first_name': 'str', - 'last_name': 'str', - 'email': 'str', - 'avatar': 'str', - 'about': 'str' - } - - attribute_map = { - 'primary_affiliation': 'primaryAffiliation', - 'skills': 'skills', - 'types': 'types', - 'size_quota': 'sizeQuota', - 'first_name': 'firstName', - 'last_name': 'lastName', - 'email': 'email', - 'avatar': 'avatar', - 'about': 'about' - } - - def __init__(self, primary_affiliation=None, skills=None, types=None, size_quota=None, first_name=None, last_name=None, email=None, avatar=None, about=None): # noqa: E501 - """UserProfileEdit - a model defined in Swagger""" # noqa: E501 - self._primary_affiliation = None - self._skills = None - self._types = None - self._size_quota = None - self._first_name = None - self._last_name = None - self._email = None - self._avatar = None - self._about = None - self.discriminator = None - if primary_affiliation is not None: - self.primary_affiliation = primary_affiliation - if skills is not None: - self.skills = skills - if types is not None: - self.types = types - if size_quota is not None: - self.size_quota = size_quota - if first_name is not None: - self.first_name = first_name - if last_name is not None: - self.last_name = last_name - if email is not None: - self.email = email - if avatar is not None: - self.avatar = avatar - if about is not None: - self.about = about - - @property - def primary_affiliation(self): - """Gets the primary_affiliation of this UserProfileEdit. # noqa: E501 - - - :return: The primary_affiliation of this UserProfileEdit. # noqa: E501 - :rtype: str - """ - return self._primary_affiliation - - @primary_affiliation.setter - def primary_affiliation(self, primary_affiliation): - """Sets the primary_affiliation of this UserProfileEdit. - - - :param primary_affiliation: The primary_affiliation of this UserProfileEdit. # noqa: E501 - :type: str - """ - - self._primary_affiliation = primary_affiliation - - @property - def skills(self): - """Gets the skills of this UserProfileEdit. # noqa: E501 - - - :return: The skills of this UserProfileEdit. # noqa: E501 - :rtype: list[str] - """ - return self._skills - - @skills.setter - def skills(self, skills): - """Sets the skills of this UserProfileEdit. - - - :param skills: The skills of this UserProfileEdit. # noqa: E501 - :type: list[str] - """ - - self._skills = skills - - @property - def types(self): - """Gets the types of this UserProfileEdit. # noqa: E501 - - - :return: The types of this UserProfileEdit. # noqa: E501 - :rtype: list[str] - """ - return self._types - - @types.setter - def types(self, types): - """Sets the types of this UserProfileEdit. - - - :param types: The types of this UserProfileEdit. # noqa: E501 - :type: list[str] - """ - - self._types = types - - @property - def size_quota(self): - """Gets the size_quota of this UserProfileEdit. # noqa: E501 - - - :return: The size_quota of this UserProfileEdit. # noqa: E501 - :rtype: int - """ - return self._size_quota - - @size_quota.setter - def size_quota(self, size_quota): - """Sets the size_quota of this UserProfileEdit. - - - :param size_quota: The size_quota of this UserProfileEdit. # noqa: E501 - :type: int - """ - - self._size_quota = size_quota - - @property - def first_name(self): - """Gets the first_name of this UserProfileEdit. # noqa: E501 - - - :return: The first_name of this UserProfileEdit. 
# noqa: E501 - :rtype: str - """ - return self._first_name - - @first_name.setter - def first_name(self, first_name): - """Sets the first_name of this UserProfileEdit. - - - :param first_name: The first_name of this UserProfileEdit. # noqa: E501 - :type: str - """ - - self._first_name = first_name - - @property - def last_name(self): - """Gets the last_name of this UserProfileEdit. # noqa: E501 - - - :return: The last_name of this UserProfileEdit. # noqa: E501 - :rtype: str - """ - return self._last_name - - @last_name.setter - def last_name(self, last_name): - """Sets the last_name of this UserProfileEdit. - - - :param last_name: The last_name of this UserProfileEdit. # noqa: E501 - :type: str - """ - - self._last_name = last_name - - @property - def email(self): - """Gets the email of this UserProfileEdit. # noqa: E501 - - - :return: The email of this UserProfileEdit. # noqa: E501 - :rtype: str - """ - return self._email - - @email.setter - def email(self, email): - """Sets the email of this UserProfileEdit. - - - :param email: The email of this UserProfileEdit. # noqa: E501 - :type: str - """ - - self._email = email - - @property - def avatar(self): - """Gets the avatar of this UserProfileEdit. # noqa: E501 - - - :return: The avatar of this UserProfileEdit. # noqa: E501 - :rtype: str - """ - return self._avatar - - @avatar.setter - def avatar(self, avatar): - """Sets the avatar of this UserProfileEdit. - - - :param avatar: The avatar of this UserProfileEdit. # noqa: E501 - :type: str - """ - - self._avatar = avatar - - @property - def about(self): - """Gets the about of this UserProfileEdit. # noqa: E501 - - - :return: The about of this UserProfileEdit. # noqa: E501 - :rtype: str - """ - return self._about - - @about.setter - def about(self, about): - """Sets the about of this UserProfileEdit. - - - :param about: The about of this UserProfileEdit. # noqa: E501 - :type: str - """ - - self._about = about - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserProfileEdit, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserProfileEdit): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_quota.py b/edu_sharing_client/models/user_quota.py deleted file mode 100644 index bc66a712..00000000 --- a/edu_sharing_client/models/user_quota.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. 
# noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserQuota(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'enabled': 'bool', - 'size_current': 'int', - 'size_quota': 'int' - } - - attribute_map = { - 'enabled': 'enabled', - 'size_current': 'sizeCurrent', - 'size_quota': 'sizeQuota' - } - - def __init__(self, enabled=False, size_current=None, size_quota=None): # noqa: E501 - """UserQuota - a model defined in Swagger""" # noqa: E501 - self._enabled = None - self._size_current = None - self._size_quota = None - self.discriminator = None - if enabled is not None: - self.enabled = enabled - if size_current is not None: - self.size_current = size_current - if size_quota is not None: - self.size_quota = size_quota - - @property - def enabled(self): - """Gets the enabled of this UserQuota. # noqa: E501 - - - :return: The enabled of this UserQuota. # noqa: E501 - :rtype: bool - """ - return self._enabled - - @enabled.setter - def enabled(self, enabled): - """Sets the enabled of this UserQuota. - - - :param enabled: The enabled of this UserQuota. # noqa: E501 - :type: bool - """ - - self._enabled = enabled - - @property - def size_current(self): - """Gets the size_current of this UserQuota. # noqa: E501 - - - :return: The size_current of this UserQuota. # noqa: E501 - :rtype: int - """ - return self._size_current - - @size_current.setter - def size_current(self, size_current): - """Sets the size_current of this UserQuota. - - - :param size_current: The size_current of this UserQuota. # noqa: E501 - :type: int - """ - - self._size_current = size_current - - @property - def size_quota(self): - """Gets the size_quota of this UserQuota. # noqa: E501 - - - :return: The size_quota of this UserQuota. # noqa: E501 - :rtype: int - """ - return self._size_quota - - @size_quota.setter - def size_quota(self, size_quota): - """Sets the size_quota of this UserQuota. - - - :param size_quota: The size_quota of this UserQuota. 
# noqa: E501 - :type: int - """ - - self._size_quota = size_quota - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserQuota, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserQuota): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_simple.py b/edu_sharing_client/models/user_simple.py deleted file mode 100644 index 55577f9d..00000000 --- a/edu_sharing_client/models/user_simple.py +++ /dev/null @@ -1,248 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserSimple(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'status': 'UserStatus', - 'authority_name': 'str', - 'authority_type': 'str', - 'user_name': 'str', - 'profile': 'UserProfile', - 'stats': 'UserStats' - } - - attribute_map = { - 'status': 'status', - 'authority_name': 'authorityName', - 'authority_type': 'authorityType', - 'user_name': 'userName', - 'profile': 'profile', - 'stats': 'stats' - } - - def __init__(self, status=None, authority_name=None, authority_type=None, user_name=None, profile=None, stats=None): # noqa: E501 - """UserSimple - a model defined in Swagger""" # noqa: E501 - self._status = None - self._authority_name = None - self._authority_type = None - self._user_name = None - self._profile = None - self._stats = None - self.discriminator = None - if status is not None: - self.status = status - self.authority_name = authority_name - if authority_type is not None: - self.authority_type = authority_type - if user_name is not None: - self.user_name = user_name - if profile is not None: - self.profile = profile - if stats is not None: - self.stats = stats - - @property - def status(self): - """Gets the status of this UserSimple. # noqa: E501 - - - :return: The status of this UserSimple. # noqa: E501 - :rtype: UserStatus - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this UserSimple. - - - :param status: The status of this UserSimple. 
# noqa: E501 - :type: UserStatus - """ - - self._status = status - - @property - def authority_name(self): - """Gets the authority_name of this UserSimple. # noqa: E501 - - - :return: The authority_name of this UserSimple. # noqa: E501 - :rtype: str - """ - return self._authority_name - - @authority_name.setter - def authority_name(self, authority_name): - """Sets the authority_name of this UserSimple. - - - :param authority_name: The authority_name of this UserSimple. # noqa: E501 - :type: str - """ - if authority_name is None: - raise ValueError("Invalid value for `authority_name`, must not be `None`") # noqa: E501 - - self._authority_name = authority_name - - @property - def authority_type(self): - """Gets the authority_type of this UserSimple. # noqa: E501 - - - :return: The authority_type of this UserSimple. # noqa: E501 - :rtype: str - """ - return self._authority_type - - @authority_type.setter - def authority_type(self, authority_type): - """Sets the authority_type of this UserSimple. - - - :param authority_type: The authority_type of this UserSimple. # noqa: E501 - :type: str - """ - allowed_values = ["USER", "GROUP", "OWNER", "EVERYONE", "GUEST"] # noqa: E501 - if authority_type not in allowed_values: - raise ValueError( - "Invalid value for `authority_type` ({0}), must be one of {1}" # noqa: E501 - .format(authority_type, allowed_values) - ) - - self._authority_type = authority_type - - @property - def user_name(self): - """Gets the user_name of this UserSimple. # noqa: E501 - - - :return: The user_name of this UserSimple. # noqa: E501 - :rtype: str - """ - return self._user_name - - @user_name.setter - def user_name(self, user_name): - """Sets the user_name of this UserSimple. - - - :param user_name: The user_name of this UserSimple. # noqa: E501 - :type: str - """ - - self._user_name = user_name - - @property - def profile(self): - """Gets the profile of this UserSimple. # noqa: E501 - - - :return: The profile of this UserSimple. # noqa: E501 - :rtype: UserProfile - """ - return self._profile - - @profile.setter - def profile(self, profile): - """Sets the profile of this UserSimple. - - - :param profile: The profile of this UserSimple. # noqa: E501 - :type: UserProfile - """ - - self._profile = profile - - @property - def stats(self): - """Gets the stats of this UserSimple. # noqa: E501 - - - :return: The stats of this UserSimple. # noqa: E501 - :rtype: UserStats - """ - return self._stats - - @stats.setter - def stats(self, stats): - """Sets the stats of this UserSimple. - - - :param stats: The stats of this UserSimple. 
# noqa: E501 - :type: UserStats - """ - - self._stats = stats - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserSimple, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserSimple): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_stats.py b/edu_sharing_client/models/user_stats.py deleted file mode 100644 index 717522cd..00000000 --- a/edu_sharing_client/models/user_stats.py +++ /dev/null @@ -1,163 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserStats(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'node_count': 'int', - 'node_count_cc': 'int', - 'collection_count': 'int' - } - - attribute_map = { - 'node_count': 'nodeCount', - 'node_count_cc': 'nodeCountCC', - 'collection_count': 'collectionCount' - } - - def __init__(self, node_count=None, node_count_cc=None, collection_count=None): # noqa: E501 - """UserStats - a model defined in Swagger""" # noqa: E501 - self._node_count = None - self._node_count_cc = None - self._collection_count = None - self.discriminator = None - if node_count is not None: - self.node_count = node_count - if node_count_cc is not None: - self.node_count_cc = node_count_cc - if collection_count is not None: - self.collection_count = collection_count - - @property - def node_count(self): - """Gets the node_count of this UserStats. # noqa: E501 - - - :return: The node_count of this UserStats. # noqa: E501 - :rtype: int - """ - return self._node_count - - @node_count.setter - def node_count(self, node_count): - """Sets the node_count of this UserStats. - - - :param node_count: The node_count of this UserStats. # noqa: E501 - :type: int - """ - - self._node_count = node_count - - @property - def node_count_cc(self): - """Gets the node_count_cc of this UserStats. # noqa: E501 - - - :return: The node_count_cc of this UserStats. # noqa: E501 - :rtype: int - """ - return self._node_count_cc - - @node_count_cc.setter - def node_count_cc(self, node_count_cc): - """Sets the node_count_cc of this UserStats. 
- - - :param node_count_cc: The node_count_cc of this UserStats. # noqa: E501 - :type: int - """ - - self._node_count_cc = node_count_cc - - @property - def collection_count(self): - """Gets the collection_count of this UserStats. # noqa: E501 - - - :return: The collection_count of this UserStats. # noqa: E501 - :rtype: int - """ - return self._collection_count - - @collection_count.setter - def collection_count(self, collection_count): - """Sets the collection_count of this UserStats. - - - :param collection_count: The collection_count of this UserStats. # noqa: E501 - :type: int - """ - - self._collection_count = collection_count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserStats, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserStats): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/user_status.py b/edu_sharing_client/models/user_status.py deleted file mode 100644 index e173d3ee..00000000 --- a/edu_sharing_client/models/user_status.py +++ /dev/null @@ -1,143 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class UserStatus(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'status': 'str', - '_date': 'datetime' - } - - attribute_map = { - 'status': 'status', - '_date': 'date' - } - - def __init__(self, status=None, _date=None): # noqa: E501 - """UserStatus - a model defined in Swagger""" # noqa: E501 - self._status = None - self.__date = None - self.discriminator = None - if status is not None: - self.status = status - if _date is not None: - self._date = _date - - @property - def status(self): - """Gets the status of this UserStatus. # noqa: E501 - - - :return: The status of this UserStatus. # noqa: E501 - :rtype: str - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this UserStatus. - - - :param status: The status of this UserStatus. 
# noqa: E501 - :type: str - """ - allowed_values = ["active", "blocked", "todelete"] # noqa: E501 - if status not in allowed_values: - raise ValueError( - "Invalid value for `status` ({0}), must be one of {1}" # noqa: E501 - .format(status, allowed_values) - ) - - self._status = status - - @property - def _date(self): - """Gets the _date of this UserStatus. # noqa: E501 - - - :return: The _date of this UserStatus. # noqa: E501 - :rtype: datetime - """ - return self.__date - - @_date.setter - def _date(self, _date): - """Sets the _date of this UserStatus. - - - :param _date: The _date of this UserStatus. # noqa: E501 - :type: datetime - """ - - self.__date = _date - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(UserStatus, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, UserStatus): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/value.py b/edu_sharing_client/models/value.py deleted file mode 100644 index cfdb7c93..00000000 --- a/edu_sharing_client/models/value.py +++ /dev/null @@ -1,139 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Value(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'value': 'str', - 'count': 'int' - } - - attribute_map = { - 'value': 'value', - 'count': 'count' - } - - def __init__(self, value=None, count=None): # noqa: E501 - """Value - a model defined in Swagger""" # noqa: E501 - self._value = None - self._count = None - self.discriminator = None - self.value = value - self.count = count - - @property - def value(self): - """Gets the value of this Value. # noqa: E501 - - - :return: The value of this Value. # noqa: E501 - :rtype: str - """ - return self._value - - @value.setter - def value(self, value): - """Sets the value of this Value. - - - :param value: The value of this Value. # noqa: E501 - :type: str - """ - if value is None: - raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501 - - self._value = value - - @property - def count(self): - """Gets the count of this Value. 
# noqa: E501 - - - :return: The count of this Value. # noqa: E501 - :rtype: int - """ - return self._count - - @count.setter - def count(self, count): - """Sets the count of this Value. - - - :param count: The count of this Value. # noqa: E501 - :type: int - """ - if count is None: - raise ValueError("Invalid value for `count`, must not be `None`") # noqa: E501 - - self._count = count - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Value, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Value): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/value_parameters.py b/edu_sharing_client/models/value_parameters.py deleted file mode 100644 index bfcf1377..00000000 --- a/edu_sharing_client/models/value_parameters.py +++ /dev/null @@ -1,168 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ValueParameters(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'query': 'str', - '_property': 'str', - 'pattern': 'str' - } - - attribute_map = { - 'query': 'query', - '_property': 'property', - 'pattern': 'pattern' - } - - def __init__(self, query=None, _property=None, pattern=None): # noqa: E501 - """ValueParameters - a model defined in Swagger""" # noqa: E501 - self._query = None - self.__property = None - self._pattern = None - self.discriminator = None - self.query = query - self._property = _property - self.pattern = pattern - - @property - def query(self): - """Gets the query of this ValueParameters. # noqa: E501 - - - :return: The query of this ValueParameters. # noqa: E501 - :rtype: str - """ - return self._query - - @query.setter - def query(self, query): - """Sets the query of this ValueParameters. - - - :param query: The query of this ValueParameters. # noqa: E501 - :type: str - """ - if query is None: - raise ValueError("Invalid value for `query`, must not be `None`") # noqa: E501 - - self._query = query - - @property - def _property(self): - """Gets the _property of this ValueParameters. 
# noqa: E501 - - - :return: The _property of this ValueParameters. # noqa: E501 - :rtype: str - """ - return self.__property - - @_property.setter - def _property(self, _property): - """Sets the _property of this ValueParameters. - - - :param _property: The _property of this ValueParameters. # noqa: E501 - :type: str - """ - if _property is None: - raise ValueError("Invalid value for `_property`, must not be `None`") # noqa: E501 - - self.__property = _property - - @property - def pattern(self): - """Gets the pattern of this ValueParameters. # noqa: E501 - - prefix of the value (or \"-all-\" for all values) # noqa: E501 - - :return: The pattern of this ValueParameters. # noqa: E501 - :rtype: str - """ - return self._pattern - - @pattern.setter - def pattern(self, pattern): - """Sets the pattern of this ValueParameters. - - prefix of the value (or \"-all-\" for all values) # noqa: E501 - - :param pattern: The pattern of this ValueParameters. # noqa: E501 - :type: str - """ - if pattern is None: - raise ValueError("Invalid value for `pattern`, must not be `None`") # noqa: E501 - - self._pattern = pattern - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ValueParameters, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ValueParameters): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/value_v2.py b/edu_sharing_client/models/value_v2.py deleted file mode 100644 index d5beee52..00000000 --- a/edu_sharing_client/models/value_v2.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ValueV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'str', - 'caption': 'str', - 'description': 'str', - 'parent': 'str' - } - - attribute_map = { - 'id': 'id', - 'caption': 'caption', - 'description': 'description', - 'parent': 'parent' - } - - def __init__(self, id=None, caption=None, description=None, parent=None): # noqa: E501 - """ValueV2 - a model defined in Swagger""" # noqa: E501 - self._id = None - self._caption = None - self._description = None - self._parent = None - self.discriminator = None - if id is not None: - self.id = id - if caption is not None: - self.caption = caption - if description is not None: - self.description = description - if parent is not None: - self.parent = parent - - @property - def id(self): - """Gets the id of this ValueV2. # noqa: E501 - - - :return: The id of this ValueV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this ValueV2. - - - :param id: The id of this ValueV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def caption(self): - """Gets the caption of this ValueV2. # noqa: E501 - - - :return: The caption of this ValueV2. # noqa: E501 - :rtype: str - """ - return self._caption - - @caption.setter - def caption(self, caption): - """Sets the caption of this ValueV2. - - - :param caption: The caption of this ValueV2. # noqa: E501 - :type: str - """ - - self._caption = caption - - @property - def description(self): - """Gets the description of this ValueV2. # noqa: E501 - - - :return: The description of this ValueV2. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this ValueV2. - - - :param description: The description of this ValueV2. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def parent(self): - """Gets the parent of this ValueV2. # noqa: E501 - - - :return: The parent of this ValueV2. # noqa: E501 - :rtype: str - """ - return self._parent - - @parent.setter - def parent(self, parent): - """Sets the parent of this ValueV2. - - - :param parent: The parent of this ValueV2. 
# noqa: E501 - :type: str - """ - - self._parent = parent - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ValueV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ValueV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/values.py b/edu_sharing_client/models/values.py deleted file mode 100644 index 98714d64..00000000 --- a/edu_sharing_client/models/values.py +++ /dev/null @@ -1,1541 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Values(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'supported_languages': 'list[str]', - 'extension': 'str', - 'login_url': 'str', - 'login_allow_local': 'bool', - 'login_providers_url': 'str', - 'login_provider_target_url': 'str', - 'register': 'Register', - 'recover_password_url': 'str', - 'imprint_url': 'str', - 'privacy_information_url': 'str', - 'help_url': 'str', - 'whats_new_url': 'str', - 'edit_profile_url': 'str', - 'edit_profile': 'bool', - 'workspace_columns': 'list[str]', - 'hide_main_menu': 'list[str]', - 'logout': 'LogoutInfo', - 'menu_entries': 'list[MenuEntry]', - 'node_options': 'list[ContextMenuEntry]', - 'search_node_options': 'list[ContextMenuEntry]', - 'render_node_options': 'list[ContextMenuEntry]', - 'collection_node_options': 'list[ContextMenuEntry]', - 'node_store_options': 'list[ContextMenuEntry]', - 'allowed_licenses': 'list[str]', - 'custom_licenses': 'list[License]', - 'workflows': 'list[Workflow]', - 'license_dialog_on_upload': 'bool', - 'node_report': 'bool', - 'branding': 'bool', - 'publishing_notice': 'bool', - 'site_title': 'str', - 'user_display_name': 'str', - 'user_secondary_display_name': 'str', - 'user_affiliation': 'bool', - 'default_username': 'str', - 'default_password': 'str', - 'banner': 'Banner', - 'available_mds': 'list[AvailableMds]', - 'available_repositories': 'list[str]', - 'search_view_type': 'int', - 'items_per_request': 'int', - 'rendering': 'Rendering', - 'session_expired_dialog': 'SessionExpiredDialog', - 'login_default_location': 'str', - 'search_group_results': 'bool', - 'mainnav': 'Mainnav', - 'search_sidenav_mode': 'str', - 'guest': 'Guest', - 'collections': 'Collections', - 'license_agreement': 'LicenseAgreement', - 'services': 'Services', - 'help_menu_options': 'list[HelpMenuOptions]', - 'images': 'list[Image]', - 'stream': 'Stream', - 'admin': 'Admin', - 'simple_edit': 'SimpleEdit' - } - - attribute_map = { - 'supported_languages': 'supportedLanguages', - 'extension': 'extension', - 'login_url': 'loginUrl', - 'login_allow_local': 'loginAllowLocal', - 'login_providers_url': 'loginProvidersUrl', - 'login_provider_target_url': 'loginProviderTargetUrl', - 'register': 'register', - 'recover_password_url': 'recoverPasswordUrl', - 'imprint_url': 'imprintUrl', - 'privacy_information_url': 'privacyInformationUrl', - 'help_url': 'helpUrl', - 'whats_new_url': 'whatsNewUrl', - 'edit_profile_url': 'editProfileUrl', - 'edit_profile': 'editProfile', - 'workspace_columns': 'workspaceColumns', - 'hide_main_menu': 'hideMainMenu', - 'logout': 'logout', - 'menu_entries': 'menuEntries', - 'node_options': 'nodeOptions', - 'search_node_options': 'searchNodeOptions', - 'render_node_options': 'renderNodeOptions', - 'collection_node_options': 'collectionNodeOptions', - 'node_store_options': 'nodeStoreOptions', - 'allowed_licenses': 'allowedLicenses', - 'custom_licenses': 'customLicenses', - 'workflows': 'workflows', - 'license_dialog_on_upload': 'licenseDialogOnUpload', - 'node_report': 'nodeReport', - 'branding': 'branding', - 'publishing_notice': 'publishingNotice', - 'site_title': 'siteTitle', - 'user_display_name': 'userDisplayName', - 'user_secondary_display_name': 'userSecondaryDisplayName', - 'user_affiliation': 'userAffiliation', - 'default_username': 'defaultUsername', - 'default_password': 'defaultPassword', - 'banner': 'banner', - 'available_mds': 'availableMds', - 'available_repositories': 'availableRepositories', - 'search_view_type': 'searchViewType', - 'items_per_request': 'itemsPerRequest', - 'rendering': 'rendering', - 'session_expired_dialog': 
'sessionExpiredDialog', - 'login_default_location': 'loginDefaultLocation', - 'search_group_results': 'searchGroupResults', - 'mainnav': 'mainnav', - 'search_sidenav_mode': 'searchSidenavMode', - 'guest': 'guest', - 'collections': 'collections', - 'license_agreement': 'licenseAgreement', - 'services': 'services', - 'help_menu_options': 'helpMenuOptions', - 'images': 'images', - 'stream': 'stream', - 'admin': 'admin', - 'simple_edit': 'simpleEdit' - } - - def __init__(self, supported_languages=None, extension=None, login_url=None, login_allow_local=False, login_providers_url=None, login_provider_target_url=None, register=None, recover_password_url=None, imprint_url=None, privacy_information_url=None, help_url=None, whats_new_url=None, edit_profile_url=None, edit_profile=False, workspace_columns=None, hide_main_menu=None, logout=None, menu_entries=None, node_options=None, search_node_options=None, render_node_options=None, collection_node_options=None, node_store_options=None, allowed_licenses=None, custom_licenses=None, workflows=None, license_dialog_on_upload=False, node_report=False, branding=False, publishing_notice=False, site_title=None, user_display_name=None, user_secondary_display_name=None, user_affiliation=False, default_username=None, default_password=None, banner=None, available_mds=None, available_repositories=None, search_view_type=None, items_per_request=None, rendering=None, session_expired_dialog=None, login_default_location=None, search_group_results=False, mainnav=None, search_sidenav_mode=None, guest=None, collections=None, license_agreement=None, services=None, help_menu_options=None, images=None, stream=None, admin=None, simple_edit=None): # noqa: E501 - """Values - a model defined in Swagger""" # noqa: E501 - self._supported_languages = None - self._extension = None - self._login_url = None - self._login_allow_local = None - self._login_providers_url = None - self._login_provider_target_url = None - self._register = None - self._recover_password_url = None - self._imprint_url = None - self._privacy_information_url = None - self._help_url = None - self._whats_new_url = None - self._edit_profile_url = None - self._edit_profile = None - self._workspace_columns = None - self._hide_main_menu = None - self._logout = None - self._menu_entries = None - self._node_options = None - self._search_node_options = None - self._render_node_options = None - self._collection_node_options = None - self._node_store_options = None - self._allowed_licenses = None - self._custom_licenses = None - self._workflows = None - self._license_dialog_on_upload = None - self._node_report = None - self._branding = None - self._publishing_notice = None - self._site_title = None - self._user_display_name = None - self._user_secondary_display_name = None - self._user_affiliation = None - self._default_username = None - self._default_password = None - self._banner = None - self._available_mds = None - self._available_repositories = None - self._search_view_type = None - self._items_per_request = None - self._rendering = None - self._session_expired_dialog = None - self._login_default_location = None - self._search_group_results = None - self._mainnav = None - self._search_sidenav_mode = None - self._guest = None - self._collections = None - self._license_agreement = None - self._services = None - self._help_menu_options = None - self._images = None - self._stream = None - self._admin = None - self._simple_edit = None - self.discriminator = None - if supported_languages is not None: - 
self.supported_languages = supported_languages - if extension is not None: - self.extension = extension - if login_url is not None: - self.login_url = login_url - if login_allow_local is not None: - self.login_allow_local = login_allow_local - if login_providers_url is not None: - self.login_providers_url = login_providers_url - if login_provider_target_url is not None: - self.login_provider_target_url = login_provider_target_url - if register is not None: - self.register = register - if recover_password_url is not None: - self.recover_password_url = recover_password_url - if imprint_url is not None: - self.imprint_url = imprint_url - if privacy_information_url is not None: - self.privacy_information_url = privacy_information_url - if help_url is not None: - self.help_url = help_url - if whats_new_url is not None: - self.whats_new_url = whats_new_url - if edit_profile_url is not None: - self.edit_profile_url = edit_profile_url - if edit_profile is not None: - self.edit_profile = edit_profile - if workspace_columns is not None: - self.workspace_columns = workspace_columns - if hide_main_menu is not None: - self.hide_main_menu = hide_main_menu - if logout is not None: - self.logout = logout - if menu_entries is not None: - self.menu_entries = menu_entries - if node_options is not None: - self.node_options = node_options - if search_node_options is not None: - self.search_node_options = search_node_options - if render_node_options is not None: - self.render_node_options = render_node_options - if collection_node_options is not None: - self.collection_node_options = collection_node_options - if node_store_options is not None: - self.node_store_options = node_store_options - if allowed_licenses is not None: - self.allowed_licenses = allowed_licenses - if custom_licenses is not None: - self.custom_licenses = custom_licenses - if workflows is not None: - self.workflows = workflows - if license_dialog_on_upload is not None: - self.license_dialog_on_upload = license_dialog_on_upload - if node_report is not None: - self.node_report = node_report - if branding is not None: - self.branding = branding - if publishing_notice is not None: - self.publishing_notice = publishing_notice - if site_title is not None: - self.site_title = site_title - if user_display_name is not None: - self.user_display_name = user_display_name - if user_secondary_display_name is not None: - self.user_secondary_display_name = user_secondary_display_name - if user_affiliation is not None: - self.user_affiliation = user_affiliation - if default_username is not None: - self.default_username = default_username - if default_password is not None: - self.default_password = default_password - if banner is not None: - self.banner = banner - if available_mds is not None: - self.available_mds = available_mds - if available_repositories is not None: - self.available_repositories = available_repositories - if search_view_type is not None: - self.search_view_type = search_view_type - if items_per_request is not None: - self.items_per_request = items_per_request - if rendering is not None: - self.rendering = rendering - if session_expired_dialog is not None: - self.session_expired_dialog = session_expired_dialog - if login_default_location is not None: - self.login_default_location = login_default_location - if search_group_results is not None: - self.search_group_results = search_group_results - if mainnav is not None: - self.mainnav = mainnav - if search_sidenav_mode is not None: - self.search_sidenav_mode = search_sidenav_mode - if guest 
is not None: - self.guest = guest - if collections is not None: - self.collections = collections - if license_agreement is not None: - self.license_agreement = license_agreement - if services is not None: - self.services = services - if help_menu_options is not None: - self.help_menu_options = help_menu_options - if images is not None: - self.images = images - if stream is not None: - self.stream = stream - if admin is not None: - self.admin = admin - if simple_edit is not None: - self.simple_edit = simple_edit - - @property - def supported_languages(self): - """Gets the supported_languages of this Values. # noqa: E501 - - - :return: The supported_languages of this Values. # noqa: E501 - :rtype: list[str] - """ - return self._supported_languages - - @supported_languages.setter - def supported_languages(self, supported_languages): - """Sets the supported_languages of this Values. - - - :param supported_languages: The supported_languages of this Values. # noqa: E501 - :type: list[str] - """ - - self._supported_languages = supported_languages - - @property - def extension(self): - """Gets the extension of this Values. # noqa: E501 - - - :return: The extension of this Values. # noqa: E501 - :rtype: str - """ - return self._extension - - @extension.setter - def extension(self, extension): - """Sets the extension of this Values. - - - :param extension: The extension of this Values. # noqa: E501 - :type: str - """ - - self._extension = extension - - @property - def login_url(self): - """Gets the login_url of this Values. # noqa: E501 - - - :return: The login_url of this Values. # noqa: E501 - :rtype: str - """ - return self._login_url - - @login_url.setter - def login_url(self, login_url): - """Sets the login_url of this Values. - - - :param login_url: The login_url of this Values. # noqa: E501 - :type: str - """ - - self._login_url = login_url - - @property - def login_allow_local(self): - """Gets the login_allow_local of this Values. # noqa: E501 - - - :return: The login_allow_local of this Values. # noqa: E501 - :rtype: bool - """ - return self._login_allow_local - - @login_allow_local.setter - def login_allow_local(self, login_allow_local): - """Sets the login_allow_local of this Values. - - - :param login_allow_local: The login_allow_local of this Values. # noqa: E501 - :type: bool - """ - - self._login_allow_local = login_allow_local - - @property - def login_providers_url(self): - """Gets the login_providers_url of this Values. # noqa: E501 - - - :return: The login_providers_url of this Values. # noqa: E501 - :rtype: str - """ - return self._login_providers_url - - @login_providers_url.setter - def login_providers_url(self, login_providers_url): - """Sets the login_providers_url of this Values. - - - :param login_providers_url: The login_providers_url of this Values. # noqa: E501 - :type: str - """ - - self._login_providers_url = login_providers_url - - @property - def login_provider_target_url(self): - """Gets the login_provider_target_url of this Values. # noqa: E501 - - - :return: The login_provider_target_url of this Values. # noqa: E501 - :rtype: str - """ - return self._login_provider_target_url - - @login_provider_target_url.setter - def login_provider_target_url(self, login_provider_target_url): - """Sets the login_provider_target_url of this Values. - - - :param login_provider_target_url: The login_provider_target_url of this Values. 
# noqa: E501 - :type: str - """ - - self._login_provider_target_url = login_provider_target_url - - @property - def register(self): - """Gets the register of this Values. # noqa: E501 - - - :return: The register of this Values. # noqa: E501 - :rtype: Register - """ - return self._register - - @register.setter - def register(self, register): - """Sets the register of this Values. - - - :param register: The register of this Values. # noqa: E501 - :type: Register - """ - - self._register = register - - @property - def recover_password_url(self): - """Gets the recover_password_url of this Values. # noqa: E501 - - - :return: The recover_password_url of this Values. # noqa: E501 - :rtype: str - """ - return self._recover_password_url - - @recover_password_url.setter - def recover_password_url(self, recover_password_url): - """Sets the recover_password_url of this Values. - - - :param recover_password_url: The recover_password_url of this Values. # noqa: E501 - :type: str - """ - - self._recover_password_url = recover_password_url - - @property - def imprint_url(self): - """Gets the imprint_url of this Values. # noqa: E501 - - - :return: The imprint_url of this Values. # noqa: E501 - :rtype: str - """ - return self._imprint_url - - @imprint_url.setter - def imprint_url(self, imprint_url): - """Sets the imprint_url of this Values. - - - :param imprint_url: The imprint_url of this Values. # noqa: E501 - :type: str - """ - - self._imprint_url = imprint_url - - @property - def privacy_information_url(self): - """Gets the privacy_information_url of this Values. # noqa: E501 - - - :return: The privacy_information_url of this Values. # noqa: E501 - :rtype: str - """ - return self._privacy_information_url - - @privacy_information_url.setter - def privacy_information_url(self, privacy_information_url): - """Sets the privacy_information_url of this Values. - - - :param privacy_information_url: The privacy_information_url of this Values. # noqa: E501 - :type: str - """ - - self._privacy_information_url = privacy_information_url - - @property - def help_url(self): - """Gets the help_url of this Values. # noqa: E501 - - - :return: The help_url of this Values. # noqa: E501 - :rtype: str - """ - return self._help_url - - @help_url.setter - def help_url(self, help_url): - """Sets the help_url of this Values. - - - :param help_url: The help_url of this Values. # noqa: E501 - :type: str - """ - - self._help_url = help_url - - @property - def whats_new_url(self): - """Gets the whats_new_url of this Values. # noqa: E501 - - - :return: The whats_new_url of this Values. # noqa: E501 - :rtype: str - """ - return self._whats_new_url - - @whats_new_url.setter - def whats_new_url(self, whats_new_url): - """Sets the whats_new_url of this Values. - - - :param whats_new_url: The whats_new_url of this Values. # noqa: E501 - :type: str - """ - - self._whats_new_url = whats_new_url - - @property - def edit_profile_url(self): - """Gets the edit_profile_url of this Values. # noqa: E501 - - - :return: The edit_profile_url of this Values. # noqa: E501 - :rtype: str - """ - return self._edit_profile_url - - @edit_profile_url.setter - def edit_profile_url(self, edit_profile_url): - """Sets the edit_profile_url of this Values. - - - :param edit_profile_url: The edit_profile_url of this Values. # noqa: E501 - :type: str - """ - - self._edit_profile_url = edit_profile_url - - @property - def edit_profile(self): - """Gets the edit_profile of this Values. # noqa: E501 - - - :return: The edit_profile of this Values. 
# noqa: E501 - :rtype: bool - """ - return self._edit_profile - - @edit_profile.setter - def edit_profile(self, edit_profile): - """Sets the edit_profile of this Values. - - - :param edit_profile: The edit_profile of this Values. # noqa: E501 - :type: bool - """ - - self._edit_profile = edit_profile - - @property - def workspace_columns(self): - """Gets the workspace_columns of this Values. # noqa: E501 - - - :return: The workspace_columns of this Values. # noqa: E501 - :rtype: list[str] - """ - return self._workspace_columns - - @workspace_columns.setter - def workspace_columns(self, workspace_columns): - """Sets the workspace_columns of this Values. - - - :param workspace_columns: The workspace_columns of this Values. # noqa: E501 - :type: list[str] - """ - - self._workspace_columns = workspace_columns - - @property - def hide_main_menu(self): - """Gets the hide_main_menu of this Values. # noqa: E501 - - - :return: The hide_main_menu of this Values. # noqa: E501 - :rtype: list[str] - """ - return self._hide_main_menu - - @hide_main_menu.setter - def hide_main_menu(self, hide_main_menu): - """Sets the hide_main_menu of this Values. - - - :param hide_main_menu: The hide_main_menu of this Values. # noqa: E501 - :type: list[str] - """ - - self._hide_main_menu = hide_main_menu - - @property - def logout(self): - """Gets the logout of this Values. # noqa: E501 - - - :return: The logout of this Values. # noqa: E501 - :rtype: LogoutInfo - """ - return self._logout - - @logout.setter - def logout(self, logout): - """Sets the logout of this Values. - - - :param logout: The logout of this Values. # noqa: E501 - :type: LogoutInfo - """ - - self._logout = logout - - @property - def menu_entries(self): - """Gets the menu_entries of this Values. # noqa: E501 - - - :return: The menu_entries of this Values. # noqa: E501 - :rtype: list[MenuEntry] - """ - return self._menu_entries - - @menu_entries.setter - def menu_entries(self, menu_entries): - """Sets the menu_entries of this Values. - - - :param menu_entries: The menu_entries of this Values. # noqa: E501 - :type: list[MenuEntry] - """ - - self._menu_entries = menu_entries - - @property - def node_options(self): - """Gets the node_options of this Values. # noqa: E501 - - - :return: The node_options of this Values. # noqa: E501 - :rtype: list[ContextMenuEntry] - """ - return self._node_options - - @node_options.setter - def node_options(self, node_options): - """Sets the node_options of this Values. - - - :param node_options: The node_options of this Values. # noqa: E501 - :type: list[ContextMenuEntry] - """ - - self._node_options = node_options - - @property - def search_node_options(self): - """Gets the search_node_options of this Values. # noqa: E501 - - - :return: The search_node_options of this Values. # noqa: E501 - :rtype: list[ContextMenuEntry] - """ - return self._search_node_options - - @search_node_options.setter - def search_node_options(self, search_node_options): - """Sets the search_node_options of this Values. - - - :param search_node_options: The search_node_options of this Values. # noqa: E501 - :type: list[ContextMenuEntry] - """ - - self._search_node_options = search_node_options - - @property - def render_node_options(self): - """Gets the render_node_options of this Values. # noqa: E501 - - - :return: The render_node_options of this Values. 
# noqa: E501 - :rtype: list[ContextMenuEntry] - """ - return self._render_node_options - - @render_node_options.setter - def render_node_options(self, render_node_options): - """Sets the render_node_options of this Values. - - - :param render_node_options: The render_node_options of this Values. # noqa: E501 - :type: list[ContextMenuEntry] - """ - - self._render_node_options = render_node_options - - @property - def collection_node_options(self): - """Gets the collection_node_options of this Values. # noqa: E501 - - - :return: The collection_node_options of this Values. # noqa: E501 - :rtype: list[ContextMenuEntry] - """ - return self._collection_node_options - - @collection_node_options.setter - def collection_node_options(self, collection_node_options): - """Sets the collection_node_options of this Values. - - - :param collection_node_options: The collection_node_options of this Values. # noqa: E501 - :type: list[ContextMenuEntry] - """ - - self._collection_node_options = collection_node_options - - @property - def node_store_options(self): - """Gets the node_store_options of this Values. # noqa: E501 - - - :return: The node_store_options of this Values. # noqa: E501 - :rtype: list[ContextMenuEntry] - """ - return self._node_store_options - - @node_store_options.setter - def node_store_options(self, node_store_options): - """Sets the node_store_options of this Values. - - - :param node_store_options: The node_store_options of this Values. # noqa: E501 - :type: list[ContextMenuEntry] - """ - - self._node_store_options = node_store_options - - @property - def allowed_licenses(self): - """Gets the allowed_licenses of this Values. # noqa: E501 - - - :return: The allowed_licenses of this Values. # noqa: E501 - :rtype: list[str] - """ - return self._allowed_licenses - - @allowed_licenses.setter - def allowed_licenses(self, allowed_licenses): - """Sets the allowed_licenses of this Values. - - - :param allowed_licenses: The allowed_licenses of this Values. # noqa: E501 - :type: list[str] - """ - - self._allowed_licenses = allowed_licenses - - @property - def custom_licenses(self): - """Gets the custom_licenses of this Values. # noqa: E501 - - - :return: The custom_licenses of this Values. # noqa: E501 - :rtype: list[License] - """ - return self._custom_licenses - - @custom_licenses.setter - def custom_licenses(self, custom_licenses): - """Sets the custom_licenses of this Values. - - - :param custom_licenses: The custom_licenses of this Values. # noqa: E501 - :type: list[License] - """ - - self._custom_licenses = custom_licenses - - @property - def workflows(self): - """Gets the workflows of this Values. # noqa: E501 - - - :return: The workflows of this Values. # noqa: E501 - :rtype: list[Workflow] - """ - return self._workflows - - @workflows.setter - def workflows(self, workflows): - """Sets the workflows of this Values. - - - :param workflows: The workflows of this Values. # noqa: E501 - :type: list[Workflow] - """ - - self._workflows = workflows - - @property - def license_dialog_on_upload(self): - """Gets the license_dialog_on_upload of this Values. # noqa: E501 - - - :return: The license_dialog_on_upload of this Values. # noqa: E501 - :rtype: bool - """ - return self._license_dialog_on_upload - - @license_dialog_on_upload.setter - def license_dialog_on_upload(self, license_dialog_on_upload): - """Sets the license_dialog_on_upload of this Values. - - - :param license_dialog_on_upload: The license_dialog_on_upload of this Values. 
# noqa: E501 - :type: bool - """ - - self._license_dialog_on_upload = license_dialog_on_upload - - @property - def node_report(self): - """Gets the node_report of this Values. # noqa: E501 - - - :return: The node_report of this Values. # noqa: E501 - :rtype: bool - """ - return self._node_report - - @node_report.setter - def node_report(self, node_report): - """Sets the node_report of this Values. - - - :param node_report: The node_report of this Values. # noqa: E501 - :type: bool - """ - - self._node_report = node_report - - @property - def branding(self): - """Gets the branding of this Values. # noqa: E501 - - - :return: The branding of this Values. # noqa: E501 - :rtype: bool - """ - return self._branding - - @branding.setter - def branding(self, branding): - """Sets the branding of this Values. - - - :param branding: The branding of this Values. # noqa: E501 - :type: bool - """ - - self._branding = branding - - @property - def publishing_notice(self): - """Gets the publishing_notice of this Values. # noqa: E501 - - - :return: The publishing_notice of this Values. # noqa: E501 - :rtype: bool - """ - return self._publishing_notice - - @publishing_notice.setter - def publishing_notice(self, publishing_notice): - """Sets the publishing_notice of this Values. - - - :param publishing_notice: The publishing_notice of this Values. # noqa: E501 - :type: bool - """ - - self._publishing_notice = publishing_notice - - @property - def site_title(self): - """Gets the site_title of this Values. # noqa: E501 - - - :return: The site_title of this Values. # noqa: E501 - :rtype: str - """ - return self._site_title - - @site_title.setter - def site_title(self, site_title): - """Sets the site_title of this Values. - - - :param site_title: The site_title of this Values. # noqa: E501 - :type: str - """ - - self._site_title = site_title - - @property - def user_display_name(self): - """Gets the user_display_name of this Values. # noqa: E501 - - - :return: The user_display_name of this Values. # noqa: E501 - :rtype: str - """ - return self._user_display_name - - @user_display_name.setter - def user_display_name(self, user_display_name): - """Sets the user_display_name of this Values. - - - :param user_display_name: The user_display_name of this Values. # noqa: E501 - :type: str - """ - - self._user_display_name = user_display_name - - @property - def user_secondary_display_name(self): - """Gets the user_secondary_display_name of this Values. # noqa: E501 - - - :return: The user_secondary_display_name of this Values. # noqa: E501 - :rtype: str - """ - return self._user_secondary_display_name - - @user_secondary_display_name.setter - def user_secondary_display_name(self, user_secondary_display_name): - """Sets the user_secondary_display_name of this Values. - - - :param user_secondary_display_name: The user_secondary_display_name of this Values. # noqa: E501 - :type: str - """ - - self._user_secondary_display_name = user_secondary_display_name - - @property - def user_affiliation(self): - """Gets the user_affiliation of this Values. # noqa: E501 - - - :return: The user_affiliation of this Values. # noqa: E501 - :rtype: bool - """ - return self._user_affiliation - - @user_affiliation.setter - def user_affiliation(self, user_affiliation): - """Sets the user_affiliation of this Values. - - - :param user_affiliation: The user_affiliation of this Values. 
# noqa: E501 - :type: bool - """ - - self._user_affiliation = user_affiliation - - @property - def default_username(self): - """Gets the default_username of this Values. # noqa: E501 - - - :return: The default_username of this Values. # noqa: E501 - :rtype: str - """ - return self._default_username - - @default_username.setter - def default_username(self, default_username): - """Sets the default_username of this Values. - - - :param default_username: The default_username of this Values. # noqa: E501 - :type: str - """ - - self._default_username = default_username - - @property - def default_password(self): - """Gets the default_password of this Values. # noqa: E501 - - - :return: The default_password of this Values. # noqa: E501 - :rtype: str - """ - return self._default_password - - @default_password.setter - def default_password(self, default_password): - """Sets the default_password of this Values. - - - :param default_password: The default_password of this Values. # noqa: E501 - :type: str - """ - - self._default_password = default_password - - @property - def banner(self): - """Gets the banner of this Values. # noqa: E501 - - - :return: The banner of this Values. # noqa: E501 - :rtype: Banner - """ - return self._banner - - @banner.setter - def banner(self, banner): - """Sets the banner of this Values. - - - :param banner: The banner of this Values. # noqa: E501 - :type: Banner - """ - - self._banner = banner - - @property - def available_mds(self): - """Gets the available_mds of this Values. # noqa: E501 - - - :return: The available_mds of this Values. # noqa: E501 - :rtype: list[AvailableMds] - """ - return self._available_mds - - @available_mds.setter - def available_mds(self, available_mds): - """Sets the available_mds of this Values. - - - :param available_mds: The available_mds of this Values. # noqa: E501 - :type: list[AvailableMds] - """ - - self._available_mds = available_mds - - @property - def available_repositories(self): - """Gets the available_repositories of this Values. # noqa: E501 - - - :return: The available_repositories of this Values. # noqa: E501 - :rtype: list[str] - """ - return self._available_repositories - - @available_repositories.setter - def available_repositories(self, available_repositories): - """Sets the available_repositories of this Values. - - - :param available_repositories: The available_repositories of this Values. # noqa: E501 - :type: list[str] - """ - - self._available_repositories = available_repositories - - @property - def search_view_type(self): - """Gets the search_view_type of this Values. # noqa: E501 - - - :return: The search_view_type of this Values. # noqa: E501 - :rtype: int - """ - return self._search_view_type - - @search_view_type.setter - def search_view_type(self, search_view_type): - """Sets the search_view_type of this Values. - - - :param search_view_type: The search_view_type of this Values. # noqa: E501 - :type: int - """ - - self._search_view_type = search_view_type - - @property - def items_per_request(self): - """Gets the items_per_request of this Values. # noqa: E501 - - - :return: The items_per_request of this Values. # noqa: E501 - :rtype: int - """ - return self._items_per_request - - @items_per_request.setter - def items_per_request(self, items_per_request): - """Sets the items_per_request of this Values. - - - :param items_per_request: The items_per_request of this Values. 
# noqa: E501 - :type: int - """ - - self._items_per_request = items_per_request - - @property - def rendering(self): - """Gets the rendering of this Values. # noqa: E501 - - - :return: The rendering of this Values. # noqa: E501 - :rtype: Rendering - """ - return self._rendering - - @rendering.setter - def rendering(self, rendering): - """Sets the rendering of this Values. - - - :param rendering: The rendering of this Values. # noqa: E501 - :type: Rendering - """ - - self._rendering = rendering - - @property - def session_expired_dialog(self): - """Gets the session_expired_dialog of this Values. # noqa: E501 - - - :return: The session_expired_dialog of this Values. # noqa: E501 - :rtype: SessionExpiredDialog - """ - return self._session_expired_dialog - - @session_expired_dialog.setter - def session_expired_dialog(self, session_expired_dialog): - """Sets the session_expired_dialog of this Values. - - - :param session_expired_dialog: The session_expired_dialog of this Values. # noqa: E501 - :type: SessionExpiredDialog - """ - - self._session_expired_dialog = session_expired_dialog - - @property - def login_default_location(self): - """Gets the login_default_location of this Values. # noqa: E501 - - - :return: The login_default_location of this Values. # noqa: E501 - :rtype: str - """ - return self._login_default_location - - @login_default_location.setter - def login_default_location(self, login_default_location): - """Sets the login_default_location of this Values. - - - :param login_default_location: The login_default_location of this Values. # noqa: E501 - :type: str - """ - - self._login_default_location = login_default_location - - @property - def search_group_results(self): - """Gets the search_group_results of this Values. # noqa: E501 - - - :return: The search_group_results of this Values. # noqa: E501 - :rtype: bool - """ - return self._search_group_results - - @search_group_results.setter - def search_group_results(self, search_group_results): - """Sets the search_group_results of this Values. - - - :param search_group_results: The search_group_results of this Values. # noqa: E501 - :type: bool - """ - - self._search_group_results = search_group_results - - @property - def mainnav(self): - """Gets the mainnav of this Values. # noqa: E501 - - - :return: The mainnav of this Values. # noqa: E501 - :rtype: Mainnav - """ - return self._mainnav - - @mainnav.setter - def mainnav(self, mainnav): - """Sets the mainnav of this Values. - - - :param mainnav: The mainnav of this Values. # noqa: E501 - :type: Mainnav - """ - - self._mainnav = mainnav - - @property - def search_sidenav_mode(self): - """Gets the search_sidenav_mode of this Values. # noqa: E501 - - - :return: The search_sidenav_mode of this Values. # noqa: E501 - :rtype: str - """ - return self._search_sidenav_mode - - @search_sidenav_mode.setter - def search_sidenav_mode(self, search_sidenav_mode): - """Sets the search_sidenav_mode of this Values. - - - :param search_sidenav_mode: The search_sidenav_mode of this Values. # noqa: E501 - :type: str - """ - - self._search_sidenav_mode = search_sidenav_mode - - @property - def guest(self): - """Gets the guest of this Values. # noqa: E501 - - - :return: The guest of this Values. # noqa: E501 - :rtype: Guest - """ - return self._guest - - @guest.setter - def guest(self, guest): - """Sets the guest of this Values. - - - :param guest: The guest of this Values. 
# noqa: E501 - :type: Guest - """ - - self._guest = guest - - @property - def collections(self): - """Gets the collections of this Values. # noqa: E501 - - - :return: The collections of this Values. # noqa: E501 - :rtype: Collections - """ - return self._collections - - @collections.setter - def collections(self, collections): - """Sets the collections of this Values. - - - :param collections: The collections of this Values. # noqa: E501 - :type: Collections - """ - - self._collections = collections - - @property - def license_agreement(self): - """Gets the license_agreement of this Values. # noqa: E501 - - - :return: The license_agreement of this Values. # noqa: E501 - :rtype: LicenseAgreement - """ - return self._license_agreement - - @license_agreement.setter - def license_agreement(self, license_agreement): - """Sets the license_agreement of this Values. - - - :param license_agreement: The license_agreement of this Values. # noqa: E501 - :type: LicenseAgreement - """ - - self._license_agreement = license_agreement - - @property - def services(self): - """Gets the services of this Values. # noqa: E501 - - - :return: The services of this Values. # noqa: E501 - :rtype: Services - """ - return self._services - - @services.setter - def services(self, services): - """Sets the services of this Values. - - - :param services: The services of this Values. # noqa: E501 - :type: Services - """ - - self._services = services - - @property - def help_menu_options(self): - """Gets the help_menu_options of this Values. # noqa: E501 - - - :return: The help_menu_options of this Values. # noqa: E501 - :rtype: list[HelpMenuOptions] - """ - return self._help_menu_options - - @help_menu_options.setter - def help_menu_options(self, help_menu_options): - """Sets the help_menu_options of this Values. - - - :param help_menu_options: The help_menu_options of this Values. # noqa: E501 - :type: list[HelpMenuOptions] - """ - - self._help_menu_options = help_menu_options - - @property - def images(self): - """Gets the images of this Values. # noqa: E501 - - - :return: The images of this Values. # noqa: E501 - :rtype: list[Image] - """ - return self._images - - @images.setter - def images(self, images): - """Sets the images of this Values. - - - :param images: The images of this Values. # noqa: E501 - :type: list[Image] - """ - - self._images = images - - @property - def stream(self): - """Gets the stream of this Values. # noqa: E501 - - - :return: The stream of this Values. # noqa: E501 - :rtype: Stream - """ - return self._stream - - @stream.setter - def stream(self, stream): - """Sets the stream of this Values. - - - :param stream: The stream of this Values. # noqa: E501 - :type: Stream - """ - - self._stream = stream - - @property - def admin(self): - """Gets the admin of this Values. # noqa: E501 - - - :return: The admin of this Values. # noqa: E501 - :rtype: Admin - """ - return self._admin - - @admin.setter - def admin(self, admin): - """Sets the admin of this Values. - - - :param admin: The admin of this Values. # noqa: E501 - :type: Admin - """ - - self._admin = admin - - @property - def simple_edit(self): - """Gets the simple_edit of this Values. # noqa: E501 - - - :return: The simple_edit of this Values. # noqa: E501 - :rtype: SimpleEdit - """ - return self._simple_edit - - @simple_edit.setter - def simple_edit(self, simple_edit): - """Sets the simple_edit of this Values. - - - :param simple_edit: The simple_edit of this Values. 
# noqa: E501 - :type: SimpleEdit - """ - - self._simple_edit = simple_edit - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Values, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Values): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/variables.py b/edu_sharing_client/models/variables.py deleted file mode 100644 index 7a216d5d..00000000 --- a/edu_sharing_client/models/variables.py +++ /dev/null @@ -1,137 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Variables(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - '_global': 'dict(str, str)', - 'current': 'dict(str, str)' - } - - attribute_map = { - '_global': 'global', - 'current': 'current' - } - - def __init__(self, _global=None, current=None): # noqa: E501 - """Variables - a model defined in Swagger""" # noqa: E501 - self.__global = None - self._current = None - self.discriminator = None - if _global is not None: - self._global = _global - if current is not None: - self.current = current - - @property - def _global(self): - """Gets the _global of this Variables. # noqa: E501 - - - :return: The _global of this Variables. # noqa: E501 - :rtype: dict(str, str) - """ - return self.__global - - @_global.setter - def _global(self, _global): - """Sets the _global of this Variables. - - - :param _global: The _global of this Variables. # noqa: E501 - :type: dict(str, str) - """ - - self.__global = _global - - @property - def current(self): - """Gets the current of this Variables. # noqa: E501 - - - :return: The current of this Variables. # noqa: E501 - :rtype: dict(str, str) - """ - return self._current - - @current.setter - def current(self, current): - """Sets the current of this Variables. - - - :param current: The current of this Variables. 
# noqa: E501 - :type: dict(str, str) - """ - - self._current = current - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Variables, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Variables): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/view_v2.py b/edu_sharing_client/models/view_v2.py deleted file mode 100644 index ca240556..00000000 --- a/edu_sharing_client/models/view_v2.py +++ /dev/null @@ -1,241 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class ViewV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'caption': 'str', - 'icon': 'str', - 'html': 'str', - 'rel': 'str', - 'hide_if_empty': 'bool' - } - - attribute_map = { - 'id': 'id', - 'caption': 'caption', - 'icon': 'icon', - 'html': 'html', - 'rel': 'rel', - 'hide_if_empty': 'hideIfEmpty' - } - - def __init__(self, id=None, caption=None, icon=None, html=None, rel=None, hide_if_empty=False): # noqa: E501 - """ViewV2 - a model defined in Swagger""" # noqa: E501 - self._id = None - self._caption = None - self._icon = None - self._html = None - self._rel = None - self._hide_if_empty = None - self.discriminator = None - if id is not None: - self.id = id - if caption is not None: - self.caption = caption - if icon is not None: - self.icon = icon - if html is not None: - self.html = html - if rel is not None: - self.rel = rel - if hide_if_empty is not None: - self.hide_if_empty = hide_if_empty - - @property - def id(self): - """Gets the id of this ViewV2. # noqa: E501 - - - :return: The id of this ViewV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this ViewV2. - - - :param id: The id of this ViewV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def caption(self): - """Gets the caption of this ViewV2. # noqa: E501 - - - :return: The caption of this ViewV2. 
# noqa: E501 - :rtype: str - """ - return self._caption - - @caption.setter - def caption(self, caption): - """Sets the caption of this ViewV2. - - - :param caption: The caption of this ViewV2. # noqa: E501 - :type: str - """ - - self._caption = caption - - @property - def icon(self): - """Gets the icon of this ViewV2. # noqa: E501 - - - :return: The icon of this ViewV2. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this ViewV2. - - - :param icon: The icon of this ViewV2. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def html(self): - """Gets the html of this ViewV2. # noqa: E501 - - - :return: The html of this ViewV2. # noqa: E501 - :rtype: str - """ - return self._html - - @html.setter - def html(self, html): - """Sets the html of this ViewV2. - - - :param html: The html of this ViewV2. # noqa: E501 - :type: str - """ - - self._html = html - - @property - def rel(self): - """Gets the rel of this ViewV2. # noqa: E501 - - - :return: The rel of this ViewV2. # noqa: E501 - :rtype: str - """ - return self._rel - - @rel.setter - def rel(self, rel): - """Sets the rel of this ViewV2. - - - :param rel: The rel of this ViewV2. # noqa: E501 - :type: str - """ - - self._rel = rel - - @property - def hide_if_empty(self): - """Gets the hide_if_empty of this ViewV2. # noqa: E501 - - - :return: The hide_if_empty of this ViewV2. # noqa: E501 - :rtype: bool - """ - return self._hide_if_empty - - @hide_if_empty.setter - def hide_if_empty(self, hide_if_empty): - """Sets the hide_if_empty of this ViewV2. - - - :param hide_if_empty: The hide_if_empty of this ViewV2. # noqa: E501 - :type: bool - """ - - self._hide_if_empty = hide_if_empty - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(ViewV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ViewV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/website_information.py b/edu_sharing_client/models/website_information.py deleted file mode 100644 index 28e32c56..00000000 --- a/edu_sharing_client/models/website_information.py +++ /dev/null @@ -1,215 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class WebsiteInformation(object): - """NOTE: This class is auto generated by the swagger code generator program. 
- - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'title': 'str', - 'page': 'str', - 'description': 'str', - 'license': 'str', - 'keywords': 'list[str]' - } - - attribute_map = { - 'title': 'title', - 'page': 'page', - 'description': 'description', - 'license': 'license', - 'keywords': 'keywords' - } - - def __init__(self, title=None, page=None, description=None, license=None, keywords=None): # noqa: E501 - """WebsiteInformation - a model defined in Swagger""" # noqa: E501 - self._title = None - self._page = None - self._description = None - self._license = None - self._keywords = None - self.discriminator = None - if title is not None: - self.title = title - if page is not None: - self.page = page - if description is not None: - self.description = description - if license is not None: - self.license = license - if keywords is not None: - self.keywords = keywords - - @property - def title(self): - """Gets the title of this WebsiteInformation. # noqa: E501 - - - :return: The title of this WebsiteInformation. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this WebsiteInformation. - - - :param title: The title of this WebsiteInformation. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def page(self): - """Gets the page of this WebsiteInformation. # noqa: E501 - - - :return: The page of this WebsiteInformation. # noqa: E501 - :rtype: str - """ - return self._page - - @page.setter - def page(self, page): - """Sets the page of this WebsiteInformation. - - - :param page: The page of this WebsiteInformation. # noqa: E501 - :type: str - """ - - self._page = page - - @property - def description(self): - """Gets the description of this WebsiteInformation. # noqa: E501 - - - :return: The description of this WebsiteInformation. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this WebsiteInformation. - - - :param description: The description of this WebsiteInformation. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def license(self): - """Gets the license of this WebsiteInformation. # noqa: E501 - - - :return: The license of this WebsiteInformation. # noqa: E501 - :rtype: str - """ - return self._license - - @license.setter - def license(self, license): - """Sets the license of this WebsiteInformation. - - - :param license: The license of this WebsiteInformation. # noqa: E501 - :type: str - """ - - self._license = license - - @property - def keywords(self): - """Gets the keywords of this WebsiteInformation. # noqa: E501 - - - :return: The keywords of this WebsiteInformation. # noqa: E501 - :rtype: list[str] - """ - return self._keywords - - @keywords.setter - def keywords(self, keywords): - """Sets the keywords of this WebsiteInformation. - - - :param keywords: The keywords of this WebsiteInformation. 
# noqa: E501 - :type: list[str] - """ - - self._keywords = keywords - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(WebsiteInformation, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, WebsiteInformation): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/widget_v2.py b/edu_sharing_client/models/widget_v2.py deleted file mode 100644 index 63aaed28..00000000 --- a/edu_sharing_client/models/widget_v2.py +++ /dev/null @@ -1,683 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class WidgetV2(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'link': 'str', - 'subwidgets': 'list[Subwidget]', - 'condition': 'Condition', - 'id': 'str', - 'caption': 'str', - 'bottom_caption': 'str', - 'icon': 'str', - 'type': 'str', - 'template': 'str', - 'has_values': 'bool', - 'values': 'list[ValueV2]', - 'placeholder': 'str', - 'unit': 'str', - 'min': 'int', - 'max': 'int', - 'default_min': 'int', - 'default_max': 'int', - 'step': 'int', - 'allowempty': 'bool', - 'defaultvalue': 'str', - 'is_extended': 'bool', - 'is_required': 'bool', - 'is_searchable': 'bool' - } - - attribute_map = { - 'link': 'link', - 'subwidgets': 'subwidgets', - 'condition': 'condition', - 'id': 'id', - 'caption': 'caption', - 'bottom_caption': 'bottomCaption', - 'icon': 'icon', - 'type': 'type', - 'template': 'template', - 'has_values': 'hasValues', - 'values': 'values', - 'placeholder': 'placeholder', - 'unit': 'unit', - 'min': 'min', - 'max': 'max', - 'default_min': 'defaultMin', - 'default_max': 'defaultMax', - 'step': 'step', - 'allowempty': 'allowempty', - 'defaultvalue': 'defaultvalue', - 'is_extended': 'isExtended', - 'is_required': 'isRequired', - 'is_searchable': 'isSearchable' - } - - def __init__(self, link=None, subwidgets=None, condition=None, id=None, caption=None, bottom_caption=None, icon=None, type=None, template=None, has_values=False, values=None, placeholder=None, unit=None, min=None, max=None, default_min=None, default_max=None, step=None, allowempty=False, defaultvalue=None, is_extended=False, is_required=False, is_searchable=False): # noqa: E501 - """WidgetV2 - a model defined in Swagger""" # noqa: E501 - self._link = None - self._subwidgets = None - self._condition = None - self._id = None - self._caption = None - self._bottom_caption = None - self._icon = None - self._type = None - self._template = None - self._has_values = None - self._values = None - self._placeholder = None - self._unit = None - self._min = None - self._max = None - self._default_min = None - self._default_max = None - self._step = None - self._allowempty = None - self._defaultvalue = None - self._is_extended = None - self._is_required = None - self._is_searchable = None - self.discriminator = None - if link is not None: - self.link = link - if subwidgets is not None: - self.subwidgets = subwidgets - if condition is not None: - self.condition = condition - if id is not None: - self.id = id - if caption is not None: - self.caption = caption - if bottom_caption is not None: - self.bottom_caption = bottom_caption - if icon is not None: - self.icon = icon - if type is not None: - self.type = type - if template is not None: - self.template = template - if has_values is not None: - self.has_values = has_values - if values is not None: - self.values = values - if placeholder is not None: - self.placeholder = placeholder - if unit is not None: - self.unit = unit - if min is not None: - self.min = min - if max is not None: - self.max = max - if default_min is not None: - self.default_min = default_min - if default_max is not None: - self.default_max = default_max - if step is not None: - self.step = step - if allowempty is not None: - self.allowempty = allowempty - if defaultvalue is not None: - self.defaultvalue = defaultvalue - if is_extended is not None: - self.is_extended = is_extended - if is_required is not None: - self.is_required = is_required - if is_searchable is not None: - self.is_searchable = is_searchable - - @property - def link(self): - """Gets the link of this WidgetV2. # noqa: E501 - - - :return: The link of this WidgetV2. 
# noqa: E501 - :rtype: str - """ - return self._link - - @link.setter - def link(self, link): - """Sets the link of this WidgetV2. - - - :param link: The link of this WidgetV2. # noqa: E501 - :type: str - """ - - self._link = link - - @property - def subwidgets(self): - """Gets the subwidgets of this WidgetV2. # noqa: E501 - - - :return: The subwidgets of this WidgetV2. # noqa: E501 - :rtype: list[Subwidget] - """ - return self._subwidgets - - @subwidgets.setter - def subwidgets(self, subwidgets): - """Sets the subwidgets of this WidgetV2. - - - :param subwidgets: The subwidgets of this WidgetV2. # noqa: E501 - :type: list[Subwidget] - """ - - self._subwidgets = subwidgets - - @property - def condition(self): - """Gets the condition of this WidgetV2. # noqa: E501 - - - :return: The condition of this WidgetV2. # noqa: E501 - :rtype: Condition - """ - return self._condition - - @condition.setter - def condition(self, condition): - """Sets the condition of this WidgetV2. - - - :param condition: The condition of this WidgetV2. # noqa: E501 - :type: Condition - """ - - self._condition = condition - - @property - def id(self): - """Gets the id of this WidgetV2. # noqa: E501 - - - :return: The id of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this WidgetV2. - - - :param id: The id of this WidgetV2. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def caption(self): - """Gets the caption of this WidgetV2. # noqa: E501 - - - :return: The caption of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._caption - - @caption.setter - def caption(self, caption): - """Sets the caption of this WidgetV2. - - - :param caption: The caption of this WidgetV2. # noqa: E501 - :type: str - """ - - self._caption = caption - - @property - def bottom_caption(self): - """Gets the bottom_caption of this WidgetV2. # noqa: E501 - - - :return: The bottom_caption of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._bottom_caption - - @bottom_caption.setter - def bottom_caption(self, bottom_caption): - """Sets the bottom_caption of this WidgetV2. - - - :param bottom_caption: The bottom_caption of this WidgetV2. # noqa: E501 - :type: str - """ - - self._bottom_caption = bottom_caption - - @property - def icon(self): - """Gets the icon of this WidgetV2. # noqa: E501 - - - :return: The icon of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._icon - - @icon.setter - def icon(self, icon): - """Sets the icon of this WidgetV2. - - - :param icon: The icon of this WidgetV2. # noqa: E501 - :type: str - """ - - self._icon = icon - - @property - def type(self): - """Gets the type of this WidgetV2. # noqa: E501 - - - :return: The type of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._type - - @type.setter - def type(self, type): - """Sets the type of this WidgetV2. - - - :param type: The type of this WidgetV2. # noqa: E501 - :type: str - """ - - self._type = type - - @property - def template(self): - """Gets the template of this WidgetV2. # noqa: E501 - - - :return: The template of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._template - - @template.setter - def template(self, template): - """Sets the template of this WidgetV2. - - - :param template: The template of this WidgetV2. # noqa: E501 - :type: str - """ - - self._template = template - - @property - def has_values(self): - """Gets the has_values of this WidgetV2. 
# noqa: E501 - - - :return: The has_values of this WidgetV2. # noqa: E501 - :rtype: bool - """ - return self._has_values - - @has_values.setter - def has_values(self, has_values): - """Sets the has_values of this WidgetV2. - - - :param has_values: The has_values of this WidgetV2. # noqa: E501 - :type: bool - """ - - self._has_values = has_values - - @property - def values(self): - """Gets the values of this WidgetV2. # noqa: E501 - - - :return: The values of this WidgetV2. # noqa: E501 - :rtype: list[ValueV2] - """ - return self._values - - @values.setter - def values(self, values): - """Sets the values of this WidgetV2. - - - :param values: The values of this WidgetV2. # noqa: E501 - :type: list[ValueV2] - """ - - self._values = values - - @property - def placeholder(self): - """Gets the placeholder of this WidgetV2. # noqa: E501 - - - :return: The placeholder of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._placeholder - - @placeholder.setter - def placeholder(self, placeholder): - """Sets the placeholder of this WidgetV2. - - - :param placeholder: The placeholder of this WidgetV2. # noqa: E501 - :type: str - """ - - self._placeholder = placeholder - - @property - def unit(self): - """Gets the unit of this WidgetV2. # noqa: E501 - - - :return: The unit of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._unit - - @unit.setter - def unit(self, unit): - """Sets the unit of this WidgetV2. - - - :param unit: The unit of this WidgetV2. # noqa: E501 - :type: str - """ - - self._unit = unit - - @property - def min(self): - """Gets the min of this WidgetV2. # noqa: E501 - - - :return: The min of this WidgetV2. # noqa: E501 - :rtype: int - """ - return self._min - - @min.setter - def min(self, min): - """Sets the min of this WidgetV2. - - - :param min: The min of this WidgetV2. # noqa: E501 - :type: int - """ - - self._min = min - - @property - def max(self): - """Gets the max of this WidgetV2. # noqa: E501 - - - :return: The max of this WidgetV2. # noqa: E501 - :rtype: int - """ - return self._max - - @max.setter - def max(self, max): - """Sets the max of this WidgetV2. - - - :param max: The max of this WidgetV2. # noqa: E501 - :type: int - """ - - self._max = max - - @property - def default_min(self): - """Gets the default_min of this WidgetV2. # noqa: E501 - - - :return: The default_min of this WidgetV2. # noqa: E501 - :rtype: int - """ - return self._default_min - - @default_min.setter - def default_min(self, default_min): - """Sets the default_min of this WidgetV2. - - - :param default_min: The default_min of this WidgetV2. # noqa: E501 - :type: int - """ - - self._default_min = default_min - - @property - def default_max(self): - """Gets the default_max of this WidgetV2. # noqa: E501 - - - :return: The default_max of this WidgetV2. # noqa: E501 - :rtype: int - """ - return self._default_max - - @default_max.setter - def default_max(self, default_max): - """Sets the default_max of this WidgetV2. - - - :param default_max: The default_max of this WidgetV2. # noqa: E501 - :type: int - """ - - self._default_max = default_max - - @property - def step(self): - """Gets the step of this WidgetV2. # noqa: E501 - - - :return: The step of this WidgetV2. # noqa: E501 - :rtype: int - """ - return self._step - - @step.setter - def step(self, step): - """Sets the step of this WidgetV2. - - - :param step: The step of this WidgetV2. # noqa: E501 - :type: int - """ - - self._step = step - - @property - def allowempty(self): - """Gets the allowempty of this WidgetV2. 
# noqa: E501 - - - :return: The allowempty of this WidgetV2. # noqa: E501 - :rtype: bool - """ - return self._allowempty - - @allowempty.setter - def allowempty(self, allowempty): - """Sets the allowempty of this WidgetV2. - - - :param allowempty: The allowempty of this WidgetV2. # noqa: E501 - :type: bool - """ - - self._allowempty = allowempty - - @property - def defaultvalue(self): - """Gets the defaultvalue of this WidgetV2. # noqa: E501 - - - :return: The defaultvalue of this WidgetV2. # noqa: E501 - :rtype: str - """ - return self._defaultvalue - - @defaultvalue.setter - def defaultvalue(self, defaultvalue): - """Sets the defaultvalue of this WidgetV2. - - - :param defaultvalue: The defaultvalue of this WidgetV2. # noqa: E501 - :type: str - """ - - self._defaultvalue = defaultvalue - - @property - def is_extended(self): - """Gets the is_extended of this WidgetV2. # noqa: E501 - - - :return: The is_extended of this WidgetV2. # noqa: E501 - :rtype: bool - """ - return self._is_extended - - @is_extended.setter - def is_extended(self, is_extended): - """Sets the is_extended of this WidgetV2. - - - :param is_extended: The is_extended of this WidgetV2. # noqa: E501 - :type: bool - """ - - self._is_extended = is_extended - - @property - def is_required(self): - """Gets the is_required of this WidgetV2. # noqa: E501 - - - :return: The is_required of this WidgetV2. # noqa: E501 - :rtype: bool - """ - return self._is_required - - @is_required.setter - def is_required(self, is_required): - """Sets the is_required of this WidgetV2. - - - :param is_required: The is_required of this WidgetV2. # noqa: E501 - :type: bool - """ - - self._is_required = is_required - - @property - def is_searchable(self): - """Gets the is_searchable of this WidgetV2. # noqa: E501 - - - :return: The is_searchable of this WidgetV2. # noqa: E501 - :rtype: bool - """ - return self._is_searchable - - @is_searchable.setter - def is_searchable(self, is_searchable): - """Sets the is_searchable of this WidgetV2. - - - :param is_searchable: The is_searchable of this WidgetV2. 
# noqa: E501 - :type: bool - """ - - self._is_searchable = is_searchable - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(WidgetV2, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, WidgetV2): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/workflow.py b/edu_sharing_client/models/workflow.py deleted file mode 100644 index e6cc83fd..00000000 --- a/edu_sharing_client/models/workflow.py +++ /dev/null @@ -1,189 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class Workflow(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'id': 'str', - 'color': 'str', - 'has_receiver': 'bool', - 'next': 'list[str]' - } - - attribute_map = { - 'id': 'id', - 'color': 'color', - 'has_receiver': 'hasReceiver', - 'next': 'next' - } - - def __init__(self, id=None, color=None, has_receiver=False, next=None): # noqa: E501 - """Workflow - a model defined in Swagger""" # noqa: E501 - self._id = None - self._color = None - self._has_receiver = None - self._next = None - self.discriminator = None - if id is not None: - self.id = id - if color is not None: - self.color = color - if has_receiver is not None: - self.has_receiver = has_receiver - if next is not None: - self.next = next - - @property - def id(self): - """Gets the id of this Workflow. # noqa: E501 - - - :return: The id of this Workflow. # noqa: E501 - :rtype: str - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this Workflow. - - - :param id: The id of this Workflow. # noqa: E501 - :type: str - """ - - self._id = id - - @property - def color(self): - """Gets the color of this Workflow. # noqa: E501 - - - :return: The color of this Workflow. # noqa: E501 - :rtype: str - """ - return self._color - - @color.setter - def color(self, color): - """Sets the color of this Workflow. - - - :param color: The color of this Workflow. # noqa: E501 - :type: str - """ - - self._color = color - - @property - def has_receiver(self): - """Gets the has_receiver of this Workflow. 
# noqa: E501 - - - :return: The has_receiver of this Workflow. # noqa: E501 - :rtype: bool - """ - return self._has_receiver - - @has_receiver.setter - def has_receiver(self, has_receiver): - """Sets the has_receiver of this Workflow. - - - :param has_receiver: The has_receiver of this Workflow. # noqa: E501 - :type: bool - """ - - self._has_receiver = has_receiver - - @property - def next(self): - """Gets the next of this Workflow. # noqa: E501 - - - :return: The next of this Workflow. # noqa: E501 - :rtype: list[str] - """ - return self._next - - @next.setter - def next(self, next): - """Sets the next of this Workflow. - - - :param next: The next of this Workflow. # noqa: E501 - :type: list[str] - """ - - self._next = next - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(Workflow, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Workflow): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/models/workflow_history.py b/edu_sharing_client/models/workflow_history.py deleted file mode 100644 index 76285cc9..00000000 --- a/edu_sharing_client/models/workflow_history.py +++ /dev/null @@ -1,215 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -import pprint -import re # noqa: F401 - -import six - - -class WorkflowHistory(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'time': 'int', - 'editor': 'UserSimple', - 'receiver': 'list[Authority]', - 'status': 'str', - 'comment': 'str' - } - - attribute_map = { - 'time': 'time', - 'editor': 'editor', - 'receiver': 'receiver', - 'status': 'status', - 'comment': 'comment' - } - - def __init__(self, time=None, editor=None, receiver=None, status=None, comment=None): # noqa: E501 - """WorkflowHistory - a model defined in Swagger""" # noqa: E501 - self._time = None - self._editor = None - self._receiver = None - self._status = None - self._comment = None - self.discriminator = None - if time is not None: - self.time = time - if editor is not None: - self.editor = editor - if receiver is not None: - self.receiver = receiver - if status is not None: - self.status = status - if comment is not None: - self.comment = comment - - @property - def time(self): - """Gets the time of this WorkflowHistory. # noqa: E501 - - - :return: The time of this WorkflowHistory. # noqa: E501 - :rtype: int - """ - return self._time - - @time.setter - def time(self, time): - """Sets the time of this WorkflowHistory. - - - :param time: The time of this WorkflowHistory. # noqa: E501 - :type: int - """ - - self._time = time - - @property - def editor(self): - """Gets the editor of this WorkflowHistory. # noqa: E501 - - - :return: The editor of this WorkflowHistory. # noqa: E501 - :rtype: UserSimple - """ - return self._editor - - @editor.setter - def editor(self, editor): - """Sets the editor of this WorkflowHistory. - - - :param editor: The editor of this WorkflowHistory. # noqa: E501 - :type: UserSimple - """ - - self._editor = editor - - @property - def receiver(self): - """Gets the receiver of this WorkflowHistory. # noqa: E501 - - - :return: The receiver of this WorkflowHistory. # noqa: E501 - :rtype: list[Authority] - """ - return self._receiver - - @receiver.setter - def receiver(self, receiver): - """Sets the receiver of this WorkflowHistory. - - - :param receiver: The receiver of this WorkflowHistory. # noqa: E501 - :type: list[Authority] - """ - - self._receiver = receiver - - @property - def status(self): - """Gets the status of this WorkflowHistory. # noqa: E501 - - - :return: The status of this WorkflowHistory. # noqa: E501 - :rtype: str - """ - return self._status - - @status.setter - def status(self, status): - """Sets the status of this WorkflowHistory. - - - :param status: The status of this WorkflowHistory. # noqa: E501 - :type: str - """ - - self._status = status - - @property - def comment(self): - """Gets the comment of this WorkflowHistory. # noqa: E501 - - - :return: The comment of this WorkflowHistory. # noqa: E501 - :rtype: str - """ - return self._comment - - @comment.setter - def comment(self, comment): - """Sets the comment of this WorkflowHistory. - - - :param comment: The comment of this WorkflowHistory. 
# noqa: E501 - :type: str - """ - - self._comment = comment - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - if issubclass(WorkflowHistory, dict): - for key, value in self.items(): - result[key] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, WorkflowHistory): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/edu_sharing_client/rest.py b/edu_sharing_client/rest.py deleted file mode 100644 index fc68c702..00000000 --- a/edu_sharing_client/rest.py +++ /dev/null @@ -1,322 +0,0 @@ -# coding: utf-8 - -""" - edu-sharing Repository REST API - - The public restful API of the edu-sharing repository. # noqa: E501 - - OpenAPI spec version: 1.1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import io -import json -import logging -import re -import ssl - -import certifi -# python 2 and python 3 compatibility library -import six -from six.moves.urllib.parse import urlencode - -try: - import urllib3 -except ImportError: - raise ImportError('Swagger python client requires urllib3.') - - -logger = logging.getLogger(__name__) - - -class RESTResponse(io.IOBase): - - def __init__(self, resp): - self.urllib3_response = resp - self.status = resp.status - self.reason = resp.reason - self.data = resp.data - - def getheaders(self): - """Returns a dictionary of the response headers.""" - return self.urllib3_response.getheaders() - - def getheader(self, name, default=None): - """Returns a given response header.""" - return self.urllib3_response.getheader(name, default) - - -class RESTClientObject(object): - - def __init__(self, configuration, pools_size=4, maxsize=None): - # urllib3.PoolManager will pass all kw parameters to connectionpool - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 - # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 - # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 - - # cert_reqs - if configuration.verify_ssl: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - - # ca_certs - if configuration.ssl_ca_cert: - ca_certs = configuration.ssl_ca_cert - else: - # if not set certificate file, use Mozilla's root certificates. 
- ca_certs = certifi.where() - - addition_pool_args = {} - if configuration.assert_hostname is not None: - addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 - - if maxsize is None: - if configuration.connection_pool_maxsize is not None: - maxsize = configuration.connection_pool_maxsize - else: - maxsize = 4 - - # https pool manager - if configuration.proxy: - self.pool_manager = urllib3.ProxyManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=ca_certs, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - proxy_url=configuration.proxy, - **addition_pool_args - ) - else: - self.pool_manager = urllib3.PoolManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=ca_certs, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - **addition_pool_args - ) - - def request(self, method, url, query_params=None, headers=None, - body=None, post_params=None, _preload_content=True, - _request_timeout=None): - """Perform requests. - - :param method: http request method - :param url: http request url - :param query_params: query parameters in the url - :param headers: http request headers - :param body: request json body, for `application/json` - :param post_params: request post parameters, - `application/x-www-form-urlencoded` - and `multipart/form-data` - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - """ - method = method.upper() - assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', - 'PATCH', 'OPTIONS'] - - if post_params and body: - raise ValueError( - "body parameter cannot be used with post_params parameter." - ) - - post_params = post_params or {} - headers = headers or {} - - timeout = None - if _request_timeout: - if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821 - timeout = urllib3.Timeout(total=_request_timeout) - elif (isinstance(_request_timeout, tuple) and - len(_request_timeout) == 2): - timeout = urllib3.Timeout( - connect=_request_timeout[0], read=_request_timeout[1]) - - if 'Content-Type' not in headers: - headers['Content-Type'] = 'application/json' - - try: - # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` - if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: - if query_params: - url += '?' + urlencode(query_params) - if re.search('json', headers['Content-Type'], re.IGNORECASE): - request_body = '{}' - if body is not None: - request_body = json.dumps(body) - r = self.pool_manager.request( - method, url, - body=request_body, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 - r = self.pool_manager.request( - method, url, - fields=post_params, - encode_multipart=False, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'multipart/form-data': - # must del headers['Content-Type'], or the correct - # Content-Type which generated by urllib3 will be - # overwritten. 
- del headers['Content-Type'] - r = self.pool_manager.request( - method, url, - fields=post_params, - encode_multipart=True, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - # Pass a `string` parameter directly in the body to support - # other content types than Json when `body` argument is - # provided in serialized form - elif isinstance(body, str): - request_body = body - r = self.pool_manager.request( - method, url, - body=request_body, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - else: - # Cannot generate the request from given parameters - msg = """Cannot prepare a request message for provided - arguments. Please check that your arguments match - declared content type.""" - raise ApiException(status=0, reason=msg) - # For `GET`, `HEAD` - else: - r = self.pool_manager.request(method, url, - fields=query_params, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - except urllib3.exceptions.SSLError as e: - msg = "{0}\n{1}".format(type(e).__name__, str(e)) - raise ApiException(status=0, reason=msg) - - if _preload_content: - r = RESTResponse(r) - - # In the python 3, the response.data is bytes. - # we need to decode it to string. - if six.PY3: - r.data = r.data.decode('utf8') - - # log response body - logger.debug("response body: %s", r.data) - - if not 200 <= r.status <= 299: - raise ApiException(http_resp=r) - - return r - - def GET(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("GET", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def HEAD(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("HEAD", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def OPTIONS(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("OPTIONS", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def DELETE(self, url, headers=None, query_params=None, body=None, - _preload_content=True, _request_timeout=None): - return self.request("DELETE", url, - headers=headers, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def POST(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("POST", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PUT(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PUT", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PATCH(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PATCH", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - 
_request_timeout=_request_timeout, - body=body) - - -class ApiException(Exception): - - def __init__(self, status=None, reason=None, http_resp=None): - if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data - self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None - - def __str__(self): - """Custom error messages for exception""" - error_message = "({0})\n"\ - "Reason: {1}\n".format(self.status, self.reason) - if self.headers: - error_message += "HTTP response headers: {0}\n".format( - self.headers) - - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) - - return error_message From 86c6c701637efdcf15b88e5e2c7a80e2d3f4dd77 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:46:07 +0200 Subject: [PATCH 538/590] add "openapi-generator-cli"-generated API client for edu-sharing v9.x --- .../.github/workflows/python.yml | 38 + edu_sharing_openapi/.gitignore | 66 + edu_sharing_openapi/.gitlab-ci.yml | 31 + edu_sharing_openapi/.openapi-generator-ignore | 23 + edu_sharing_openapi/.openapi-generator/FILES | 947 + .../.openapi-generator/VERSION | 1 + edu_sharing_openapi/.travis.yml | 17 + edu_sharing_openapi/README.md | 672 + edu_sharing_openapi/docs/ABOUTApi.md | 216 + edu_sharing_openapi/docs/ACE.md | 33 + edu_sharing_openapi/docs/ACL.md | 30 + edu_sharing_openapi/docs/ADMINV1Api.md | 3955 ++++ edu_sharing_openapi/docs/ARCHIVEV1Api.md | 335 + .../docs/AUTHENTICATIONV1Api.md | 346 + edu_sharing_openapi/docs/About.md | 34 + edu_sharing_openapi/docs/AboutService.md | 30 + edu_sharing_openapi/docs/AbstractEntries.md | 30 + .../docs/AddToCollectionEventDTO.md | 30 + edu_sharing_openapi/docs/Admin.md | 30 + edu_sharing_openapi/docs/AdminStatistics.md | 34 + edu_sharing_openapi/docs/Application.md | 39 + edu_sharing_openapi/docs/Audience.md | 29 + .../docs/AuthenticationToken.md | 30 + edu_sharing_openapi/docs/Authority.md | 32 + edu_sharing_openapi/docs/AuthorityEntries.md | 30 + edu_sharing_openapi/docs/AvailableMds.md | 30 + edu_sharing_openapi/docs/BULKV1Api.md | 172 + edu_sharing_openapi/docs/Banner.md | 31 + edu_sharing_openapi/docs/CLIENTUTILSV1Api.md | 80 + edu_sharing_openapi/docs/COLLECTIONV1Api.md | 1191 ++ edu_sharing_openapi/docs/COMMENTV1Api.md | 309 + edu_sharing_openapi/docs/CONFIGV1Api.md | 435 + edu_sharing_openapi/docs/CONNECTORV1Api.md | 80 + edu_sharing_openapi/docs/CacheCluster.md | 37 + edu_sharing_openapi/docs/CacheInfo.md | 41 + edu_sharing_openapi/docs/CacheMember.md | 29 + edu_sharing_openapi/docs/Catalog.md | 30 + edu_sharing_openapi/docs/Collection.md | 45 + edu_sharing_openapi/docs/CollectionCounts.md | 30 + edu_sharing_openapi/docs/CollectionDTO.md | 31 + edu_sharing_openapi/docs/CollectionEntries.md | 30 + edu_sharing_openapi/docs/CollectionEntry.md | 29 + edu_sharing_openapi/docs/CollectionOptions.md | 30 + .../docs/CollectionProposalEntries.md | 30 + .../docs/CollectionReference.md | 64 + edu_sharing_openapi/docs/Collections.md | 29 + edu_sharing_openapi/docs/CollectionsResult.md | 29 + edu_sharing_openapi/docs/Comment.md | 33 + edu_sharing_openapi/docs/CommentEventDTO.md | 32 + edu_sharing_openapi/docs/Comments.md | 29 + edu_sharing_openapi/docs/Condition.md | 31 + edu_sharing_openapi/docs/Config.md | 31 + edu_sharing_openapi/docs/ConfigFrontpage.md | 29 + edu_sharing_openapi/docs/ConfigPrivacy.md | 29 + edu_sharing_openapi/docs/ConfigPublish.md | 30 + 
edu_sharing_openapi/docs/ConfigRating.md | 29 + edu_sharing_openapi/docs/ConfigRemote.md | 29 + edu_sharing_openapi/docs/ConfigThemeColor.md | 30 + edu_sharing_openapi/docs/ConfigThemeColors.md | 29 + edu_sharing_openapi/docs/ConfigTutorial.md | 29 + edu_sharing_openapi/docs/ConfigUpload.md | 29 + edu_sharing_openapi/docs/ConfigWorkflow.md | 32 + .../docs/ConfigWorkflowList.md | 32 + edu_sharing_openapi/docs/Connector.md | 35 + edu_sharing_openapi/docs/ConnectorFileType.md | 36 + edu_sharing_openapi/docs/ConnectorList.md | 30 + edu_sharing_openapi/docs/Content.md | 31 + edu_sharing_openapi/docs/ContextMenuEntry.md | 48 + edu_sharing_openapi/docs/Contributor.md | 34 + edu_sharing_openapi/docs/Counts.md | 29 + edu_sharing_openapi/docs/Create.md | 29 + edu_sharing_openapi/docs/CreateUsage.md | 33 + edu_sharing_openapi/docs/DeleteOption.md | 29 + edu_sharing_openapi/docs/DynamicConfig.md | 30 + .../docs/DynamicRegistrationToken.md | 34 + .../docs/DynamicRegistrationTokens.md | 29 + edu_sharing_openapi/docs/Element.md | 31 + edu_sharing_openapi/docs/ErrorResponse.md | 34 + edu_sharing_openapi/docs/ExcelResult.md | 29 + edu_sharing_openapi/docs/FEEDBACKV1Api.md | 162 + edu_sharing_openapi/docs/Facet.md | 31 + edu_sharing_openapi/docs/FeatureInfo.md | 29 + edu_sharing_openapi/docs/FeedbackData.md | 32 + edu_sharing_openapi/docs/FeedbackResult.md | 30 + edu_sharing_openapi/docs/Filter.md | 29 + edu_sharing_openapi/docs/FilterEntry.md | 30 + edu_sharing_openapi/docs/FontIcon.md | 31 + edu_sharing_openapi/docs/Frontpage.md | 35 + edu_sharing_openapi/docs/General.md | 31 + edu_sharing_openapi/docs/Geo.md | 31 + edu_sharing_openapi/docs/Group.md | 38 + edu_sharing_openapi/docs/GroupEntries.md | 30 + edu_sharing_openapi/docs/GroupEntry.md | 29 + edu_sharing_openapi/docs/GroupProfile.md | 32 + .../docs/GroupSignupDetails.md | 30 + edu_sharing_openapi/docs/Guest.md | 29 + edu_sharing_openapi/docs/HandleParam.md | 30 + edu_sharing_openapi/docs/HelpMenuOptions.md | 31 + edu_sharing_openapi/docs/HomeFolderOptions.md | 32 + edu_sharing_openapi/docs/IAMV1Api.md | 2659 +++ edu_sharing_openapi/docs/Icon.md | 29 + edu_sharing_openapi/docs/Image.md | 30 + edu_sharing_openapi/docs/Interface.md | 34 + edu_sharing_openapi/docs/InviteEventDTO.md | 33 + edu_sharing_openapi/docs/JSONObject.md | 29 + edu_sharing_openapi/docs/Job.md | 30 + edu_sharing_openapi/docs/JobBuilder.md | 29 + edu_sharing_openapi/docs/JobDataMap.md | 33 + edu_sharing_openapi/docs/JobDescription.md | 32 + edu_sharing_openapi/docs/JobDetail.md | 35 + .../docs/JobDetailJobDataMap.md | 33 + edu_sharing_openapi/docs/JobEntry.md | 29 + .../docs/JobFieldDescription.md | 34 + edu_sharing_openapi/docs/JobInfo.md | 37 + edu_sharing_openapi/docs/JobKey.md | 30 + edu_sharing_openapi/docs/KNOWLEDGEV1Api.md | 154 + edu_sharing_openapi/docs/KeyValuePair.md | 30 + .../docs/LTIPlatformConfiguration.md | 32 + edu_sharing_openapi/docs/LTIPlatformV13Api.md | 1121 ++ edu_sharing_openapi/docs/LTISession.md | 36 + .../docs/LTIToolConfiguration.md | 34 + edu_sharing_openapi/docs/LTIV13Api.md | 923 + edu_sharing_openapi/docs/Language.md | 31 + edu_sharing_openapi/docs/Level.md | 30 + edu_sharing_openapi/docs/License.md | 30 + edu_sharing_openapi/docs/LicenseAgreement.md | 29 + .../docs/LicenseAgreementNode.md | 30 + edu_sharing_openapi/docs/Licenses.md | 30 + edu_sharing_openapi/docs/Location.md | 29 + edu_sharing_openapi/docs/LogEntry.md | 32 + .../docs/LoggerConfigResult.md | 32 + edu_sharing_openapi/docs/Login.md | 39 + edu_sharing_openapi/docs/LoginCredentials.md | 
31 + edu_sharing_openapi/docs/LogoutInfo.md | 32 + edu_sharing_openapi/docs/MDSV1Api.md | 403 + edu_sharing_openapi/docs/MEDIACENTERV1Api.md | 942 + edu_sharing_openapi/docs/Mainnav.md | 30 + .../docs/ManualRegistrationData.md | 39 + .../docs/McOrgConnectResult.md | 29 + edu_sharing_openapi/docs/Mds.md | 35 + edu_sharing_openapi/docs/MdsColumn.md | 31 + edu_sharing_openapi/docs/MdsEntries.md | 29 + edu_sharing_openapi/docs/MdsGroup.md | 31 + edu_sharing_openapi/docs/MdsList.md | 30 + edu_sharing_openapi/docs/MdsQueryCriteria.md | 30 + edu_sharing_openapi/docs/MdsSort.md | 31 + edu_sharing_openapi/docs/MdsSortColumn.md | 30 + edu_sharing_openapi/docs/MdsSortDefault.md | 30 + edu_sharing_openapi/docs/MdsSubwidget.md | 29 + edu_sharing_openapi/docs/MdsValue.md | 34 + edu_sharing_openapi/docs/MdsView.md | 35 + edu_sharing_openapi/docs/MdsWidget.md | 62 + .../docs/MdsWidgetCondition.md | 33 + edu_sharing_openapi/docs/Mediacenter.md | 39 + .../docs/MediacenterProfileExtension.md | 34 + .../docs/MediacentersImportResult.md | 29 + edu_sharing_openapi/docs/MenuEntry.md | 40 + edu_sharing_openapi/docs/Message.md | 30 + edu_sharing_openapi/docs/MetadataSetInfo.md | 30 + .../docs/MetadataSuggestionEventDTO.md | 34 + edu_sharing_openapi/docs/NETWORKV1Api.md | 373 + edu_sharing_openapi/docs/NODEV1Api.md | 3724 ++++ edu_sharing_openapi/docs/NOTIFICATIONV1Api.md | 412 + edu_sharing_openapi/docs/Node.md | 61 + .../docs/NodeCollectionProposalCount.md | 63 + edu_sharing_openapi/docs/NodeData.md | 30 + edu_sharing_openapi/docs/NodeDataDTO.md | 31 + edu_sharing_openapi/docs/NodeEntries.md | 30 + edu_sharing_openapi/docs/NodeEntry.md | 29 + edu_sharing_openapi/docs/NodeIssueEventDTO.md | 31 + edu_sharing_openapi/docs/NodeLTIDeepLink.md | 30 + edu_sharing_openapi/docs/NodeLocked.md | 29 + .../docs/NodePermissionEntry.md | 29 + edu_sharing_openapi/docs/NodePermissions.md | 30 + edu_sharing_openapi/docs/NodeRef.md | 32 + edu_sharing_openapi/docs/NodeRelation.md | 30 + edu_sharing_openapi/docs/NodeRemote.md | 30 + edu_sharing_openapi/docs/NodeShare.md | 36 + edu_sharing_openapi/docs/NodeStats.md | 29 + edu_sharing_openapi/docs/NodeText.md | 31 + edu_sharing_openapi/docs/NodeVersion.md | 34 + .../docs/NodeVersionEntries.md | 29 + edu_sharing_openapi/docs/NodeVersionEntry.md | 29 + edu_sharing_openapi/docs/NodeVersionRef.md | 31 + .../docs/NodeVersionRefEntries.md | 29 + .../docs/NotificationConfig.md | 31 + .../docs/NotificationEventDTO.md | 34 + .../docs/NotificationIntervals.md | 36 + .../docs/NotificationResponsePage.md | 39 + edu_sharing_openapi/docs/NotifyEntry.md | 32 + edu_sharing_openapi/docs/ORGANIZATIONV1Api.md | 397 + .../docs/OpenIdConfiguration.md | 41 + .../docs/OpenIdRegistrationResult.md | 40 + .../docs/OrganisationsImportResult.md | 29 + edu_sharing_openapi/docs/Organization.md | 39 + .../docs/OrganizationEntries.md | 31 + edu_sharing_openapi/docs/Pageable.md | 34 + edu_sharing_openapi/docs/Pagination.md | 31 + edu_sharing_openapi/docs/Parameters.md | 29 + edu_sharing_openapi/docs/ParentEntries.md | 31 + edu_sharing_openapi/docs/Person.md | 32 + .../docs/PersonDeleteOptions.md | 39 + .../docs/PersonDeleteResult.md | 37 + edu_sharing_openapi/docs/PersonReport.md | 30 + edu_sharing_openapi/docs/PluginInfo.md | 29 + edu_sharing_openapi/docs/PluginStatus.md | 31 + edu_sharing_openapi/docs/Preferences.md | 29 + edu_sharing_openapi/docs/Preview.md | 36 + edu_sharing_openapi/docs/Profile.md | 33 + edu_sharing_openapi/docs/ProfileSettings.md | 29 + .../docs/ProposeForCollectionEventDTO.md | 30 + 
edu_sharing_openapi/docs/Provider.md | 33 + edu_sharing_openapi/docs/Query.md | 30 + edu_sharing_openapi/docs/RATINGV1Api.md | 310 + edu_sharing_openapi/docs/REGISTERV1Api.md | 406 + edu_sharing_openapi/docs/RELATIONV1Api.md | 238 + edu_sharing_openapi/docs/RENDERINGV1Api.md | 170 + edu_sharing_openapi/docs/RatingData.md | 31 + edu_sharing_openapi/docs/RatingDetails.md | 31 + edu_sharing_openapi/docs/RatingEventDTO.md | 32 + edu_sharing_openapi/docs/RatingHistory.md | 31 + edu_sharing_openapi/docs/ReferenceEntries.md | 30 + edu_sharing_openapi/docs/Register.md | 33 + edu_sharing_openapi/docs/RegisterExists.md | 29 + .../docs/RegisterInformation.md | 36 + edu_sharing_openapi/docs/RegistrationUrl.md | 29 + edu_sharing_openapi/docs/RelationData.md | 32 + edu_sharing_openapi/docs/Remote.md | 30 + .../docs/RemoteAuthDescription.md | 30 + edu_sharing_openapi/docs/Rendering.md | 32 + .../docs/RenderingDetailsEntry.md | 31 + edu_sharing_openapi/docs/RenderingGdpr.md | 31 + edu_sharing_openapi/docs/Repo.md | 35 + edu_sharing_openapi/docs/RepoEntries.md | 29 + edu_sharing_openapi/docs/RepositoryConfig.md | 29 + .../docs/RepositoryVersionInfo.md | 32 + edu_sharing_openapi/docs/RestoreResult.md | 34 + edu_sharing_openapi/docs/RestoreResults.md | 29 + edu_sharing_openapi/docs/SEARCHV1Api.md | 860 + edu_sharing_openapi/docs/SHARINGV1Api.md | 174 + edu_sharing_openapi/docs/STATISTICV1Api.md | 478 + edu_sharing_openapi/docs/STREAMV1Api.md | 464 + edu_sharing_openapi/docs/SearchParameters.md | 38 + .../docs/SearchParametersFacets.md | 33 + edu_sharing_openapi/docs/SearchResult.md | 31 + .../docs/SearchResultElastic.md | 34 + edu_sharing_openapi/docs/SearchResultLrmi.md | 33 + edu_sharing_openapi/docs/SearchResultNode.md | 33 + edu_sharing_openapi/docs/SearchVCard.md | 29 + edu_sharing_openapi/docs/ServerUpdateInfo.md | 34 + edu_sharing_openapi/docs/Service.md | 41 + edu_sharing_openapi/docs/ServiceInstance.md | 30 + edu_sharing_openapi/docs/ServiceVersion.md | 32 + edu_sharing_openapi/docs/Services.md | 29 + .../docs/SharedFolderOptions.md | 32 + edu_sharing_openapi/docs/SharingInfo.md | 33 + edu_sharing_openapi/docs/SimpleEdit.md | 32 + .../docs/SimpleEditGlobalGroups.md | 30 + .../docs/SimpleEditOrganization.md | 29 + edu_sharing_openapi/docs/Sort.md | 31 + edu_sharing_openapi/docs/StatisticEntity.md | 30 + edu_sharing_openapi/docs/StatisticEntry.md | 30 + edu_sharing_openapi/docs/Statistics.md | 29 + edu_sharing_openapi/docs/StatisticsGlobal.md | 31 + edu_sharing_openapi/docs/StatisticsGroup.md | 30 + .../docs/StatisticsKeyGroup.md | 32 + .../docs/StatisticsSubGroup.md | 30 + edu_sharing_openapi/docs/StatisticsUser.md | 29 + edu_sharing_openapi/docs/StoredService.md | 42 + edu_sharing_openapi/docs/Stream.md | 29 + edu_sharing_openapi/docs/StreamEntry.md | 36 + edu_sharing_openapi/docs/StreamEntryInput.md | 34 + edu_sharing_openapi/docs/StreamList.md | 30 + edu_sharing_openapi/docs/SubGroupItem.md | 31 + edu_sharing_openapi/docs/Suggest.md | 31 + edu_sharing_openapi/docs/Suggestion.md | 31 + edu_sharing_openapi/docs/SuggestionParam.md | 30 + edu_sharing_openapi/docs/Suggestions.md | 29 + edu_sharing_openapi/docs/TOOLV1Api.md | 481 + edu_sharing_openapi/docs/TRACKINGV1Api.md | 83 + edu_sharing_openapi/docs/Tool.md | 34 + edu_sharing_openapi/docs/Tools.md | 29 + edu_sharing_openapi/docs/Tracking.md | 33 + edu_sharing_openapi/docs/TrackingAuthority.md | 31 + edu_sharing_openapi/docs/TrackingNode.md | 34 + edu_sharing_openapi/docs/USAGEV1Api.md | 523 + edu_sharing_openapi/docs/UploadResult.md | 29 + 
edu_sharing_openapi/docs/Usage.md | 48 + edu_sharing_openapi/docs/Usages.md | 29 + edu_sharing_openapi/docs/User.md | 39 + edu_sharing_openapi/docs/UserCredential.md | 30 + edu_sharing_openapi/docs/UserDataDTO.md | 32 + edu_sharing_openapi/docs/UserEntries.md | 30 + edu_sharing_openapi/docs/UserEntry.md | 30 + edu_sharing_openapi/docs/UserProfile.md | 38 + .../docs/UserProfileAppAuth.md | 39 + edu_sharing_openapi/docs/UserProfileEdit.md | 39 + edu_sharing_openapi/docs/UserQuota.md | 31 + edu_sharing_openapi/docs/UserSimple.md | 36 + edu_sharing_openapi/docs/UserStats.md | 31 + edu_sharing_openapi/docs/UserStatus.md | 30 + edu_sharing_openapi/docs/Value.md | 30 + edu_sharing_openapi/docs/ValueParameters.md | 31 + edu_sharing_openapi/docs/Values.md | 93 + edu_sharing_openapi/docs/Variables.md | 30 + edu_sharing_openapi/docs/Version.md | 34 + edu_sharing_openapi/docs/VersionBuild.md | 29 + edu_sharing_openapi/docs/VersionGit.md | 30 + edu_sharing_openapi/docs/VersionGitCommit.md | 30 + edu_sharing_openapi/docs/VersionMaven.md | 30 + edu_sharing_openapi/docs/VersionProject.md | 31 + edu_sharing_openapi/docs/VersionTimestamp.md | 29 + .../docs/WebsiteInformation.md | 34 + edu_sharing_openapi/docs/WidgetDataDTO.md | 30 + edu_sharing_openapi/docs/WorkflowEventDTO.md | 31 + edu_sharing_openapi/docs/WorkflowHistory.md | 33 + .../edu_sharing_client/__init__.py | 340 + .../edu_sharing_client/api/__init__.py | 36 + .../edu_sharing_client/api/about_api.py | 824 + .../edu_sharing_client/api/adminv1_api.py | 15648 ++++++++++++++++ .../edu_sharing_client/api/archivev1_api.py | 1406 ++ .../api/authenticationv1_api.py | 1360 ++ .../edu_sharing_client/api/bulkv1_api.py | 748 + .../api/clientutilsv1_api.py | 311 + .../api/collectionv1_api.py | 4989 +++++ .../edu_sharing_client/api/commentv1_api.py | 1264 ++ .../edu_sharing_client/api/configv1_api.py | 1658 ++ .../edu_sharing_client/api/connectorv1_api.py | 308 + .../edu_sharing_client/api/feedbackv1_api.py | 644 + .../edu_sharing_client/api/iamv1_api.py | 10890 +++++++++++ .../edu_sharing_client/api/knowledgev1_api.py | 591 + .../api/lti_platform_v13_api.py | 4504 +++++ .../edu_sharing_client/api/ltiv13_api.py | 3788 ++++ .../edu_sharing_client/api/mdsv1_api.py | 1626 ++ .../api/mediacenterv1_api.py | 3815 ++++ .../edu_sharing_client/api/networkv1_api.py | 1419 ++ .../edu_sharing_client/api/nodev1_api.py | 15161 +++++++++++++++ .../api/notificationv1_api.py | 1685 ++ .../api/organizationv1_api.py | 1604 ++ .../edu_sharing_client/api/ratingv1_api.py | 1254 ++ .../edu_sharing_client/api/registerv1_api.py | 1619 ++ .../edu_sharing_client/api/relationv1_api.py | 962 + .../edu_sharing_client/api/renderingv1_api.py | 711 + .../edu_sharing_client/api/searchv1_api.py | 3812 ++++ .../edu_sharing_client/api/sharingv1_api.py | 747 + .../edu_sharing_client/api/statisticv1_api.py | 1989 ++ .../edu_sharing_client/api/streamv1_api.py | 1920 ++ .../edu_sharing_client/api/toolv1_api.py | 1932 ++ .../edu_sharing_client/api/trackingv1_api.py | 343 + .../edu_sharing_client/api/usagev1_api.py | 2032 ++ .../edu_sharing_client/api_client.py | 788 + .../edu_sharing_client/api_response.py | 21 + .../edu_sharing_client/configuration.py | 450 + .../edu_sharing_client/exceptions.py | 199 + .../edu_sharing_client/models/__init__.py | 292 + .../edu_sharing_client/models/about.py | 125 + .../models/about_service.py | 97 + .../models/abstract_entries.py | 93 + .../edu_sharing_client/models/ace.py | 107 + .../edu_sharing_client/models/acl.py | 97 + .../models/add_to_collection_event_dto.py | 
111 + .../edu_sharing_client/models/admin.py | 103 + .../models/admin_statistics.py | 105 + .../edu_sharing_client/models/application.py | 107 + .../edu_sharing_client/models/audience.py | 87 + .../models/authentication_token.py | 89 + .../edu_sharing_client/models/authority.py | 103 + .../models/authority_entries.py | 101 + .../models/available_mds.py | 89 + .../edu_sharing_client/models/banner.py | 91 + .../models/cache_cluster.py | 120 + .../edu_sharing_client/models/cache_info.py | 111 + .../edu_sharing_client/models/cache_member.py | 87 + .../edu_sharing_client/models/catalog.py | 89 + .../edu_sharing_client/models/collection.py | 119 + .../models/collection_counts.py | 104 + .../models/collection_dto.py | 91 + .../models/collection_entries.py | 101 + .../models/collection_entry.py | 91 + .../models/collection_options.py | 109 + .../models/collection_proposal_entries.py | 101 + .../models/collection_reference.py | 231 + .../edu_sharing_client/models/collections.py | 87 + .../models/collections_result.py | 87 + .../edu_sharing_client/models/comment.py | 106 + .../models/comment_event_dto.py | 111 + .../edu_sharing_client/models/comments.py | 95 + .../edu_sharing_client/models/condition.py | 101 + .../edu_sharing_client/models/config.py | 102 + .../models/config_frontpage.py | 87 + .../models/config_privacy.py | 87 + .../models/config_publish.py | 89 + .../models/config_rating.py | 97 + .../models/config_remote.py | 87 + .../models/config_theme_color.py | 89 + .../models/config_theme_colors.py | 95 + .../models/config_tutorial.py | 87 + .../models/config_upload.py | 97 + .../models/config_workflow.py | 101 + .../models/config_workflow_list.py | 93 + .../edu_sharing_client/models/connector.py | 107 + .../models/connector_file_type.py | 101 + .../models/connector_list.py | 97 + .../edu_sharing_client/models/content.py | 91 + .../models/context_menu_entry.py | 146 + .../edu_sharing_client/models/contributor.py | 97 + .../edu_sharing_client/models/counts.py | 95 + .../edu_sharing_client/models/create.py | 87 + .../edu_sharing_client/models/create_usage.py | 95 + .../models/delete_option.py | 87 + .../models/dynamic_config.py | 89 + .../models/dynamic_registration_token.py | 97 + .../models/dynamic_registration_tokens.py | 95 + .../edu_sharing_client/models/element.py | 91 + .../models/error_response.py | 97 + .../edu_sharing_client/models/excel_result.py | 87 + .../edu_sharing_client/models/facet.py | 99 + .../edu_sharing_client/models/feature_info.py | 97 + .../models/feedback_data.py | 94 + .../models/feedback_result.py | 89 + .../edu_sharing_client/models/filter.py | 95 + .../edu_sharing_client/models/filter_entry.py | 89 + .../edu_sharing_client/models/font_icon.py | 91 + .../edu_sharing_client/models/frontpage.py | 117 + .../edu_sharing_client/models/general.py | 91 + .../edu_sharing_client/models/geo.py | 91 + .../edu_sharing_client/models/group.py | 141 + .../models/group_entries.py | 101 + .../edu_sharing_client/models/group_entry.py | 91 + .../models/group_profile.py | 93 + .../models/group_signup_details.py | 99 + .../edu_sharing_client/models/guest.py | 87 + .../edu_sharing_client/models/handle_param.py | 109 + .../models/help_menu_options.py | 91 + .../models/home_folder_options.py | 123 + .../edu_sharing_client/models/icon.py | 87 + .../edu_sharing_client/models/image.py | 89 + .../edu_sharing_client/models/interface.py | 117 + .../models/invite_event_dto.py | 113 + .../edu_sharing_client/models/job.py | 89 + .../edu_sharing_client/models/job_builder.py | 92 + 
.../edu_sharing_client/models/job_data_map.py | 108 + .../models/job_description.py | 112 + .../edu_sharing_client/models/job_detail.py | 111 + .../models/job_detail_job_data_map.py | 108 + .../edu_sharing_client/models/job_entry.py | 91 + .../models/job_field_description.py | 97 + .../edu_sharing_client/models/job_info.py | 133 + .../edu_sharing_client/models/job_key.py | 89 + .../edu_sharing_client/models/json_object.py | 87 + .../models/key_value_pair.py | 89 + .../edu_sharing_client/models/language.py | 91 + .../edu_sharing_client/models/level.py | 94 + .../edu_sharing_client/models/license.py | 89 + .../models/license_agreement.py | 95 + .../models/license_agreement_node.py | 89 + .../edu_sharing_client/models/licenses.py | 89 + .../edu_sharing_client/models/location.py | 91 + .../edu_sharing_client/models/log_entry.py | 97 + .../models/logger_config_result.py | 93 + .../edu_sharing_client/models/login.py | 124 + .../models/login_credentials.py | 91 + .../edu_sharing_client/models/logout_info.py | 93 + .../models/lti_platform_configuration.py | 101 + .../edu_sharing_client/models/lti_session.py | 105 + .../models/lti_tool_configuration.py | 97 + .../edu_sharing_client/models/mainnav.py | 93 + .../models/manual_registration_data.py | 107 + .../models/mc_org_connect_result.py | 87 + .../edu_sharing_client/models/mds.py | 143 + .../edu_sharing_client/models/mds_column.py | 91 + .../edu_sharing_client/models/mds_entries.py | 95 + .../edu_sharing_client/models/mds_group.py | 101 + .../edu_sharing_client/models/mds_list.py | 97 + .../models/mds_query_criteria.py | 89 + .../edu_sharing_client/models/mds_sort.py | 103 + .../models/mds_sort_column.py | 89 + .../models/mds_sort_default.py | 89 + .../models/mds_subwidget.py | 87 + .../edu_sharing_client/models/mds_value.py | 97 + .../edu_sharing_client/models/mds_view.py | 109 + .../edu_sharing_client/models/mds_widget.py | 223 + .../models/mds_widget_condition.py | 102 + .../edu_sharing_client/models/mediacenter.py | 143 + .../models/mediacenter_profile_extension.py | 115 + .../models/mediacenters_import_result.py | 87 + .../edu_sharing_client/models/menu_entry.py | 109 + .../edu_sharing_client/models/message.py | 89 + .../models/metadata_set_info.py | 89 + .../models/metadata_suggestion_event_dto.py | 119 + .../edu_sharing_client/models/node.py | 226 + .../models/node_collection_proposal_count.py | 229 + .../edu_sharing_client/models/node_data.py | 89 + .../models/node_data_dto.py | 91 + .../edu_sharing_client/models/node_entries.py | 101 + .../edu_sharing_client/models/node_entry.py | 91 + .../models/node_issue_event_dto.py | 109 + .../edu_sharing_client/models/node_locked.py | 87 + .../models/node_lti_deep_link.py | 89 + .../models/node_permission_entry.py | 91 + .../models/node_permissions.py | 101 + .../edu_sharing_client/models/node_ref.py | 93 + .../models/node_relation.py | 101 + .../edu_sharing_client/models/node_remote.py | 96 + .../edu_sharing_client/models/node_share.py | 101 + .../edu_sharing_client/models/node_stats.py | 87 + .../edu_sharing_client/models/node_text.py | 91 + .../edu_sharing_client/models/node_version.py | 105 + .../models/node_version_entries.py | 95 + .../models/node_version_entry.py | 91 + .../models/node_version_ref.py | 95 + .../models/node_version_ref_entries.py | 95 + .../models/notification_config.py | 115 + .../models/notification_event_dto.py | 151 + .../models/notification_intervals.py | 181 + .../models/notification_response_page.py | 123 + .../edu_sharing_client/models/notify_entry.py | 101 + 
.../models/open_id_configuration.py | 115 + .../models/open_id_registration_result.py | 113 + .../models/organisations_import_result.py | 87 + .../edu_sharing_client/models/organization.py | 138 + .../models/organization_entries.py | 103 + .../edu_sharing_client/models/pageable.py | 101 + .../edu_sharing_client/models/pagination.py | 91 + .../edu_sharing_client/models/parameters.py | 91 + .../models/parent_entries.py | 103 + .../edu_sharing_client/models/person.py | 97 + .../models/person_delete_options.py | 135 + .../models/person_delete_result.py | 132 + .../models/person_report.py | 101 + .../edu_sharing_client/models/plugin_info.py | 87 + .../models/plugin_status.py | 91 + .../edu_sharing_client/models/preferences.py | 87 + .../edu_sharing_client/models/preview.py | 101 + .../edu_sharing_client/models/profile.py | 99 + .../models/profile_settings.py | 87 + .../propose_for_collection_event_dto.py | 111 + .../edu_sharing_client/models/provider.py | 109 + .../edu_sharing_client/models/query.py | 93 + .../edu_sharing_client/models/rating_data.py | 91 + .../models/rating_details.py | 107 + .../models/rating_event_dto.py | 111 + .../models/rating_history.py | 107 + .../models/reference_entries.py | 101 + .../edu_sharing_client/models/register.py | 95 + .../models/register_exists.py | 87 + .../models/register_information.py | 101 + .../models/registration_url.py | 87 + .../models/relation_data.py | 112 + .../edu_sharing_client/models/remote.py | 93 + .../models/remote_auth_description.py | 89 + .../edu_sharing_client/models/rendering.py | 101 + .../models/rendering_details_entry.py | 95 + .../models/rendering_gdpr.py | 91 + .../edu_sharing_client/models/repo.py | 99 + .../edu_sharing_client/models/repo_entries.py | 95 + .../models/repository_config.py | 91 + .../models/repository_version_info.py | 109 + .../models/restore_result.py | 97 + .../models/restore_results.py | 95 + .../models/search_parameters.py | 113 + .../models/search_parameters_facets.py | 103 + .../models/search_result.py | 111 + .../models/search_result_elastic.py | 117 + .../models/search_result_lrmi.py | 115 + .../models/search_result_node.py | 123 + .../models/search_v_card.py | 87 + .../models/server_update_info.py | 97 + .../edu_sharing_client/models/service.py | 131 + .../models/service_instance.py | 93 + .../models/service_version.py | 93 + .../edu_sharing_client/models/services.py | 87 + .../models/shared_folder_options.py | 123 + .../edu_sharing_client/models/sharing_info.py | 103 + .../edu_sharing_client/models/simple_edit.py | 105 + .../models/simple_edit_global_groups.py | 89 + .../models/simple_edit_organization.py | 87 + .../edu_sharing_client/models/sort.py | 91 + .../models/statistic_entity.py | 89 + .../models/statistic_entry.py | 97 + .../edu_sharing_client/models/statistics.py | 95 + .../models/statistics_global.py | 107 + .../models/statistics_group.py | 97 + .../models/statistics_key_group.py | 101 + .../models/statistics_sub_group.py | 97 + .../models/statistics_user.py | 87 + .../models/stored_service.py | 133 + .../edu_sharing_client/models/stream.py | 87 + .../edu_sharing_client/models/stream_entry.py | 113 + .../models/stream_entry_input.py | 97 + .../edu_sharing_client/models/stream_list.py | 101 + .../models/sub_group_item.py | 91 + .../edu_sharing_client/models/suggest.py | 91 + .../edu_sharing_client/models/suggestion.py | 91 + .../models/suggestion_param.py | 101 + .../edu_sharing_client/models/suggestions.py | 95 + .../edu_sharing_client/models/tool.py | 97 + 
.../edu_sharing_client/models/tools.py | 95 + .../edu_sharing_client/models/tracking.py | 99 + .../models/tracking_authority.py | 107 + .../models/tracking_node.py | 105 + .../models/upload_result.py | 87 + .../edu_sharing_client/models/usage.py | 130 + .../edu_sharing_client/models/usages.py | 95 + .../edu_sharing_client/models/user.py | 148 + .../models/user_credential.py | 89 + .../models/user_data_dto.py | 93 + .../edu_sharing_client/models/user_entries.py | 101 + .../edu_sharing_client/models/user_entry.py | 93 + .../edu_sharing_client/models/user_profile.py | 105 + .../models/user_profile_app_auth.py | 107 + .../models/user_profile_edit.py | 107 + .../edu_sharing_client/models/user_quota.py | 91 + .../edu_sharing_client/models/user_simple.py | 127 + .../edu_sharing_client/models/user_stats.py | 91 + .../edu_sharing_client/models/user_status.py | 99 + .../edu_sharing_client/models/value.py | 89 + .../models/value_parameters.py | 91 + .../edu_sharing_client/models/values.py | 362 + .../edu_sharing_client/models/variables.py | 89 + .../edu_sharing_client/models/version.py | 97 + .../models/version_build.py | 87 + .../edu_sharing_client/models/version_git.py | 93 + .../models/version_git_commit.py | 93 + .../models/version_maven.py | 93 + .../models/version_project.py | 91 + .../models/version_timestamp.py | 87 + .../models/website_information.py | 105 + .../models/widget_data_dto.py | 89 + .../models/workflow_event_dto.py | 109 + .../models/workflow_history.py | 107 + .../edu_sharing_client/py.typed | 0 .../edu_sharing_client/rest.py | 257 + edu_sharing_openapi/git_push.sh | 57 + edu_sharing_openapi/pyproject.toml | 71 + edu_sharing_openapi/requirements.txt | 5 + edu_sharing_openapi/setup.cfg | 2 + edu_sharing_openapi/setup.py | 49 + edu_sharing_openapi/test-requirements.txt | 5 + edu_sharing_openapi/test/__init__.py | 0 edu_sharing_openapi/test/test_about.py | 96 + edu_sharing_openapi/test/test_about_api.py | 52 + .../test/test_about_service.py | 70 + .../test/test_abstract_entries.py | 64 + edu_sharing_openapi/test/test_ace.py | 97 + edu_sharing_openapi/test/test_acl.py | 128 + .../test/test_add_to_collection_event_dto.py | 66 + edu_sharing_openapi/test/test_admin.py | 61 + .../test/test_admin_statistics.py | 260 + edu_sharing_openapi/test/test_adminv1_api.py | 408 + edu_sharing_openapi/test/test_application.py | 61 + .../test/test_archivev1_api.py | 59 + edu_sharing_openapi/test/test_audience.py | 51 + .../test/test_authentication_token.py | 52 + .../test/test_authenticationv1_api.py | 66 + edu_sharing_openapi/test/test_authority.py | 59 + .../test/test_authority_entries.py | 80 + .../test/test_available_mds.py | 54 + edu_sharing_openapi/test/test_banner.py | 55 + edu_sharing_openapi/test/test_bulkv1_api.py | 45 + .../test/test_cache_cluster.py | 77 + edu_sharing_openapi/test/test_cache_info.py | 63 + edu_sharing_openapi/test/test_cache_member.py | 51 + edu_sharing_openapi/test/test_catalog.py | 52 + .../test/test_clientutilsv1_api.py | 38 + edu_sharing_openapi/test/test_collection.py | 72 + .../test/test_collection_counts.py | 62 + .../test/test_collection_dto.py | 57 + .../test/test_collection_entries.py | 464 + .../test/test_collection_entry.py | 456 + .../test/test_collection_options.py | 52 + .../test/test_collection_proposal_entries.py | 528 + .../test/test_collection_reference.py | 703 + edu_sharing_openapi/test/test_collections.py | 53 + .../test/test_collections_result.py | 51 + .../test/test_collectionv1_api.py | 136 + edu_sharing_openapi/test/test_comment.py | 118 + 
.../test/test_comment_event_dto.py | 61 + edu_sharing_openapi/test/test_comments.py | 112 + .../test/test_commentv1_api.py | 59 + edu_sharing_openapi/test/test_condition.py | 53 + edu_sharing_openapi/test/test_config.py | 554 + .../test/test_config_frontpage.py | 51 + .../test/test_config_privacy.py | 51 + .../test/test_config_publish.py | 52 + .../test/test_config_rating.py | 51 + .../test/test_config_remote.py | 51 + .../test/test_config_theme_color.py | 52 + .../test/test_config_theme_colors.py | 55 + .../test/test_config_tutorial.py | 51 + .../test/test_config_upload.py | 51 + .../test/test_config_workflow.py | 62 + .../test/test_config_workflow_list.py | 56 + edu_sharing_openapi/test/test_configv1_api.py | 73 + edu_sharing_openapi/test/test_connector.py | 70 + .../test/test_connector_file_type.py | 58 + .../test/test_connector_list.py | 73 + .../test/test_connectorv1_api.py | 38 + edu_sharing_openapi/test/test_content.py | 53 + .../test/test_context_menu_entry.py | 72 + edu_sharing_openapi/test/test_contributor.py | 56 + edu_sharing_openapi/test/test_counts.py | 56 + edu_sharing_openapi/test/test_create.py | 51 + edu_sharing_openapi/test/test_create_usage.py | 55 + .../test/test_delete_option.py | 51 + .../test/test_dynamic_config.py | 52 + .../test/test_dynamic_registration_token.py | 56 + .../test/test_dynamic_registration_tokens.py | 59 + edu_sharing_openapi/test/test_element.py | 53 + .../test/test_error_response.py | 65 + edu_sharing_openapi/test/test_excel_result.py | 51 + edu_sharing_openapi/test/test_facet.py | 63 + edu_sharing_openapi/test/test_feature_info.py | 51 + .../test/test_feedback_data.py | 58 + .../test/test_feedback_result.py | 52 + .../test/test_feedbackv1_api.py | 45 + edu_sharing_openapi/test/test_filter.py | 64 + edu_sharing_openapi/test/test_filter_entry.py | 58 + edu_sharing_openapi/test/test_font_icon.py | 53 + edu_sharing_openapi/test/test_frontpage.py | 64 + edu_sharing_openapi/test/test_general.py | 53 + edu_sharing_openapi/test/test_geo.py | 53 + edu_sharing_openapi/test/test_group.py | 106 + .../test/test_group_entries.py | 148 + edu_sharing_openapi/test/test_group_entry.py | 136 + .../test/test_group_profile.py | 54 + .../test/test_group_signup_details.py | 52 + edu_sharing_openapi/test/test_guest.py | 51 + edu_sharing_openapi/test/test_handle_param.py | 52 + .../test/test_help_menu_options.py | 53 + .../test/test_home_folder_options.py | 54 + edu_sharing_openapi/test/test_iamv1_api.py | 269 + edu_sharing_openapi/test/test_icon.py | 51 + edu_sharing_openapi/test/test_image.py | 52 + edu_sharing_openapi/test/test_interface.py | 56 + .../test/test_invite_event_dto.py | 64 + edu_sharing_openapi/test/test_job.py | 54 + edu_sharing_openapi/test/test_job_builder.py | 52 + edu_sharing_openapi/test/test_job_data_map.py | 59 + .../test/test_job_description.py | 64 + edu_sharing_openapi/test/test_job_detail.py | 62 + .../test/test_job_detail_job_data_map.py | 59 + edu_sharing_openapi/test/test_job_entry.py | 56 + .../test/test_job_field_description.py | 56 + edu_sharing_openapi/test/test_job_info.py | 85 + edu_sharing_openapi/test/test_job_key.py | 52 + edu_sharing_openapi/test/test_json_object.py | 51 + .../test/test_key_value_pair.py | 52 + .../test/test_knowledgev1_api.py | 45 + edu_sharing_openapi/test/test_language.py | 57 + edu_sharing_openapi/test/test_level.py | 53 + edu_sharing_openapi/test/test_license.py | 52 + .../test/test_license_agreement.py | 55 + .../test/test_license_agreement_node.py | 52 + edu_sharing_openapi/test/test_licenses.py | 58 + 
edu_sharing_openapi/test/test_location.py | 54 + edu_sharing_openapi/test/test_log_entry.py | 57 + .../test/test_logger_config_result.py | 56 + edu_sharing_openapi/test/test_login.py | 286 + .../test/test_login_credentials.py | 56 + edu_sharing_openapi/test/test_logout_info.py | 54 + .../test/test_lti_platform_configuration.py | 62 + .../test/test_lti_platform_v13_api.py | 136 + edu_sharing_openapi/test/test_lti_session.py | 264 + .../test/test_lti_tool_configuration.py | 58 + edu_sharing_openapi/test/test_ltiv13_api.py | 115 + edu_sharing_openapi/test/test_mainnav.py | 53 + .../test/test_manual_registration_data.py | 67 + .../test/test_mc_org_connect_result.py | 51 + edu_sharing_openapi/test/test_mds.py | 248 + edu_sharing_openapi/test/test_mds_column.py | 53 + edu_sharing_openapi/test/test_mds_entries.py | 60 + edu_sharing_openapi/test/test_mds_group.py | 55 + edu_sharing_openapi/test/test_mds_list.py | 57 + .../test/test_mds_query_criteria.py | 58 + edu_sharing_openapi/test/test_mds_sort.py | 60 + .../test/test_mds_sort_column.py | 53 + .../test/test_mds_sort_default.py | 54 + .../test/test_mds_subwidget.py | 51 + edu_sharing_openapi/test/test_mds_value.py | 59 + edu_sharing_openapi/test/test_mds_view.py | 57 + edu_sharing_openapi/test/test_mds_widget.py | 104 + .../test/test_mds_widget_condition.py | 59 + edu_sharing_openapi/test/test_mdsv1_api.py | 66 + edu_sharing_openapi/test/test_mediacenter.py | 107 + .../test_mediacenter_profile_extension.py | 60 + .../test/test_mediacenters_import_result.py | 51 + .../test/test_mediacenterv1_api.py | 115 + edu_sharing_openapi/test/test_menu_entry.py | 62 + edu_sharing_openapi/test/test_message.py | 54 + .../test/test_metadata_set_info.py | 54 + .../test_metadata_suggestion_event_dto.py | 65 + .../test/test_networkv1_api.py | 66 + edu_sharing_openapi/test/test_node.py | 692 + .../test_node_collection_proposal_count.py | 696 + edu_sharing_openapi/test/test_node_data.py | 54 + .../test/test_node_data_dto.py | 57 + edu_sharing_openapi/test/test_node_entries.py | 468 + edu_sharing_openapi/test/test_node_entry.py | 456 + .../test/test_node_issue_event_dto.py | 60 + edu_sharing_openapi/test/test_node_locked.py | 52 + .../test/test_node_lti_deep_link.py | 52 + .../test/test_node_permission_entry.py | 154 + .../test/test_node_permissions.py | 206 + edu_sharing_openapi/test/test_node_ref.py | 57 + .../test/test_node_relation.py | 486 + edu_sharing_openapi/test/test_node_remote.py | 862 + edu_sharing_openapi/test/test_node_share.py | 58 + edu_sharing_openapi/test/test_node_stats.py | 53 + edu_sharing_openapi/test/test_node_text.py | 53 + edu_sharing_openapi/test/test_node_version.py | 118 + .../test/test_node_version_entries.py | 130 + .../test/test_node_version_entry.py | 126 + .../test/test_node_version_ref.py | 64 + .../test/test_node_version_ref_entries.py | 70 + edu_sharing_openapi/test/test_nodev1_api.py | 360 + .../test/test_notification_config.py | 61 + .../test/test_notification_event_dto.py | 65 + .../test/test_notification_intervals.py | 58 + .../test/test_notification_response_page.py | 89 + .../test/test_notificationv1_api.py | 73 + edu_sharing_openapi/test/test_notify_entry.py | 262 + .../test/test_open_id_configuration.py | 89 + .../test/test_open_id_registration_result.py | 76 + .../test/test_organisations_import_result.py | 51 + edu_sharing_openapi/test/test_organization.py | 80 + .../test/test_organization_entries.py | 123 + .../test/test_organizationv1_api.py | 66 + edu_sharing_openapi/test/test_pageable.py | 59 + 
edu_sharing_openapi/test/test_pagination.py | 56 + edu_sharing_openapi/test/test_parameters.py | 54 + .../test/test_parent_entries.py | 469 + edu_sharing_openapi/test/test_person.py | 70 + .../test/test_person_delete_options.py | 76 + .../test/test_person_delete_result.py | 87 + .../test/test_person_report.py | 102 + edu_sharing_openapi/test/test_plugin_info.py | 51 + .../test/test_plugin_status.py | 53 + edu_sharing_openapi/test/test_preferences.py | 51 + edu_sharing_openapi/test/test_preview.py | 62 + edu_sharing_openapi/test/test_profile.py | 65 + .../test/test_profile_settings.py | 52 + .../test_propose_for_collection_event_dto.py | 66 + edu_sharing_openapi/test/test_provider.py | 59 + edu_sharing_openapi/test/test_query.py | 55 + edu_sharing_openapi/test/test_rating_data.py | 53 + .../test/test_rating_details.py | 61 + .../test/test_rating_event_dto.py | 61 + .../test/test_rating_history.py | 61 + edu_sharing_openapi/test/test_ratingv1_api.py | 59 + .../test/test_reference_entries.py | 526 + edu_sharing_openapi/test/test_register.py | 57 + .../test/test_register_exists.py | 51 + .../test/test_register_information.py | 58 + .../test/test_registerv1_api.py | 73 + .../test/test_registration_url.py | 51 + .../test/test_relation_data.py | 319 + .../test/test_relationv1_api.py | 52 + edu_sharing_openapi/test/test_remote.py | 59 + .../test/test_remote_auth_description.py | 52 + edu_sharing_openapi/test/test_rendering.py | 59 + .../test/test_rendering_details_entry.py | 460 + .../test/test_rendering_gdpr.py | 53 + .../test/test_renderingv1_api.py | 45 + edu_sharing_openapi/test/test_repo.py | 57 + edu_sharing_openapi/test/test_repo_entries.py | 70 + .../test/test_repository_config.py | 65 + .../test/test_repository_version_info.py | 73 + .../test/test_restore_result.py | 62 + .../test/test_restore_results.py | 68 + .../test/test_search_parameters.py | 79 + .../test/test_search_parameters_facets.py | 73 + .../test/test_search_result.py | 488 + .../test/test_search_result_elastic.py | 94 + .../test/test_search_result_lrmi.py | 93 + .../test/test_search_result_node.py | 497 + .../test/test_search_v_card.py | 51 + edu_sharing_openapi/test/test_searchv1_api.py | 101 + .../test/test_server_update_info.py | 56 + edu_sharing_openapi/test/test_service.py | 85 + .../test/test_service_instance.py | 62 + .../test/test_service_version.py | 56 + edu_sharing_openapi/test/test_services.py | 51 + .../test/test_shared_folder_options.py | 54 + edu_sharing_openapi/test/test_sharing_info.py | 277 + .../test/test_sharingv1_api.py | 45 + edu_sharing_openapi/test/test_simple_edit.py | 65 + .../test/test_simple_edit_global_groups.py | 54 + .../test/test_simple_edit_organization.py | 53 + edu_sharing_openapi/test/test_sort.py | 53 + .../test/test_statistic_entity.py | 54 + .../test/test_statistic_entry.py | 62 + edu_sharing_openapi/test/test_statistics.py | 68 + .../test/test_statistics_global.py | 78 + .../test/test_statistics_group.py | 60 + .../test/test_statistics_key_group.py | 62 + .../test/test_statistics_sub_group.py | 56 + .../test/test_statistics_user.py | 51 + .../test/test_statisticv1_api.py | 73 + .../test/test_stored_service.py | 86 + edu_sharing_openapi/test/test_stream.py | 51 + edu_sharing_openapi/test/test_stream_entry.py | 319 + .../test/test_stream_entry_input.py | 60 + edu_sharing_openapi/test/test_stream_list.py | 287 + edu_sharing_openapi/test/test_streamv1_api.py | 73 + .../test/test_sub_group_item.py | 53 + edu_sharing_openapi/test/test_suggest.py | 55 + 
edu_sharing_openapi/test/test_suggestion.py | 55 + .../test/test_suggestion_param.py | 61 + edu_sharing_openapi/test/test_suggestions.py | 62 + edu_sharing_openapi/test/test_tool.py | 56 + edu_sharing_openapi/test/test_tools.py | 59 + edu_sharing_openapi/test/test_toolv1_api.py | 73 + edu_sharing_openapi/test/test_tracking.py | 115 + .../test/test_tracking_authority.py | 128 + .../test/test_tracking_node.py | 318 + .../test/test_trackingv1_api.py | 38 + .../test/test_upload_result.py | 51 + edu_sharing_openapi/test/test_usage.py | 82 + edu_sharing_openapi/test/test_usages.py | 77 + edu_sharing_openapi/test/test_usagev1_api.py | 79 + edu_sharing_openapi/test/test_user.py | 133 + .../test/test_user_credential.py | 53 + .../test/test_user_data_dto.py | 54 + edu_sharing_openapi/test/test_user_entries.py | 174 + edu_sharing_openapi/test/test_user_entry.py | 179 + edu_sharing_openapi/test/test_user_profile.py | 66 + .../test/test_user_profile_app_auth.py | 71 + .../test/test_user_profile_edit.py | 67 + edu_sharing_openapi/test/test_user_quota.py | 53 + edu_sharing_openapi/test/test_user_simple.py | 111 + edu_sharing_openapi/test/test_user_stats.py | 53 + edu_sharing_openapi/test/test_user_status.py | 52 + edu_sharing_openapi/test/test_value.py | 54 + .../test/test_value_parameters.py | 56 + edu_sharing_openapi/test/test_values.py | 301 + edu_sharing_openapi/test/test_variables.py | 56 + edu_sharing_openapi/test/test_version.py | 56 + .../test/test_version_build.py | 51 + edu_sharing_openapi/test/test_version_git.py | 55 + .../test/test_version_git_commit.py | 53 + .../test/test_version_maven.py | 57 + .../test/test_version_project.py | 53 + .../test/test_version_timestamp.py | 51 + .../test/test_website_information.py | 262 + .../test/test_widget_data_dto.py | 52 + .../test/test_workflow_event_dto.py | 60 + .../test/test_workflow_history.py | 120 + edu_sharing_openapi/tox.ini | 9 + 949 files changed, 187353 insertions(+) create mode 100644 edu_sharing_openapi/.github/workflows/python.yml create mode 100644 edu_sharing_openapi/.gitignore create mode 100644 edu_sharing_openapi/.gitlab-ci.yml create mode 100644 edu_sharing_openapi/.openapi-generator-ignore create mode 100644 edu_sharing_openapi/.openapi-generator/FILES create mode 100644 edu_sharing_openapi/.openapi-generator/VERSION create mode 100644 edu_sharing_openapi/.travis.yml create mode 100644 edu_sharing_openapi/README.md create mode 100644 edu_sharing_openapi/docs/ABOUTApi.md create mode 100644 edu_sharing_openapi/docs/ACE.md create mode 100644 edu_sharing_openapi/docs/ACL.md create mode 100644 edu_sharing_openapi/docs/ADMINV1Api.md create mode 100644 edu_sharing_openapi/docs/ARCHIVEV1Api.md create mode 100644 edu_sharing_openapi/docs/AUTHENTICATIONV1Api.md create mode 100644 edu_sharing_openapi/docs/About.md create mode 100644 edu_sharing_openapi/docs/AboutService.md create mode 100644 edu_sharing_openapi/docs/AbstractEntries.md create mode 100644 edu_sharing_openapi/docs/AddToCollectionEventDTO.md create mode 100644 edu_sharing_openapi/docs/Admin.md create mode 100644 edu_sharing_openapi/docs/AdminStatistics.md create mode 100644 edu_sharing_openapi/docs/Application.md create mode 100644 edu_sharing_openapi/docs/Audience.md create mode 100644 edu_sharing_openapi/docs/AuthenticationToken.md create mode 100644 edu_sharing_openapi/docs/Authority.md create mode 100644 edu_sharing_openapi/docs/AuthorityEntries.md create mode 100644 edu_sharing_openapi/docs/AvailableMds.md create mode 100644 edu_sharing_openapi/docs/BULKV1Api.md create mode 
100644 edu_sharing_openapi/docs/Banner.md create mode 100644 edu_sharing_openapi/docs/CLIENTUTILSV1Api.md create mode 100644 edu_sharing_openapi/docs/COLLECTIONV1Api.md create mode 100644 edu_sharing_openapi/docs/COMMENTV1Api.md create mode 100644 edu_sharing_openapi/docs/CONFIGV1Api.md create mode 100644 edu_sharing_openapi/docs/CONNECTORV1Api.md create mode 100644 edu_sharing_openapi/docs/CacheCluster.md create mode 100644 edu_sharing_openapi/docs/CacheInfo.md create mode 100644 edu_sharing_openapi/docs/CacheMember.md create mode 100644 edu_sharing_openapi/docs/Catalog.md create mode 100644 edu_sharing_openapi/docs/Collection.md create mode 100644 edu_sharing_openapi/docs/CollectionCounts.md create mode 100644 edu_sharing_openapi/docs/CollectionDTO.md create mode 100644 edu_sharing_openapi/docs/CollectionEntries.md create mode 100644 edu_sharing_openapi/docs/CollectionEntry.md create mode 100644 edu_sharing_openapi/docs/CollectionOptions.md create mode 100644 edu_sharing_openapi/docs/CollectionProposalEntries.md create mode 100644 edu_sharing_openapi/docs/CollectionReference.md create mode 100644 edu_sharing_openapi/docs/Collections.md create mode 100644 edu_sharing_openapi/docs/CollectionsResult.md create mode 100644 edu_sharing_openapi/docs/Comment.md create mode 100644 edu_sharing_openapi/docs/CommentEventDTO.md create mode 100644 edu_sharing_openapi/docs/Comments.md create mode 100644 edu_sharing_openapi/docs/Condition.md create mode 100644 edu_sharing_openapi/docs/Config.md create mode 100644 edu_sharing_openapi/docs/ConfigFrontpage.md create mode 100644 edu_sharing_openapi/docs/ConfigPrivacy.md create mode 100644 edu_sharing_openapi/docs/ConfigPublish.md create mode 100644 edu_sharing_openapi/docs/ConfigRating.md create mode 100644 edu_sharing_openapi/docs/ConfigRemote.md create mode 100644 edu_sharing_openapi/docs/ConfigThemeColor.md create mode 100644 edu_sharing_openapi/docs/ConfigThemeColors.md create mode 100644 edu_sharing_openapi/docs/ConfigTutorial.md create mode 100644 edu_sharing_openapi/docs/ConfigUpload.md create mode 100644 edu_sharing_openapi/docs/ConfigWorkflow.md create mode 100644 edu_sharing_openapi/docs/ConfigWorkflowList.md create mode 100644 edu_sharing_openapi/docs/Connector.md create mode 100644 edu_sharing_openapi/docs/ConnectorFileType.md create mode 100644 edu_sharing_openapi/docs/ConnectorList.md create mode 100644 edu_sharing_openapi/docs/Content.md create mode 100644 edu_sharing_openapi/docs/ContextMenuEntry.md create mode 100644 edu_sharing_openapi/docs/Contributor.md create mode 100644 edu_sharing_openapi/docs/Counts.md create mode 100644 edu_sharing_openapi/docs/Create.md create mode 100644 edu_sharing_openapi/docs/CreateUsage.md create mode 100644 edu_sharing_openapi/docs/DeleteOption.md create mode 100644 edu_sharing_openapi/docs/DynamicConfig.md create mode 100644 edu_sharing_openapi/docs/DynamicRegistrationToken.md create mode 100644 edu_sharing_openapi/docs/DynamicRegistrationTokens.md create mode 100644 edu_sharing_openapi/docs/Element.md create mode 100644 edu_sharing_openapi/docs/ErrorResponse.md create mode 100644 edu_sharing_openapi/docs/ExcelResult.md create mode 100644 edu_sharing_openapi/docs/FEEDBACKV1Api.md create mode 100644 edu_sharing_openapi/docs/Facet.md create mode 100644 edu_sharing_openapi/docs/FeatureInfo.md create mode 100644 edu_sharing_openapi/docs/FeedbackData.md create mode 100644 edu_sharing_openapi/docs/FeedbackResult.md create mode 100644 edu_sharing_openapi/docs/Filter.md create mode 100644 
edu_sharing_openapi/docs/FilterEntry.md create mode 100644 edu_sharing_openapi/docs/FontIcon.md create mode 100644 edu_sharing_openapi/docs/Frontpage.md create mode 100644 edu_sharing_openapi/docs/General.md create mode 100644 edu_sharing_openapi/docs/Geo.md create mode 100644 edu_sharing_openapi/docs/Group.md create mode 100644 edu_sharing_openapi/docs/GroupEntries.md create mode 100644 edu_sharing_openapi/docs/GroupEntry.md create mode 100644 edu_sharing_openapi/docs/GroupProfile.md create mode 100644 edu_sharing_openapi/docs/GroupSignupDetails.md create mode 100644 edu_sharing_openapi/docs/Guest.md create mode 100644 edu_sharing_openapi/docs/HandleParam.md create mode 100644 edu_sharing_openapi/docs/HelpMenuOptions.md create mode 100644 edu_sharing_openapi/docs/HomeFolderOptions.md create mode 100644 edu_sharing_openapi/docs/IAMV1Api.md create mode 100644 edu_sharing_openapi/docs/Icon.md create mode 100644 edu_sharing_openapi/docs/Image.md create mode 100644 edu_sharing_openapi/docs/Interface.md create mode 100644 edu_sharing_openapi/docs/InviteEventDTO.md create mode 100644 edu_sharing_openapi/docs/JSONObject.md create mode 100644 edu_sharing_openapi/docs/Job.md create mode 100644 edu_sharing_openapi/docs/JobBuilder.md create mode 100644 edu_sharing_openapi/docs/JobDataMap.md create mode 100644 edu_sharing_openapi/docs/JobDescription.md create mode 100644 edu_sharing_openapi/docs/JobDetail.md create mode 100644 edu_sharing_openapi/docs/JobDetailJobDataMap.md create mode 100644 edu_sharing_openapi/docs/JobEntry.md create mode 100644 edu_sharing_openapi/docs/JobFieldDescription.md create mode 100644 edu_sharing_openapi/docs/JobInfo.md create mode 100644 edu_sharing_openapi/docs/JobKey.md create mode 100644 edu_sharing_openapi/docs/KNOWLEDGEV1Api.md create mode 100644 edu_sharing_openapi/docs/KeyValuePair.md create mode 100644 edu_sharing_openapi/docs/LTIPlatformConfiguration.md create mode 100644 edu_sharing_openapi/docs/LTIPlatformV13Api.md create mode 100644 edu_sharing_openapi/docs/LTISession.md create mode 100644 edu_sharing_openapi/docs/LTIToolConfiguration.md create mode 100644 edu_sharing_openapi/docs/LTIV13Api.md create mode 100644 edu_sharing_openapi/docs/Language.md create mode 100644 edu_sharing_openapi/docs/Level.md create mode 100644 edu_sharing_openapi/docs/License.md create mode 100644 edu_sharing_openapi/docs/LicenseAgreement.md create mode 100644 edu_sharing_openapi/docs/LicenseAgreementNode.md create mode 100644 edu_sharing_openapi/docs/Licenses.md create mode 100644 edu_sharing_openapi/docs/Location.md create mode 100644 edu_sharing_openapi/docs/LogEntry.md create mode 100644 edu_sharing_openapi/docs/LoggerConfigResult.md create mode 100644 edu_sharing_openapi/docs/Login.md create mode 100644 edu_sharing_openapi/docs/LoginCredentials.md create mode 100644 edu_sharing_openapi/docs/LogoutInfo.md create mode 100644 edu_sharing_openapi/docs/MDSV1Api.md create mode 100644 edu_sharing_openapi/docs/MEDIACENTERV1Api.md create mode 100644 edu_sharing_openapi/docs/Mainnav.md create mode 100644 edu_sharing_openapi/docs/ManualRegistrationData.md create mode 100644 edu_sharing_openapi/docs/McOrgConnectResult.md create mode 100644 edu_sharing_openapi/docs/Mds.md create mode 100644 edu_sharing_openapi/docs/MdsColumn.md create mode 100644 edu_sharing_openapi/docs/MdsEntries.md create mode 100644 edu_sharing_openapi/docs/MdsGroup.md create mode 100644 edu_sharing_openapi/docs/MdsList.md create mode 100644 edu_sharing_openapi/docs/MdsQueryCriteria.md create mode 100644 
edu_sharing_openapi/docs/MdsSort.md create mode 100644 edu_sharing_openapi/docs/MdsSortColumn.md create mode 100644 edu_sharing_openapi/docs/MdsSortDefault.md create mode 100644 edu_sharing_openapi/docs/MdsSubwidget.md create mode 100644 edu_sharing_openapi/docs/MdsValue.md create mode 100644 edu_sharing_openapi/docs/MdsView.md create mode 100644 edu_sharing_openapi/docs/MdsWidget.md create mode 100644 edu_sharing_openapi/docs/MdsWidgetCondition.md create mode 100644 edu_sharing_openapi/docs/Mediacenter.md create mode 100644 edu_sharing_openapi/docs/MediacenterProfileExtension.md create mode 100644 edu_sharing_openapi/docs/MediacentersImportResult.md create mode 100644 edu_sharing_openapi/docs/MenuEntry.md create mode 100644 edu_sharing_openapi/docs/Message.md create mode 100644 edu_sharing_openapi/docs/MetadataSetInfo.md create mode 100644 edu_sharing_openapi/docs/MetadataSuggestionEventDTO.md create mode 100644 edu_sharing_openapi/docs/NETWORKV1Api.md create mode 100644 edu_sharing_openapi/docs/NODEV1Api.md create mode 100644 edu_sharing_openapi/docs/NOTIFICATIONV1Api.md create mode 100644 edu_sharing_openapi/docs/Node.md create mode 100644 edu_sharing_openapi/docs/NodeCollectionProposalCount.md create mode 100644 edu_sharing_openapi/docs/NodeData.md create mode 100644 edu_sharing_openapi/docs/NodeDataDTO.md create mode 100644 edu_sharing_openapi/docs/NodeEntries.md create mode 100644 edu_sharing_openapi/docs/NodeEntry.md create mode 100644 edu_sharing_openapi/docs/NodeIssueEventDTO.md create mode 100644 edu_sharing_openapi/docs/NodeLTIDeepLink.md create mode 100644 edu_sharing_openapi/docs/NodeLocked.md create mode 100644 edu_sharing_openapi/docs/NodePermissionEntry.md create mode 100644 edu_sharing_openapi/docs/NodePermissions.md create mode 100644 edu_sharing_openapi/docs/NodeRef.md create mode 100644 edu_sharing_openapi/docs/NodeRelation.md create mode 100644 edu_sharing_openapi/docs/NodeRemote.md create mode 100644 edu_sharing_openapi/docs/NodeShare.md create mode 100644 edu_sharing_openapi/docs/NodeStats.md create mode 100644 edu_sharing_openapi/docs/NodeText.md create mode 100644 edu_sharing_openapi/docs/NodeVersion.md create mode 100644 edu_sharing_openapi/docs/NodeVersionEntries.md create mode 100644 edu_sharing_openapi/docs/NodeVersionEntry.md create mode 100644 edu_sharing_openapi/docs/NodeVersionRef.md create mode 100644 edu_sharing_openapi/docs/NodeVersionRefEntries.md create mode 100644 edu_sharing_openapi/docs/NotificationConfig.md create mode 100644 edu_sharing_openapi/docs/NotificationEventDTO.md create mode 100644 edu_sharing_openapi/docs/NotificationIntervals.md create mode 100644 edu_sharing_openapi/docs/NotificationResponsePage.md create mode 100644 edu_sharing_openapi/docs/NotifyEntry.md create mode 100644 edu_sharing_openapi/docs/ORGANIZATIONV1Api.md create mode 100644 edu_sharing_openapi/docs/OpenIdConfiguration.md create mode 100644 edu_sharing_openapi/docs/OpenIdRegistrationResult.md create mode 100644 edu_sharing_openapi/docs/OrganisationsImportResult.md create mode 100644 edu_sharing_openapi/docs/Organization.md create mode 100644 edu_sharing_openapi/docs/OrganizationEntries.md create mode 100644 edu_sharing_openapi/docs/Pageable.md create mode 100644 edu_sharing_openapi/docs/Pagination.md create mode 100644 edu_sharing_openapi/docs/Parameters.md create mode 100644 edu_sharing_openapi/docs/ParentEntries.md create mode 100644 edu_sharing_openapi/docs/Person.md create mode 100644 edu_sharing_openapi/docs/PersonDeleteOptions.md create mode 100644 
edu_sharing_openapi/docs/PersonDeleteResult.md create mode 100644 edu_sharing_openapi/docs/PersonReport.md create mode 100644 edu_sharing_openapi/docs/PluginInfo.md create mode 100644 edu_sharing_openapi/docs/PluginStatus.md create mode 100644 edu_sharing_openapi/docs/Preferences.md create mode 100644 edu_sharing_openapi/docs/Preview.md create mode 100644 edu_sharing_openapi/docs/Profile.md create mode 100644 edu_sharing_openapi/docs/ProfileSettings.md create mode 100644 edu_sharing_openapi/docs/ProposeForCollectionEventDTO.md create mode 100644 edu_sharing_openapi/docs/Provider.md create mode 100644 edu_sharing_openapi/docs/Query.md create mode 100644 edu_sharing_openapi/docs/RATINGV1Api.md create mode 100644 edu_sharing_openapi/docs/REGISTERV1Api.md create mode 100644 edu_sharing_openapi/docs/RELATIONV1Api.md create mode 100644 edu_sharing_openapi/docs/RENDERINGV1Api.md create mode 100644 edu_sharing_openapi/docs/RatingData.md create mode 100644 edu_sharing_openapi/docs/RatingDetails.md create mode 100644 edu_sharing_openapi/docs/RatingEventDTO.md create mode 100644 edu_sharing_openapi/docs/RatingHistory.md create mode 100644 edu_sharing_openapi/docs/ReferenceEntries.md create mode 100644 edu_sharing_openapi/docs/Register.md create mode 100644 edu_sharing_openapi/docs/RegisterExists.md create mode 100644 edu_sharing_openapi/docs/RegisterInformation.md create mode 100644 edu_sharing_openapi/docs/RegistrationUrl.md create mode 100644 edu_sharing_openapi/docs/RelationData.md create mode 100644 edu_sharing_openapi/docs/Remote.md create mode 100644 edu_sharing_openapi/docs/RemoteAuthDescription.md create mode 100644 edu_sharing_openapi/docs/Rendering.md create mode 100644 edu_sharing_openapi/docs/RenderingDetailsEntry.md create mode 100644 edu_sharing_openapi/docs/RenderingGdpr.md create mode 100644 edu_sharing_openapi/docs/Repo.md create mode 100644 edu_sharing_openapi/docs/RepoEntries.md create mode 100644 edu_sharing_openapi/docs/RepositoryConfig.md create mode 100644 edu_sharing_openapi/docs/RepositoryVersionInfo.md create mode 100644 edu_sharing_openapi/docs/RestoreResult.md create mode 100644 edu_sharing_openapi/docs/RestoreResults.md create mode 100644 edu_sharing_openapi/docs/SEARCHV1Api.md create mode 100644 edu_sharing_openapi/docs/SHARINGV1Api.md create mode 100644 edu_sharing_openapi/docs/STATISTICV1Api.md create mode 100644 edu_sharing_openapi/docs/STREAMV1Api.md create mode 100644 edu_sharing_openapi/docs/SearchParameters.md create mode 100644 edu_sharing_openapi/docs/SearchParametersFacets.md create mode 100644 edu_sharing_openapi/docs/SearchResult.md create mode 100644 edu_sharing_openapi/docs/SearchResultElastic.md create mode 100644 edu_sharing_openapi/docs/SearchResultLrmi.md create mode 100644 edu_sharing_openapi/docs/SearchResultNode.md create mode 100644 edu_sharing_openapi/docs/SearchVCard.md create mode 100644 edu_sharing_openapi/docs/ServerUpdateInfo.md create mode 100644 edu_sharing_openapi/docs/Service.md create mode 100644 edu_sharing_openapi/docs/ServiceInstance.md create mode 100644 edu_sharing_openapi/docs/ServiceVersion.md create mode 100644 edu_sharing_openapi/docs/Services.md create mode 100644 edu_sharing_openapi/docs/SharedFolderOptions.md create mode 100644 edu_sharing_openapi/docs/SharingInfo.md create mode 100644 edu_sharing_openapi/docs/SimpleEdit.md create mode 100644 edu_sharing_openapi/docs/SimpleEditGlobalGroups.md create mode 100644 edu_sharing_openapi/docs/SimpleEditOrganization.md create mode 100644 edu_sharing_openapi/docs/Sort.md create mode 
100644 edu_sharing_openapi/docs/StatisticEntity.md create mode 100644 edu_sharing_openapi/docs/StatisticEntry.md create mode 100644 edu_sharing_openapi/docs/Statistics.md create mode 100644 edu_sharing_openapi/docs/StatisticsGlobal.md create mode 100644 edu_sharing_openapi/docs/StatisticsGroup.md create mode 100644 edu_sharing_openapi/docs/StatisticsKeyGroup.md create mode 100644 edu_sharing_openapi/docs/StatisticsSubGroup.md create mode 100644 edu_sharing_openapi/docs/StatisticsUser.md create mode 100644 edu_sharing_openapi/docs/StoredService.md create mode 100644 edu_sharing_openapi/docs/Stream.md create mode 100644 edu_sharing_openapi/docs/StreamEntry.md create mode 100644 edu_sharing_openapi/docs/StreamEntryInput.md create mode 100644 edu_sharing_openapi/docs/StreamList.md create mode 100644 edu_sharing_openapi/docs/SubGroupItem.md create mode 100644 edu_sharing_openapi/docs/Suggest.md create mode 100644 edu_sharing_openapi/docs/Suggestion.md create mode 100644 edu_sharing_openapi/docs/SuggestionParam.md create mode 100644 edu_sharing_openapi/docs/Suggestions.md create mode 100644 edu_sharing_openapi/docs/TOOLV1Api.md create mode 100644 edu_sharing_openapi/docs/TRACKINGV1Api.md create mode 100644 edu_sharing_openapi/docs/Tool.md create mode 100644 edu_sharing_openapi/docs/Tools.md create mode 100644 edu_sharing_openapi/docs/Tracking.md create mode 100644 edu_sharing_openapi/docs/TrackingAuthority.md create mode 100644 edu_sharing_openapi/docs/TrackingNode.md create mode 100644 edu_sharing_openapi/docs/USAGEV1Api.md create mode 100644 edu_sharing_openapi/docs/UploadResult.md create mode 100644 edu_sharing_openapi/docs/Usage.md create mode 100644 edu_sharing_openapi/docs/Usages.md create mode 100644 edu_sharing_openapi/docs/User.md create mode 100644 edu_sharing_openapi/docs/UserCredential.md create mode 100644 edu_sharing_openapi/docs/UserDataDTO.md create mode 100644 edu_sharing_openapi/docs/UserEntries.md create mode 100644 edu_sharing_openapi/docs/UserEntry.md create mode 100644 edu_sharing_openapi/docs/UserProfile.md create mode 100644 edu_sharing_openapi/docs/UserProfileAppAuth.md create mode 100644 edu_sharing_openapi/docs/UserProfileEdit.md create mode 100644 edu_sharing_openapi/docs/UserQuota.md create mode 100644 edu_sharing_openapi/docs/UserSimple.md create mode 100644 edu_sharing_openapi/docs/UserStats.md create mode 100644 edu_sharing_openapi/docs/UserStatus.md create mode 100644 edu_sharing_openapi/docs/Value.md create mode 100644 edu_sharing_openapi/docs/ValueParameters.md create mode 100644 edu_sharing_openapi/docs/Values.md create mode 100644 edu_sharing_openapi/docs/Variables.md create mode 100644 edu_sharing_openapi/docs/Version.md create mode 100644 edu_sharing_openapi/docs/VersionBuild.md create mode 100644 edu_sharing_openapi/docs/VersionGit.md create mode 100644 edu_sharing_openapi/docs/VersionGitCommit.md create mode 100644 edu_sharing_openapi/docs/VersionMaven.md create mode 100644 edu_sharing_openapi/docs/VersionProject.md create mode 100644 edu_sharing_openapi/docs/VersionTimestamp.md create mode 100644 edu_sharing_openapi/docs/WebsiteInformation.md create mode 100644 edu_sharing_openapi/docs/WidgetDataDTO.md create mode 100644 edu_sharing_openapi/docs/WorkflowEventDTO.md create mode 100644 edu_sharing_openapi/docs/WorkflowHistory.md create mode 100644 edu_sharing_openapi/edu_sharing_client/__init__.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/__init__.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/about_api.py create mode 
100644 edu_sharing_openapi/edu_sharing_client/api/adminv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/archivev1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/authenticationv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/bulkv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/clientutilsv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/collectionv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/commentv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/configv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/connectorv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/feedbackv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/iamv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/knowledgev1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/lti_platform_v13_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/ltiv13_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/mdsv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/mediacenterv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/networkv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/nodev1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/notificationv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/organizationv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/ratingv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/registerv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/relationv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/renderingv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/searchv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/sharingv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/statisticv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/streamv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/toolv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/trackingv1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api/usagev1_api.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api_client.py create mode 100644 edu_sharing_openapi/edu_sharing_client/api_response.py create mode 100644 edu_sharing_openapi/edu_sharing_client/configuration.py create mode 100644 edu_sharing_openapi/edu_sharing_client/exceptions.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/__init__.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/about.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/about_service.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/abstract_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/ace.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/acl.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/add_to_collection_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/admin.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/admin_statistics.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/application.py create 
mode 100644 edu_sharing_openapi/edu_sharing_client/models/audience.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/authentication_token.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/authority.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/authority_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/available_mds.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/banner.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/cache_cluster.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/cache_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/cache_member.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/catalog.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection_counts.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection_options.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection_proposal_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collection_reference.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collections.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/collections_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/comment.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/comment_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/comments.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/condition.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_frontpage.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_privacy.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_publish.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_rating.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_remote.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_theme_color.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_theme_colors.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_tutorial.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_upload.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_workflow.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/config_workflow_list.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/connector.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/connector_file_type.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/connector_list.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/content.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/context_menu_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/contributor.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/counts.py create 
mode 100644 edu_sharing_openapi/edu_sharing_client/models/create.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/create_usage.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/delete_option.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/dynamic_config.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_token.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_tokens.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/element.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/error_response.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/excel_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/facet.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/feature_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/feedback_data.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/feedback_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/filter.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/filter_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/font_icon.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/frontpage.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/general.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/geo.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/group.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/group_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/group_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/group_profile.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/group_signup_details.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/guest.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/handle_param.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/help_menu_options.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/home_folder_options.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/icon.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/image.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/interface.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/invite_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_builder.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_data_map.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_description.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_detail.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_detail_job_data_map.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_field_description.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/job_key.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/json_object.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/key_value_pair.py create 
mode 100644 edu_sharing_openapi/edu_sharing_client/models/language.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/level.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/license.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/license_agreement.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/license_agreement_node.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/licenses.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/location.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/log_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/logger_config_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/login.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/login_credentials.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/logout_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/lti_platform_configuration.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/lti_session.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/lti_tool_configuration.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mainnav.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/manual_registration_data.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mc_org_connect_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_column.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_group.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_list.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_query_criteria.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_sort.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_sort_column.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_sort_default.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_subwidget.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_value.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_view.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_widget.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mds_widget_condition.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mediacenter.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mediacenter_profile_extension.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/mediacenters_import_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/menu_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/message.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/metadata_set_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/metadata_suggestion_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_collection_proposal_count.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_data.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_data_dto.py 
create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_issue_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_locked.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_lti_deep_link.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_permission_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_permissions.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_ref.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_relation.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_remote.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_share.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_stats.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_text.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_version.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_version_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_version_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_version_ref.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/node_version_ref_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/notification_config.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/notification_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/notification_intervals.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/notification_response_page.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/notify_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/open_id_configuration.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/open_id_registration_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/organisations_import_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/organization.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/organization_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/pageable.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/pagination.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/parameters.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/parent_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/person.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/person_delete_options.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/person_delete_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/person_report.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/plugin_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/plugin_status.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/preferences.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/preview.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/profile.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/profile_settings.py create mode 100644 
edu_sharing_openapi/edu_sharing_client/models/propose_for_collection_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/provider.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/query.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/rating_data.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/rating_details.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/rating_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/rating_history.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/reference_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/register.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/register_exists.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/register_information.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/registration_url.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/relation_data.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/remote.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/remote_auth_description.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/rendering.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/rendering_details_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/rendering_gdpr.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/repo.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/repo_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/repository_config.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/repository_version_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/restore_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/restore_results.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/search_parameters.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/search_parameters_facets.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/search_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/search_result_elastic.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/search_result_lrmi.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/search_result_node.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/search_v_card.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/server_update_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/service.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/service_instance.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/service_version.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/services.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/shared_folder_options.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/sharing_info.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/simple_edit.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/simple_edit_global_groups.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/simple_edit_organization.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/sort.py create mode 100644 
edu_sharing_openapi/edu_sharing_client/models/statistic_entity.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/statistic_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/statistics.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/statistics_global.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/statistics_group.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/statistics_key_group.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/statistics_sub_group.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/statistics_user.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/stored_service.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/stream.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/stream_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/stream_entry_input.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/stream_list.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/sub_group_item.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/suggest.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/suggestion.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/suggestion_param.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/suggestions.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/tool.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/tools.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/tracking.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/tracking_authority.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/tracking_node.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/upload_result.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/usage.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/usages.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_credential.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_data_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_entries.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_entry.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_profile.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_profile_app_auth.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_profile_edit.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_quota.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_simple.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_stats.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/user_status.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/value.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/value_parameters.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/values.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/variables.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/version.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/version_build.py create mode 
100644 edu_sharing_openapi/edu_sharing_client/models/version_git.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/version_git_commit.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/version_maven.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/version_project.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/version_timestamp.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/website_information.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/widget_data_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/workflow_event_dto.py create mode 100644 edu_sharing_openapi/edu_sharing_client/models/workflow_history.py create mode 100644 edu_sharing_openapi/edu_sharing_client/py.typed create mode 100644 edu_sharing_openapi/edu_sharing_client/rest.py create mode 100644 edu_sharing_openapi/git_push.sh create mode 100644 edu_sharing_openapi/pyproject.toml create mode 100644 edu_sharing_openapi/requirements.txt create mode 100644 edu_sharing_openapi/setup.cfg create mode 100644 edu_sharing_openapi/setup.py create mode 100644 edu_sharing_openapi/test-requirements.txt create mode 100644 edu_sharing_openapi/test/__init__.py create mode 100644 edu_sharing_openapi/test/test_about.py create mode 100644 edu_sharing_openapi/test/test_about_api.py create mode 100644 edu_sharing_openapi/test/test_about_service.py create mode 100644 edu_sharing_openapi/test/test_abstract_entries.py create mode 100644 edu_sharing_openapi/test/test_ace.py create mode 100644 edu_sharing_openapi/test/test_acl.py create mode 100644 edu_sharing_openapi/test/test_add_to_collection_event_dto.py create mode 100644 edu_sharing_openapi/test/test_admin.py create mode 100644 edu_sharing_openapi/test/test_admin_statistics.py create mode 100644 edu_sharing_openapi/test/test_adminv1_api.py create mode 100644 edu_sharing_openapi/test/test_application.py create mode 100644 edu_sharing_openapi/test/test_archivev1_api.py create mode 100644 edu_sharing_openapi/test/test_audience.py create mode 100644 edu_sharing_openapi/test/test_authentication_token.py create mode 100644 edu_sharing_openapi/test/test_authenticationv1_api.py create mode 100644 edu_sharing_openapi/test/test_authority.py create mode 100644 edu_sharing_openapi/test/test_authority_entries.py create mode 100644 edu_sharing_openapi/test/test_available_mds.py create mode 100644 edu_sharing_openapi/test/test_banner.py create mode 100644 edu_sharing_openapi/test/test_bulkv1_api.py create mode 100644 edu_sharing_openapi/test/test_cache_cluster.py create mode 100644 edu_sharing_openapi/test/test_cache_info.py create mode 100644 edu_sharing_openapi/test/test_cache_member.py create mode 100644 edu_sharing_openapi/test/test_catalog.py create mode 100644 edu_sharing_openapi/test/test_clientutilsv1_api.py create mode 100644 edu_sharing_openapi/test/test_collection.py create mode 100644 edu_sharing_openapi/test/test_collection_counts.py create mode 100644 edu_sharing_openapi/test/test_collection_dto.py create mode 100644 edu_sharing_openapi/test/test_collection_entries.py create mode 100644 edu_sharing_openapi/test/test_collection_entry.py create mode 100644 edu_sharing_openapi/test/test_collection_options.py create mode 100644 edu_sharing_openapi/test/test_collection_proposal_entries.py create mode 100644 edu_sharing_openapi/test/test_collection_reference.py create mode 100644 edu_sharing_openapi/test/test_collections.py create mode 100644 
edu_sharing_openapi/test/test_collections_result.py create mode 100644 edu_sharing_openapi/test/test_collectionv1_api.py create mode 100644 edu_sharing_openapi/test/test_comment.py create mode 100644 edu_sharing_openapi/test/test_comment_event_dto.py create mode 100644 edu_sharing_openapi/test/test_comments.py create mode 100644 edu_sharing_openapi/test/test_commentv1_api.py create mode 100644 edu_sharing_openapi/test/test_condition.py create mode 100644 edu_sharing_openapi/test/test_config.py create mode 100644 edu_sharing_openapi/test/test_config_frontpage.py create mode 100644 edu_sharing_openapi/test/test_config_privacy.py create mode 100644 edu_sharing_openapi/test/test_config_publish.py create mode 100644 edu_sharing_openapi/test/test_config_rating.py create mode 100644 edu_sharing_openapi/test/test_config_remote.py create mode 100644 edu_sharing_openapi/test/test_config_theme_color.py create mode 100644 edu_sharing_openapi/test/test_config_theme_colors.py create mode 100644 edu_sharing_openapi/test/test_config_tutorial.py create mode 100644 edu_sharing_openapi/test/test_config_upload.py create mode 100644 edu_sharing_openapi/test/test_config_workflow.py create mode 100644 edu_sharing_openapi/test/test_config_workflow_list.py create mode 100644 edu_sharing_openapi/test/test_configv1_api.py create mode 100644 edu_sharing_openapi/test/test_connector.py create mode 100644 edu_sharing_openapi/test/test_connector_file_type.py create mode 100644 edu_sharing_openapi/test/test_connector_list.py create mode 100644 edu_sharing_openapi/test/test_connectorv1_api.py create mode 100644 edu_sharing_openapi/test/test_content.py create mode 100644 edu_sharing_openapi/test/test_context_menu_entry.py create mode 100644 edu_sharing_openapi/test/test_contributor.py create mode 100644 edu_sharing_openapi/test/test_counts.py create mode 100644 edu_sharing_openapi/test/test_create.py create mode 100644 edu_sharing_openapi/test/test_create_usage.py create mode 100644 edu_sharing_openapi/test/test_delete_option.py create mode 100644 edu_sharing_openapi/test/test_dynamic_config.py create mode 100644 edu_sharing_openapi/test/test_dynamic_registration_token.py create mode 100644 edu_sharing_openapi/test/test_dynamic_registration_tokens.py create mode 100644 edu_sharing_openapi/test/test_element.py create mode 100644 edu_sharing_openapi/test/test_error_response.py create mode 100644 edu_sharing_openapi/test/test_excel_result.py create mode 100644 edu_sharing_openapi/test/test_facet.py create mode 100644 edu_sharing_openapi/test/test_feature_info.py create mode 100644 edu_sharing_openapi/test/test_feedback_data.py create mode 100644 edu_sharing_openapi/test/test_feedback_result.py create mode 100644 edu_sharing_openapi/test/test_feedbackv1_api.py create mode 100644 edu_sharing_openapi/test/test_filter.py create mode 100644 edu_sharing_openapi/test/test_filter_entry.py create mode 100644 edu_sharing_openapi/test/test_font_icon.py create mode 100644 edu_sharing_openapi/test/test_frontpage.py create mode 100644 edu_sharing_openapi/test/test_general.py create mode 100644 edu_sharing_openapi/test/test_geo.py create mode 100644 edu_sharing_openapi/test/test_group.py create mode 100644 edu_sharing_openapi/test/test_group_entries.py create mode 100644 edu_sharing_openapi/test/test_group_entry.py create mode 100644 edu_sharing_openapi/test/test_group_profile.py create mode 100644 edu_sharing_openapi/test/test_group_signup_details.py create mode 100644 edu_sharing_openapi/test/test_guest.py create mode 100644 
edu_sharing_openapi/test/test_handle_param.py create mode 100644 edu_sharing_openapi/test/test_help_menu_options.py create mode 100644 edu_sharing_openapi/test/test_home_folder_options.py create mode 100644 edu_sharing_openapi/test/test_iamv1_api.py create mode 100644 edu_sharing_openapi/test/test_icon.py create mode 100644 edu_sharing_openapi/test/test_image.py create mode 100644 edu_sharing_openapi/test/test_interface.py create mode 100644 edu_sharing_openapi/test/test_invite_event_dto.py create mode 100644 edu_sharing_openapi/test/test_job.py create mode 100644 edu_sharing_openapi/test/test_job_builder.py create mode 100644 edu_sharing_openapi/test/test_job_data_map.py create mode 100644 edu_sharing_openapi/test/test_job_description.py create mode 100644 edu_sharing_openapi/test/test_job_detail.py create mode 100644 edu_sharing_openapi/test/test_job_detail_job_data_map.py create mode 100644 edu_sharing_openapi/test/test_job_entry.py create mode 100644 edu_sharing_openapi/test/test_job_field_description.py create mode 100644 edu_sharing_openapi/test/test_job_info.py create mode 100644 edu_sharing_openapi/test/test_job_key.py create mode 100644 edu_sharing_openapi/test/test_json_object.py create mode 100644 edu_sharing_openapi/test/test_key_value_pair.py create mode 100644 edu_sharing_openapi/test/test_knowledgev1_api.py create mode 100644 edu_sharing_openapi/test/test_language.py create mode 100644 edu_sharing_openapi/test/test_level.py create mode 100644 edu_sharing_openapi/test/test_license.py create mode 100644 edu_sharing_openapi/test/test_license_agreement.py create mode 100644 edu_sharing_openapi/test/test_license_agreement_node.py create mode 100644 edu_sharing_openapi/test/test_licenses.py create mode 100644 edu_sharing_openapi/test/test_location.py create mode 100644 edu_sharing_openapi/test/test_log_entry.py create mode 100644 edu_sharing_openapi/test/test_logger_config_result.py create mode 100644 edu_sharing_openapi/test/test_login.py create mode 100644 edu_sharing_openapi/test/test_login_credentials.py create mode 100644 edu_sharing_openapi/test/test_logout_info.py create mode 100644 edu_sharing_openapi/test/test_lti_platform_configuration.py create mode 100644 edu_sharing_openapi/test/test_lti_platform_v13_api.py create mode 100644 edu_sharing_openapi/test/test_lti_session.py create mode 100644 edu_sharing_openapi/test/test_lti_tool_configuration.py create mode 100644 edu_sharing_openapi/test/test_ltiv13_api.py create mode 100644 edu_sharing_openapi/test/test_mainnav.py create mode 100644 edu_sharing_openapi/test/test_manual_registration_data.py create mode 100644 edu_sharing_openapi/test/test_mc_org_connect_result.py create mode 100644 edu_sharing_openapi/test/test_mds.py create mode 100644 edu_sharing_openapi/test/test_mds_column.py create mode 100644 edu_sharing_openapi/test/test_mds_entries.py create mode 100644 edu_sharing_openapi/test/test_mds_group.py create mode 100644 edu_sharing_openapi/test/test_mds_list.py create mode 100644 edu_sharing_openapi/test/test_mds_query_criteria.py create mode 100644 edu_sharing_openapi/test/test_mds_sort.py create mode 100644 edu_sharing_openapi/test/test_mds_sort_column.py create mode 100644 edu_sharing_openapi/test/test_mds_sort_default.py create mode 100644 edu_sharing_openapi/test/test_mds_subwidget.py create mode 100644 edu_sharing_openapi/test/test_mds_value.py create mode 100644 edu_sharing_openapi/test/test_mds_view.py create mode 100644 edu_sharing_openapi/test/test_mds_widget.py create mode 100644 
edu_sharing_openapi/test/test_mds_widget_condition.py create mode 100644 edu_sharing_openapi/test/test_mdsv1_api.py create mode 100644 edu_sharing_openapi/test/test_mediacenter.py create mode 100644 edu_sharing_openapi/test/test_mediacenter_profile_extension.py create mode 100644 edu_sharing_openapi/test/test_mediacenters_import_result.py create mode 100644 edu_sharing_openapi/test/test_mediacenterv1_api.py create mode 100644 edu_sharing_openapi/test/test_menu_entry.py create mode 100644 edu_sharing_openapi/test/test_message.py create mode 100644 edu_sharing_openapi/test/test_metadata_set_info.py create mode 100644 edu_sharing_openapi/test/test_metadata_suggestion_event_dto.py create mode 100644 edu_sharing_openapi/test/test_networkv1_api.py create mode 100644 edu_sharing_openapi/test/test_node.py create mode 100644 edu_sharing_openapi/test/test_node_collection_proposal_count.py create mode 100644 edu_sharing_openapi/test/test_node_data.py create mode 100644 edu_sharing_openapi/test/test_node_data_dto.py create mode 100644 edu_sharing_openapi/test/test_node_entries.py create mode 100644 edu_sharing_openapi/test/test_node_entry.py create mode 100644 edu_sharing_openapi/test/test_node_issue_event_dto.py create mode 100644 edu_sharing_openapi/test/test_node_locked.py create mode 100644 edu_sharing_openapi/test/test_node_lti_deep_link.py create mode 100644 edu_sharing_openapi/test/test_node_permission_entry.py create mode 100644 edu_sharing_openapi/test/test_node_permissions.py create mode 100644 edu_sharing_openapi/test/test_node_ref.py create mode 100644 edu_sharing_openapi/test/test_node_relation.py create mode 100644 edu_sharing_openapi/test/test_node_remote.py create mode 100644 edu_sharing_openapi/test/test_node_share.py create mode 100644 edu_sharing_openapi/test/test_node_stats.py create mode 100644 edu_sharing_openapi/test/test_node_text.py create mode 100644 edu_sharing_openapi/test/test_node_version.py create mode 100644 edu_sharing_openapi/test/test_node_version_entries.py create mode 100644 edu_sharing_openapi/test/test_node_version_entry.py create mode 100644 edu_sharing_openapi/test/test_node_version_ref.py create mode 100644 edu_sharing_openapi/test/test_node_version_ref_entries.py create mode 100644 edu_sharing_openapi/test/test_nodev1_api.py create mode 100644 edu_sharing_openapi/test/test_notification_config.py create mode 100644 edu_sharing_openapi/test/test_notification_event_dto.py create mode 100644 edu_sharing_openapi/test/test_notification_intervals.py create mode 100644 edu_sharing_openapi/test/test_notification_response_page.py create mode 100644 edu_sharing_openapi/test/test_notificationv1_api.py create mode 100644 edu_sharing_openapi/test/test_notify_entry.py create mode 100644 edu_sharing_openapi/test/test_open_id_configuration.py create mode 100644 edu_sharing_openapi/test/test_open_id_registration_result.py create mode 100644 edu_sharing_openapi/test/test_organisations_import_result.py create mode 100644 edu_sharing_openapi/test/test_organization.py create mode 100644 edu_sharing_openapi/test/test_organization_entries.py create mode 100644 edu_sharing_openapi/test/test_organizationv1_api.py create mode 100644 edu_sharing_openapi/test/test_pageable.py create mode 100644 edu_sharing_openapi/test/test_pagination.py create mode 100644 edu_sharing_openapi/test/test_parameters.py create mode 100644 edu_sharing_openapi/test/test_parent_entries.py create mode 100644 edu_sharing_openapi/test/test_person.py create mode 100644 
edu_sharing_openapi/test/test_person_delete_options.py create mode 100644 edu_sharing_openapi/test/test_person_delete_result.py create mode 100644 edu_sharing_openapi/test/test_person_report.py create mode 100644 edu_sharing_openapi/test/test_plugin_info.py create mode 100644 edu_sharing_openapi/test/test_plugin_status.py create mode 100644 edu_sharing_openapi/test/test_preferences.py create mode 100644 edu_sharing_openapi/test/test_preview.py create mode 100644 edu_sharing_openapi/test/test_profile.py create mode 100644 edu_sharing_openapi/test/test_profile_settings.py create mode 100644 edu_sharing_openapi/test/test_propose_for_collection_event_dto.py create mode 100644 edu_sharing_openapi/test/test_provider.py create mode 100644 edu_sharing_openapi/test/test_query.py create mode 100644 edu_sharing_openapi/test/test_rating_data.py create mode 100644 edu_sharing_openapi/test/test_rating_details.py create mode 100644 edu_sharing_openapi/test/test_rating_event_dto.py create mode 100644 edu_sharing_openapi/test/test_rating_history.py create mode 100644 edu_sharing_openapi/test/test_ratingv1_api.py create mode 100644 edu_sharing_openapi/test/test_reference_entries.py create mode 100644 edu_sharing_openapi/test/test_register.py create mode 100644 edu_sharing_openapi/test/test_register_exists.py create mode 100644 edu_sharing_openapi/test/test_register_information.py create mode 100644 edu_sharing_openapi/test/test_registerv1_api.py create mode 100644 edu_sharing_openapi/test/test_registration_url.py create mode 100644 edu_sharing_openapi/test/test_relation_data.py create mode 100644 edu_sharing_openapi/test/test_relationv1_api.py create mode 100644 edu_sharing_openapi/test/test_remote.py create mode 100644 edu_sharing_openapi/test/test_remote_auth_description.py create mode 100644 edu_sharing_openapi/test/test_rendering.py create mode 100644 edu_sharing_openapi/test/test_rendering_details_entry.py create mode 100644 edu_sharing_openapi/test/test_rendering_gdpr.py create mode 100644 edu_sharing_openapi/test/test_renderingv1_api.py create mode 100644 edu_sharing_openapi/test/test_repo.py create mode 100644 edu_sharing_openapi/test/test_repo_entries.py create mode 100644 edu_sharing_openapi/test/test_repository_config.py create mode 100644 edu_sharing_openapi/test/test_repository_version_info.py create mode 100644 edu_sharing_openapi/test/test_restore_result.py create mode 100644 edu_sharing_openapi/test/test_restore_results.py create mode 100644 edu_sharing_openapi/test/test_search_parameters.py create mode 100644 edu_sharing_openapi/test/test_search_parameters_facets.py create mode 100644 edu_sharing_openapi/test/test_search_result.py create mode 100644 edu_sharing_openapi/test/test_search_result_elastic.py create mode 100644 edu_sharing_openapi/test/test_search_result_lrmi.py create mode 100644 edu_sharing_openapi/test/test_search_result_node.py create mode 100644 edu_sharing_openapi/test/test_search_v_card.py create mode 100644 edu_sharing_openapi/test/test_searchv1_api.py create mode 100644 edu_sharing_openapi/test/test_server_update_info.py create mode 100644 edu_sharing_openapi/test/test_service.py create mode 100644 edu_sharing_openapi/test/test_service_instance.py create mode 100644 edu_sharing_openapi/test/test_service_version.py create mode 100644 edu_sharing_openapi/test/test_services.py create mode 100644 edu_sharing_openapi/test/test_shared_folder_options.py create mode 100644 edu_sharing_openapi/test/test_sharing_info.py create mode 100644 
edu_sharing_openapi/test/test_sharingv1_api.py create mode 100644 edu_sharing_openapi/test/test_simple_edit.py create mode 100644 edu_sharing_openapi/test/test_simple_edit_global_groups.py create mode 100644 edu_sharing_openapi/test/test_simple_edit_organization.py create mode 100644 edu_sharing_openapi/test/test_sort.py create mode 100644 edu_sharing_openapi/test/test_statistic_entity.py create mode 100644 edu_sharing_openapi/test/test_statistic_entry.py create mode 100644 edu_sharing_openapi/test/test_statistics.py create mode 100644 edu_sharing_openapi/test/test_statistics_global.py create mode 100644 edu_sharing_openapi/test/test_statistics_group.py create mode 100644 edu_sharing_openapi/test/test_statistics_key_group.py create mode 100644 edu_sharing_openapi/test/test_statistics_sub_group.py create mode 100644 edu_sharing_openapi/test/test_statistics_user.py create mode 100644 edu_sharing_openapi/test/test_statisticv1_api.py create mode 100644 edu_sharing_openapi/test/test_stored_service.py create mode 100644 edu_sharing_openapi/test/test_stream.py create mode 100644 edu_sharing_openapi/test/test_stream_entry.py create mode 100644 edu_sharing_openapi/test/test_stream_entry_input.py create mode 100644 edu_sharing_openapi/test/test_stream_list.py create mode 100644 edu_sharing_openapi/test/test_streamv1_api.py create mode 100644 edu_sharing_openapi/test/test_sub_group_item.py create mode 100644 edu_sharing_openapi/test/test_suggest.py create mode 100644 edu_sharing_openapi/test/test_suggestion.py create mode 100644 edu_sharing_openapi/test/test_suggestion_param.py create mode 100644 edu_sharing_openapi/test/test_suggestions.py create mode 100644 edu_sharing_openapi/test/test_tool.py create mode 100644 edu_sharing_openapi/test/test_tools.py create mode 100644 edu_sharing_openapi/test/test_toolv1_api.py create mode 100644 edu_sharing_openapi/test/test_tracking.py create mode 100644 edu_sharing_openapi/test/test_tracking_authority.py create mode 100644 edu_sharing_openapi/test/test_tracking_node.py create mode 100644 edu_sharing_openapi/test/test_trackingv1_api.py create mode 100644 edu_sharing_openapi/test/test_upload_result.py create mode 100644 edu_sharing_openapi/test/test_usage.py create mode 100644 edu_sharing_openapi/test/test_usages.py create mode 100644 edu_sharing_openapi/test/test_usagev1_api.py create mode 100644 edu_sharing_openapi/test/test_user.py create mode 100644 edu_sharing_openapi/test/test_user_credential.py create mode 100644 edu_sharing_openapi/test/test_user_data_dto.py create mode 100644 edu_sharing_openapi/test/test_user_entries.py create mode 100644 edu_sharing_openapi/test/test_user_entry.py create mode 100644 edu_sharing_openapi/test/test_user_profile.py create mode 100644 edu_sharing_openapi/test/test_user_profile_app_auth.py create mode 100644 edu_sharing_openapi/test/test_user_profile_edit.py create mode 100644 edu_sharing_openapi/test/test_user_quota.py create mode 100644 edu_sharing_openapi/test/test_user_simple.py create mode 100644 edu_sharing_openapi/test/test_user_stats.py create mode 100644 edu_sharing_openapi/test/test_user_status.py create mode 100644 edu_sharing_openapi/test/test_value.py create mode 100644 edu_sharing_openapi/test/test_value_parameters.py create mode 100644 edu_sharing_openapi/test/test_values.py create mode 100644 edu_sharing_openapi/test/test_variables.py create mode 100644 edu_sharing_openapi/test/test_version.py create mode 100644 edu_sharing_openapi/test/test_version_build.py create mode 100644 
edu_sharing_openapi/test/test_version_git.py create mode 100644 edu_sharing_openapi/test/test_version_git_commit.py create mode 100644 edu_sharing_openapi/test/test_version_maven.py create mode 100644 edu_sharing_openapi/test/test_version_project.py create mode 100644 edu_sharing_openapi/test/test_version_timestamp.py create mode 100644 edu_sharing_openapi/test/test_website_information.py create mode 100644 edu_sharing_openapi/test/test_widget_data_dto.py create mode 100644 edu_sharing_openapi/test/test_workflow_event_dto.py create mode 100644 edu_sharing_openapi/test/test_workflow_history.py create mode 100644 edu_sharing_openapi/tox.ini diff --git a/edu_sharing_openapi/.github/workflows/python.yml b/edu_sharing_openapi/.github/workflows/python.yml new file mode 100644 index 00000000..11728db0 --- /dev/null +++ b/edu_sharing_openapi/.github/workflows/python.yml @@ -0,0 +1,38 @@ +# NOTE: This file is auto generated by OpenAPI Generator. +# URL: https://openapi-generator.tech +# +# ref: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: edu_sharing_client Python package + +on: [push, pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 pytest + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + if [ -f test-requirements.txt ]; then pip install -r test-requirements.txt; fi + - name: Lint with flake8 + run: | + # stop the build if there are Python syntax errors or undefined names + flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide + flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + - name: Test with pytest + run: | + pytest diff --git a/edu_sharing_openapi/.gitignore b/edu_sharing_openapi/.gitignore new file mode 100644 index 00000000..43995bd4 --- /dev/null +++ b/edu_sharing_openapi/.gitignore @@ -0,0 +1,66 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/edu_sharing_openapi/.gitlab-ci.yml b/edu_sharing_openapi/.gitlab-ci.yml new file mode 100644 index 00000000..0a48904f --- /dev/null +++ b/edu_sharing_openapi/.gitlab-ci.yml @@ -0,0 +1,31 @@ +# NOTE: This file is auto generated by OpenAPI Generator. 
+# URL: https://openapi-generator.tech
+#
+# ref: https://docs.gitlab.com/ee/ci/README.html
+# ref: https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Python.gitlab-ci.yml
+
+stages:
+  - test
+
+.pytest:
+  stage: test
+  script:
+    - pip install -r requirements.txt
+    - pip install -r test-requirements.txt
+    - pytest --cov=edu_sharing_client
+
+pytest-3.7:
+  extends: .pytest
+  image: python:3.7-alpine
+pytest-3.8:
+  extends: .pytest
+  image: python:3.8-alpine
+pytest-3.9:
+  extends: .pytest
+  image: python:3.9-alpine
+pytest-3.10:
+  extends: .pytest
+  image: python:3.10-alpine
+pytest-3.11:
+  extends: .pytest
+  image: python:3.11-alpine
diff --git a/edu_sharing_openapi/.openapi-generator-ignore b/edu_sharing_openapi/.openapi-generator-ignore
new file mode 100644
index 00000000..7484ee59
--- /dev/null
+++ b/edu_sharing_openapi/.openapi-generator-ignore
@@ -0,0 +1,23 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md diff --git a/edu_sharing_openapi/.openapi-generator/FILES b/edu_sharing_openapi/.openapi-generator/FILES new file mode 100644 index 00000000..065b5304 --- /dev/null +++ b/edu_sharing_openapi/.openapi-generator/FILES @@ -0,0 +1,947 @@ +.github/workflows/python.yml +.gitignore +.gitlab-ci.yml +.openapi-generator-ignore +.travis.yml +README.md +docs/ABOUTApi.md +docs/ACE.md +docs/ACL.md +docs/ADMINV1Api.md +docs/ARCHIVEV1Api.md +docs/AUTHENTICATIONV1Api.md +docs/About.md +docs/AboutService.md +docs/AbstractEntries.md +docs/AddToCollectionEventDTO.md +docs/Admin.md +docs/AdminStatistics.md +docs/Application.md +docs/Audience.md +docs/AuthenticationToken.md +docs/Authority.md +docs/AuthorityEntries.md +docs/AvailableMds.md +docs/BULKV1Api.md +docs/Banner.md +docs/CLIENTUTILSV1Api.md +docs/COLLECTIONV1Api.md +docs/COMMENTV1Api.md +docs/CONFIGV1Api.md +docs/CONNECTORV1Api.md +docs/CacheCluster.md +docs/CacheInfo.md +docs/CacheMember.md +docs/Catalog.md +docs/Collection.md +docs/CollectionCounts.md +docs/CollectionDTO.md +docs/CollectionEntries.md +docs/CollectionEntry.md +docs/CollectionOptions.md +docs/CollectionProposalEntries.md +docs/CollectionReference.md +docs/Collections.md +docs/CollectionsResult.md +docs/Comment.md +docs/CommentEventDTO.md +docs/Comments.md +docs/Condition.md +docs/Config.md +docs/ConfigFrontpage.md +docs/ConfigPrivacy.md +docs/ConfigPublish.md +docs/ConfigRating.md +docs/ConfigRemote.md +docs/ConfigThemeColor.md +docs/ConfigThemeColors.md +docs/ConfigTutorial.md +docs/ConfigUpload.md +docs/ConfigWorkflow.md +docs/ConfigWorkflowList.md +docs/Connector.md +docs/ConnectorFileType.md +docs/ConnectorList.md +docs/Content.md +docs/ContextMenuEntry.md +docs/Contributor.md +docs/Counts.md +docs/Create.md +docs/CreateUsage.md +docs/DeleteOption.md +docs/DynamicConfig.md +docs/DynamicRegistrationToken.md +docs/DynamicRegistrationTokens.md +docs/Element.md +docs/ErrorResponse.md +docs/ExcelResult.md +docs/FEEDBACKV1Api.md +docs/Facet.md +docs/FeatureInfo.md +docs/FeedbackData.md +docs/FeedbackResult.md +docs/Filter.md +docs/FilterEntry.md +docs/FontIcon.md +docs/Frontpage.md +docs/General.md +docs/Geo.md +docs/Group.md +docs/GroupEntries.md +docs/GroupEntry.md +docs/GroupProfile.md +docs/GroupSignupDetails.md +docs/Guest.md +docs/HandleParam.md +docs/HelpMenuOptions.md +docs/HomeFolderOptions.md +docs/IAMV1Api.md +docs/Icon.md +docs/Image.md +docs/Interface.md +docs/InviteEventDTO.md +docs/JSONObject.md +docs/Job.md +docs/JobBuilder.md +docs/JobDataMap.md +docs/JobDescription.md +docs/JobDetail.md +docs/JobDetailJobDataMap.md +docs/JobEntry.md +docs/JobFieldDescription.md +docs/JobInfo.md +docs/JobKey.md +docs/KNOWLEDGEV1Api.md +docs/KeyValuePair.md +docs/LTIPlatformConfiguration.md +docs/LTIPlatformV13Api.md +docs/LTISession.md +docs/LTIToolConfiguration.md +docs/LTIV13Api.md +docs/Language.md +docs/Level.md +docs/License.md +docs/LicenseAgreement.md +docs/LicenseAgreementNode.md +docs/Licenses.md +docs/Location.md +docs/LogEntry.md +docs/LoggerConfigResult.md +docs/Login.md +docs/LoginCredentials.md +docs/LogoutInfo.md +docs/MDSV1Api.md +docs/MEDIACENTERV1Api.md +docs/Mainnav.md +docs/ManualRegistrationData.md +docs/McOrgConnectResult.md +docs/Mds.md +docs/MdsColumn.md +docs/MdsEntries.md +docs/MdsGroup.md +docs/MdsList.md +docs/MdsQueryCriteria.md +docs/MdsSort.md +docs/MdsSortColumn.md 
+docs/MdsSortDefault.md +docs/MdsSubwidget.md +docs/MdsValue.md +docs/MdsView.md +docs/MdsWidget.md +docs/MdsWidgetCondition.md +docs/Mediacenter.md +docs/MediacenterProfileExtension.md +docs/MediacentersImportResult.md +docs/MenuEntry.md +docs/Message.md +docs/MetadataSetInfo.md +docs/MetadataSuggestionEventDTO.md +docs/NETWORKV1Api.md +docs/NODEV1Api.md +docs/NOTIFICATIONV1Api.md +docs/Node.md +docs/NodeCollectionProposalCount.md +docs/NodeData.md +docs/NodeDataDTO.md +docs/NodeEntries.md +docs/NodeEntry.md +docs/NodeIssueEventDTO.md +docs/NodeLTIDeepLink.md +docs/NodeLocked.md +docs/NodePermissionEntry.md +docs/NodePermissions.md +docs/NodeRef.md +docs/NodeRelation.md +docs/NodeRemote.md +docs/NodeShare.md +docs/NodeStats.md +docs/NodeText.md +docs/NodeVersion.md +docs/NodeVersionEntries.md +docs/NodeVersionEntry.md +docs/NodeVersionRef.md +docs/NodeVersionRefEntries.md +docs/NotificationConfig.md +docs/NotificationEventDTO.md +docs/NotificationIntervals.md +docs/NotificationResponsePage.md +docs/NotifyEntry.md +docs/ORGANIZATIONV1Api.md +docs/OpenIdConfiguration.md +docs/OpenIdRegistrationResult.md +docs/OrganisationsImportResult.md +docs/Organization.md +docs/OrganizationEntries.md +docs/Pageable.md +docs/Pagination.md +docs/Parameters.md +docs/ParentEntries.md +docs/Person.md +docs/PersonDeleteOptions.md +docs/PersonDeleteResult.md +docs/PersonReport.md +docs/PluginInfo.md +docs/PluginStatus.md +docs/Preferences.md +docs/Preview.md +docs/Profile.md +docs/ProfileSettings.md +docs/ProposeForCollectionEventDTO.md +docs/Provider.md +docs/Query.md +docs/RATINGV1Api.md +docs/REGISTERV1Api.md +docs/RELATIONV1Api.md +docs/RENDERINGV1Api.md +docs/RatingData.md +docs/RatingDetails.md +docs/RatingEventDTO.md +docs/RatingHistory.md +docs/ReferenceEntries.md +docs/Register.md +docs/RegisterExists.md +docs/RegisterInformation.md +docs/RegistrationUrl.md +docs/RelationData.md +docs/Remote.md +docs/RemoteAuthDescription.md +docs/Rendering.md +docs/RenderingDetailsEntry.md +docs/RenderingGdpr.md +docs/Repo.md +docs/RepoEntries.md +docs/RepositoryConfig.md +docs/RepositoryVersionInfo.md +docs/RestoreResult.md +docs/RestoreResults.md +docs/SEARCHV1Api.md +docs/SHARINGV1Api.md +docs/STATISTICV1Api.md +docs/STREAMV1Api.md +docs/SearchParameters.md +docs/SearchParametersFacets.md +docs/SearchResult.md +docs/SearchResultElastic.md +docs/SearchResultLrmi.md +docs/SearchResultNode.md +docs/SearchVCard.md +docs/ServerUpdateInfo.md +docs/Service.md +docs/ServiceInstance.md +docs/ServiceVersion.md +docs/Services.md +docs/SharedFolderOptions.md +docs/SharingInfo.md +docs/SimpleEdit.md +docs/SimpleEditGlobalGroups.md +docs/SimpleEditOrganization.md +docs/Sort.md +docs/StatisticEntity.md +docs/StatisticEntry.md +docs/Statistics.md +docs/StatisticsGlobal.md +docs/StatisticsGroup.md +docs/StatisticsKeyGroup.md +docs/StatisticsSubGroup.md +docs/StatisticsUser.md +docs/StoredService.md +docs/Stream.md +docs/StreamEntry.md +docs/StreamEntryInput.md +docs/StreamList.md +docs/SubGroupItem.md +docs/Suggest.md +docs/Suggestion.md +docs/SuggestionParam.md +docs/Suggestions.md +docs/TOOLV1Api.md +docs/TRACKINGV1Api.md +docs/Tool.md +docs/Tools.md +docs/Tracking.md +docs/TrackingAuthority.md +docs/TrackingNode.md +docs/USAGEV1Api.md +docs/UploadResult.md +docs/Usage.md +docs/Usages.md +docs/User.md +docs/UserCredential.md +docs/UserDataDTO.md +docs/UserEntries.md +docs/UserEntry.md +docs/UserProfile.md +docs/UserProfileAppAuth.md +docs/UserProfileEdit.md +docs/UserQuota.md +docs/UserSimple.md +docs/UserStats.md 
+docs/UserStatus.md +docs/Value.md +docs/ValueParameters.md +docs/Values.md +docs/Variables.md +docs/Version.md +docs/VersionBuild.md +docs/VersionGit.md +docs/VersionGitCommit.md +docs/VersionMaven.md +docs/VersionProject.md +docs/VersionTimestamp.md +docs/WebsiteInformation.md +docs/WidgetDataDTO.md +docs/WorkflowEventDTO.md +docs/WorkflowHistory.md +edu_sharing_client/__init__.py +edu_sharing_client/api/__init__.py +edu_sharing_client/api/about_api.py +edu_sharing_client/api/adminv1_api.py +edu_sharing_client/api/archivev1_api.py +edu_sharing_client/api/authenticationv1_api.py +edu_sharing_client/api/bulkv1_api.py +edu_sharing_client/api/clientutilsv1_api.py +edu_sharing_client/api/collectionv1_api.py +edu_sharing_client/api/commentv1_api.py +edu_sharing_client/api/configv1_api.py +edu_sharing_client/api/connectorv1_api.py +edu_sharing_client/api/feedbackv1_api.py +edu_sharing_client/api/iamv1_api.py +edu_sharing_client/api/knowledgev1_api.py +edu_sharing_client/api/lti_platform_v13_api.py +edu_sharing_client/api/ltiv13_api.py +edu_sharing_client/api/mdsv1_api.py +edu_sharing_client/api/mediacenterv1_api.py +edu_sharing_client/api/networkv1_api.py +edu_sharing_client/api/nodev1_api.py +edu_sharing_client/api/notificationv1_api.py +edu_sharing_client/api/organizationv1_api.py +edu_sharing_client/api/ratingv1_api.py +edu_sharing_client/api/registerv1_api.py +edu_sharing_client/api/relationv1_api.py +edu_sharing_client/api/renderingv1_api.py +edu_sharing_client/api/searchv1_api.py +edu_sharing_client/api/sharingv1_api.py +edu_sharing_client/api/statisticv1_api.py +edu_sharing_client/api/streamv1_api.py +edu_sharing_client/api/toolv1_api.py +edu_sharing_client/api/trackingv1_api.py +edu_sharing_client/api/usagev1_api.py +edu_sharing_client/api_client.py +edu_sharing_client/api_response.py +edu_sharing_client/configuration.py +edu_sharing_client/exceptions.py +edu_sharing_client/models/__init__.py +edu_sharing_client/models/about.py +edu_sharing_client/models/about_service.py +edu_sharing_client/models/abstract_entries.py +edu_sharing_client/models/ace.py +edu_sharing_client/models/acl.py +edu_sharing_client/models/add_to_collection_event_dto.py +edu_sharing_client/models/admin.py +edu_sharing_client/models/admin_statistics.py +edu_sharing_client/models/application.py +edu_sharing_client/models/audience.py +edu_sharing_client/models/authentication_token.py +edu_sharing_client/models/authority.py +edu_sharing_client/models/authority_entries.py +edu_sharing_client/models/available_mds.py +edu_sharing_client/models/banner.py +edu_sharing_client/models/cache_cluster.py +edu_sharing_client/models/cache_info.py +edu_sharing_client/models/cache_member.py +edu_sharing_client/models/catalog.py +edu_sharing_client/models/collection.py +edu_sharing_client/models/collection_counts.py +edu_sharing_client/models/collection_dto.py +edu_sharing_client/models/collection_entries.py +edu_sharing_client/models/collection_entry.py +edu_sharing_client/models/collection_options.py +edu_sharing_client/models/collection_proposal_entries.py +edu_sharing_client/models/collection_reference.py +edu_sharing_client/models/collections.py +edu_sharing_client/models/collections_result.py +edu_sharing_client/models/comment.py +edu_sharing_client/models/comment_event_dto.py +edu_sharing_client/models/comments.py +edu_sharing_client/models/condition.py +edu_sharing_client/models/config.py +edu_sharing_client/models/config_frontpage.py +edu_sharing_client/models/config_privacy.py +edu_sharing_client/models/config_publish.py 
+edu_sharing_client/models/config_rating.py +edu_sharing_client/models/config_remote.py +edu_sharing_client/models/config_theme_color.py +edu_sharing_client/models/config_theme_colors.py +edu_sharing_client/models/config_tutorial.py +edu_sharing_client/models/config_upload.py +edu_sharing_client/models/config_workflow.py +edu_sharing_client/models/config_workflow_list.py +edu_sharing_client/models/connector.py +edu_sharing_client/models/connector_file_type.py +edu_sharing_client/models/connector_list.py +edu_sharing_client/models/content.py +edu_sharing_client/models/context_menu_entry.py +edu_sharing_client/models/contributor.py +edu_sharing_client/models/counts.py +edu_sharing_client/models/create.py +edu_sharing_client/models/create_usage.py +edu_sharing_client/models/delete_option.py +edu_sharing_client/models/dynamic_config.py +edu_sharing_client/models/dynamic_registration_token.py +edu_sharing_client/models/dynamic_registration_tokens.py +edu_sharing_client/models/element.py +edu_sharing_client/models/error_response.py +edu_sharing_client/models/excel_result.py +edu_sharing_client/models/facet.py +edu_sharing_client/models/feature_info.py +edu_sharing_client/models/feedback_data.py +edu_sharing_client/models/feedback_result.py +edu_sharing_client/models/filter.py +edu_sharing_client/models/filter_entry.py +edu_sharing_client/models/font_icon.py +edu_sharing_client/models/frontpage.py +edu_sharing_client/models/general.py +edu_sharing_client/models/geo.py +edu_sharing_client/models/group.py +edu_sharing_client/models/group_entries.py +edu_sharing_client/models/group_entry.py +edu_sharing_client/models/group_profile.py +edu_sharing_client/models/group_signup_details.py +edu_sharing_client/models/guest.py +edu_sharing_client/models/handle_param.py +edu_sharing_client/models/help_menu_options.py +edu_sharing_client/models/home_folder_options.py +edu_sharing_client/models/icon.py +edu_sharing_client/models/image.py +edu_sharing_client/models/interface.py +edu_sharing_client/models/invite_event_dto.py +edu_sharing_client/models/job.py +edu_sharing_client/models/job_builder.py +edu_sharing_client/models/job_data_map.py +edu_sharing_client/models/job_description.py +edu_sharing_client/models/job_detail.py +edu_sharing_client/models/job_detail_job_data_map.py +edu_sharing_client/models/job_entry.py +edu_sharing_client/models/job_field_description.py +edu_sharing_client/models/job_info.py +edu_sharing_client/models/job_key.py +edu_sharing_client/models/json_object.py +edu_sharing_client/models/key_value_pair.py +edu_sharing_client/models/language.py +edu_sharing_client/models/level.py +edu_sharing_client/models/license.py +edu_sharing_client/models/license_agreement.py +edu_sharing_client/models/license_agreement_node.py +edu_sharing_client/models/licenses.py +edu_sharing_client/models/location.py +edu_sharing_client/models/log_entry.py +edu_sharing_client/models/logger_config_result.py +edu_sharing_client/models/login.py +edu_sharing_client/models/login_credentials.py +edu_sharing_client/models/logout_info.py +edu_sharing_client/models/lti_platform_configuration.py +edu_sharing_client/models/lti_session.py +edu_sharing_client/models/lti_tool_configuration.py +edu_sharing_client/models/mainnav.py +edu_sharing_client/models/manual_registration_data.py +edu_sharing_client/models/mc_org_connect_result.py +edu_sharing_client/models/mds.py +edu_sharing_client/models/mds_column.py +edu_sharing_client/models/mds_entries.py +edu_sharing_client/models/mds_group.py 
+edu_sharing_client/models/mds_list.py +edu_sharing_client/models/mds_query_criteria.py +edu_sharing_client/models/mds_sort.py +edu_sharing_client/models/mds_sort_column.py +edu_sharing_client/models/mds_sort_default.py +edu_sharing_client/models/mds_subwidget.py +edu_sharing_client/models/mds_value.py +edu_sharing_client/models/mds_view.py +edu_sharing_client/models/mds_widget.py +edu_sharing_client/models/mds_widget_condition.py +edu_sharing_client/models/mediacenter.py +edu_sharing_client/models/mediacenter_profile_extension.py +edu_sharing_client/models/mediacenters_import_result.py +edu_sharing_client/models/menu_entry.py +edu_sharing_client/models/message.py +edu_sharing_client/models/metadata_set_info.py +edu_sharing_client/models/metadata_suggestion_event_dto.py +edu_sharing_client/models/node.py +edu_sharing_client/models/node_collection_proposal_count.py +edu_sharing_client/models/node_data.py +edu_sharing_client/models/node_data_dto.py +edu_sharing_client/models/node_entries.py +edu_sharing_client/models/node_entry.py +edu_sharing_client/models/node_issue_event_dto.py +edu_sharing_client/models/node_locked.py +edu_sharing_client/models/node_lti_deep_link.py +edu_sharing_client/models/node_permission_entry.py +edu_sharing_client/models/node_permissions.py +edu_sharing_client/models/node_ref.py +edu_sharing_client/models/node_relation.py +edu_sharing_client/models/node_remote.py +edu_sharing_client/models/node_share.py +edu_sharing_client/models/node_stats.py +edu_sharing_client/models/node_text.py +edu_sharing_client/models/node_version.py +edu_sharing_client/models/node_version_entries.py +edu_sharing_client/models/node_version_entry.py +edu_sharing_client/models/node_version_ref.py +edu_sharing_client/models/node_version_ref_entries.py +edu_sharing_client/models/notification_config.py +edu_sharing_client/models/notification_event_dto.py +edu_sharing_client/models/notification_intervals.py +edu_sharing_client/models/notification_response_page.py +edu_sharing_client/models/notify_entry.py +edu_sharing_client/models/open_id_configuration.py +edu_sharing_client/models/open_id_registration_result.py +edu_sharing_client/models/organisations_import_result.py +edu_sharing_client/models/organization.py +edu_sharing_client/models/organization_entries.py +edu_sharing_client/models/pageable.py +edu_sharing_client/models/pagination.py +edu_sharing_client/models/parameters.py +edu_sharing_client/models/parent_entries.py +edu_sharing_client/models/person.py +edu_sharing_client/models/person_delete_options.py +edu_sharing_client/models/person_delete_result.py +edu_sharing_client/models/person_report.py +edu_sharing_client/models/plugin_info.py +edu_sharing_client/models/plugin_status.py +edu_sharing_client/models/preferences.py +edu_sharing_client/models/preview.py +edu_sharing_client/models/profile.py +edu_sharing_client/models/profile_settings.py +edu_sharing_client/models/propose_for_collection_event_dto.py +edu_sharing_client/models/provider.py +edu_sharing_client/models/query.py +edu_sharing_client/models/rating_data.py +edu_sharing_client/models/rating_details.py +edu_sharing_client/models/rating_event_dto.py +edu_sharing_client/models/rating_history.py +edu_sharing_client/models/reference_entries.py +edu_sharing_client/models/register.py +edu_sharing_client/models/register_exists.py +edu_sharing_client/models/register_information.py +edu_sharing_client/models/registration_url.py +edu_sharing_client/models/relation_data.py +edu_sharing_client/models/remote.py 
+edu_sharing_client/models/remote_auth_description.py +edu_sharing_client/models/rendering.py +edu_sharing_client/models/rendering_details_entry.py +edu_sharing_client/models/rendering_gdpr.py +edu_sharing_client/models/repo.py +edu_sharing_client/models/repo_entries.py +edu_sharing_client/models/repository_config.py +edu_sharing_client/models/repository_version_info.py +edu_sharing_client/models/restore_result.py +edu_sharing_client/models/restore_results.py +edu_sharing_client/models/search_parameters.py +edu_sharing_client/models/search_parameters_facets.py +edu_sharing_client/models/search_result.py +edu_sharing_client/models/search_result_elastic.py +edu_sharing_client/models/search_result_lrmi.py +edu_sharing_client/models/search_result_node.py +edu_sharing_client/models/search_v_card.py +edu_sharing_client/models/server_update_info.py +edu_sharing_client/models/service.py +edu_sharing_client/models/service_instance.py +edu_sharing_client/models/service_version.py +edu_sharing_client/models/services.py +edu_sharing_client/models/shared_folder_options.py +edu_sharing_client/models/sharing_info.py +edu_sharing_client/models/simple_edit.py +edu_sharing_client/models/simple_edit_global_groups.py +edu_sharing_client/models/simple_edit_organization.py +edu_sharing_client/models/sort.py +edu_sharing_client/models/statistic_entity.py +edu_sharing_client/models/statistic_entry.py +edu_sharing_client/models/statistics.py +edu_sharing_client/models/statistics_global.py +edu_sharing_client/models/statistics_group.py +edu_sharing_client/models/statistics_key_group.py +edu_sharing_client/models/statistics_sub_group.py +edu_sharing_client/models/statistics_user.py +edu_sharing_client/models/stored_service.py +edu_sharing_client/models/stream.py +edu_sharing_client/models/stream_entry.py +edu_sharing_client/models/stream_entry_input.py +edu_sharing_client/models/stream_list.py +edu_sharing_client/models/sub_group_item.py +edu_sharing_client/models/suggest.py +edu_sharing_client/models/suggestion.py +edu_sharing_client/models/suggestion_param.py +edu_sharing_client/models/suggestions.py +edu_sharing_client/models/tool.py +edu_sharing_client/models/tools.py +edu_sharing_client/models/tracking.py +edu_sharing_client/models/tracking_authority.py +edu_sharing_client/models/tracking_node.py +edu_sharing_client/models/upload_result.py +edu_sharing_client/models/usage.py +edu_sharing_client/models/usages.py +edu_sharing_client/models/user.py +edu_sharing_client/models/user_credential.py +edu_sharing_client/models/user_data_dto.py +edu_sharing_client/models/user_entries.py +edu_sharing_client/models/user_entry.py +edu_sharing_client/models/user_profile.py +edu_sharing_client/models/user_profile_app_auth.py +edu_sharing_client/models/user_profile_edit.py +edu_sharing_client/models/user_quota.py +edu_sharing_client/models/user_simple.py +edu_sharing_client/models/user_stats.py +edu_sharing_client/models/user_status.py +edu_sharing_client/models/value.py +edu_sharing_client/models/value_parameters.py +edu_sharing_client/models/values.py +edu_sharing_client/models/variables.py +edu_sharing_client/models/version.py +edu_sharing_client/models/version_build.py +edu_sharing_client/models/version_git.py +edu_sharing_client/models/version_git_commit.py +edu_sharing_client/models/version_maven.py +edu_sharing_client/models/version_project.py +edu_sharing_client/models/version_timestamp.py +edu_sharing_client/models/website_information.py +edu_sharing_client/models/widget_data_dto.py 
+edu_sharing_client/models/workflow_event_dto.py +edu_sharing_client/models/workflow_history.py +edu_sharing_client/py.typed +edu_sharing_client/rest.py +git_push.sh +pyproject.toml +requirements.txt +setup.cfg +setup.py +test-requirements.txt +test/__init__.py +test/test_about.py +test/test_about_api.py +test/test_about_service.py +test/test_abstract_entries.py +test/test_ace.py +test/test_acl.py +test/test_add_to_collection_event_dto.py +test/test_admin.py +test/test_admin_statistics.py +test/test_adminv1_api.py +test/test_application.py +test/test_archivev1_api.py +test/test_audience.py +test/test_authentication_token.py +test/test_authenticationv1_api.py +test/test_authority.py +test/test_authority_entries.py +test/test_available_mds.py +test/test_banner.py +test/test_bulkv1_api.py +test/test_cache_cluster.py +test/test_cache_info.py +test/test_cache_member.py +test/test_catalog.py +test/test_clientutilsv1_api.py +test/test_collection.py +test/test_collection_counts.py +test/test_collection_dto.py +test/test_collection_entries.py +test/test_collection_entry.py +test/test_collection_options.py +test/test_collection_proposal_entries.py +test/test_collection_reference.py +test/test_collections.py +test/test_collections_result.py +test/test_collectionv1_api.py +test/test_comment.py +test/test_comment_event_dto.py +test/test_comments.py +test/test_commentv1_api.py +test/test_condition.py +test/test_config.py +test/test_config_frontpage.py +test/test_config_privacy.py +test/test_config_publish.py +test/test_config_rating.py +test/test_config_remote.py +test/test_config_theme_color.py +test/test_config_theme_colors.py +test/test_config_tutorial.py +test/test_config_upload.py +test/test_config_workflow.py +test/test_config_workflow_list.py +test/test_configv1_api.py +test/test_connector.py +test/test_connector_file_type.py +test/test_connector_list.py +test/test_connectorv1_api.py +test/test_content.py +test/test_context_menu_entry.py +test/test_contributor.py +test/test_counts.py +test/test_create.py +test/test_create_usage.py +test/test_delete_option.py +test/test_dynamic_config.py +test/test_dynamic_registration_token.py +test/test_dynamic_registration_tokens.py +test/test_element.py +test/test_error_response.py +test/test_excel_result.py +test/test_facet.py +test/test_feature_info.py +test/test_feedback_data.py +test/test_feedback_result.py +test/test_feedbackv1_api.py +test/test_filter.py +test/test_filter_entry.py +test/test_font_icon.py +test/test_frontpage.py +test/test_general.py +test/test_geo.py +test/test_group.py +test/test_group_entries.py +test/test_group_entry.py +test/test_group_profile.py +test/test_group_signup_details.py +test/test_guest.py +test/test_handle_param.py +test/test_help_menu_options.py +test/test_home_folder_options.py +test/test_iamv1_api.py +test/test_icon.py +test/test_image.py +test/test_interface.py +test/test_invite_event_dto.py +test/test_job.py +test/test_job_builder.py +test/test_job_data_map.py +test/test_job_description.py +test/test_job_detail.py +test/test_job_detail_job_data_map.py +test/test_job_entry.py +test/test_job_field_description.py +test/test_job_info.py +test/test_job_key.py +test/test_json_object.py +test/test_key_value_pair.py +test/test_knowledgev1_api.py +test/test_language.py +test/test_level.py +test/test_license.py +test/test_license_agreement.py +test/test_license_agreement_node.py +test/test_licenses.py +test/test_location.py +test/test_log_entry.py +test/test_logger_config_result.py +test/test_login.py 
+test/test_login_credentials.py +test/test_logout_info.py +test/test_lti_platform_configuration.py +test/test_lti_platform_v13_api.py +test/test_lti_session.py +test/test_lti_tool_configuration.py +test/test_ltiv13_api.py +test/test_mainnav.py +test/test_manual_registration_data.py +test/test_mc_org_connect_result.py +test/test_mds.py +test/test_mds_column.py +test/test_mds_entries.py +test/test_mds_group.py +test/test_mds_list.py +test/test_mds_query_criteria.py +test/test_mds_sort.py +test/test_mds_sort_column.py +test/test_mds_sort_default.py +test/test_mds_subwidget.py +test/test_mds_value.py +test/test_mds_view.py +test/test_mds_widget.py +test/test_mds_widget_condition.py +test/test_mdsv1_api.py +test/test_mediacenter.py +test/test_mediacenter_profile_extension.py +test/test_mediacenters_import_result.py +test/test_mediacenterv1_api.py +test/test_menu_entry.py +test/test_message.py +test/test_metadata_set_info.py +test/test_metadata_suggestion_event_dto.py +test/test_networkv1_api.py +test/test_node.py +test/test_node_collection_proposal_count.py +test/test_node_data.py +test/test_node_data_dto.py +test/test_node_entries.py +test/test_node_entry.py +test/test_node_issue_event_dto.py +test/test_node_locked.py +test/test_node_lti_deep_link.py +test/test_node_permission_entry.py +test/test_node_permissions.py +test/test_node_ref.py +test/test_node_relation.py +test/test_node_remote.py +test/test_node_share.py +test/test_node_stats.py +test/test_node_text.py +test/test_node_version.py +test/test_node_version_entries.py +test/test_node_version_entry.py +test/test_node_version_ref.py +test/test_node_version_ref_entries.py +test/test_nodev1_api.py +test/test_notification_config.py +test/test_notification_event_dto.py +test/test_notification_intervals.py +test/test_notification_response_page.py +test/test_notificationv1_api.py +test/test_notify_entry.py +test/test_open_id_configuration.py +test/test_open_id_registration_result.py +test/test_organisations_import_result.py +test/test_organization.py +test/test_organization_entries.py +test/test_organizationv1_api.py +test/test_pageable.py +test/test_pagination.py +test/test_parameters.py +test/test_parent_entries.py +test/test_person.py +test/test_person_delete_options.py +test/test_person_delete_result.py +test/test_person_report.py +test/test_plugin_info.py +test/test_plugin_status.py +test/test_preferences.py +test/test_preview.py +test/test_profile.py +test/test_profile_settings.py +test/test_propose_for_collection_event_dto.py +test/test_provider.py +test/test_query.py +test/test_rating_data.py +test/test_rating_details.py +test/test_rating_event_dto.py +test/test_rating_history.py +test/test_ratingv1_api.py +test/test_reference_entries.py +test/test_register.py +test/test_register_exists.py +test/test_register_information.py +test/test_registerv1_api.py +test/test_registration_url.py +test/test_relation_data.py +test/test_relationv1_api.py +test/test_remote.py +test/test_remote_auth_description.py +test/test_rendering.py +test/test_rendering_details_entry.py +test/test_rendering_gdpr.py +test/test_renderingv1_api.py +test/test_repo.py +test/test_repo_entries.py +test/test_repository_config.py +test/test_repository_version_info.py +test/test_restore_result.py +test/test_restore_results.py +test/test_search_parameters.py +test/test_search_parameters_facets.py +test/test_search_result.py +test/test_search_result_elastic.py +test/test_search_result_lrmi.py +test/test_search_result_node.py +test/test_search_v_card.py 
+test/test_searchv1_api.py +test/test_server_update_info.py +test/test_service.py +test/test_service_instance.py +test/test_service_version.py +test/test_services.py +test/test_shared_folder_options.py +test/test_sharing_info.py +test/test_sharingv1_api.py +test/test_simple_edit.py +test/test_simple_edit_global_groups.py +test/test_simple_edit_organization.py +test/test_sort.py +test/test_statistic_entity.py +test/test_statistic_entry.py +test/test_statistics.py +test/test_statistics_global.py +test/test_statistics_group.py +test/test_statistics_key_group.py +test/test_statistics_sub_group.py +test/test_statistics_user.py +test/test_statisticv1_api.py +test/test_stored_service.py +test/test_stream.py +test/test_stream_entry.py +test/test_stream_entry_input.py +test/test_stream_list.py +test/test_streamv1_api.py +test/test_sub_group_item.py +test/test_suggest.py +test/test_suggestion.py +test/test_suggestion_param.py +test/test_suggestions.py +test/test_tool.py +test/test_tools.py +test/test_toolv1_api.py +test/test_tracking.py +test/test_tracking_authority.py +test/test_tracking_node.py +test/test_trackingv1_api.py +test/test_upload_result.py +test/test_usage.py +test/test_usages.py +test/test_usagev1_api.py +test/test_user.py +test/test_user_credential.py +test/test_user_data_dto.py +test/test_user_entries.py +test/test_user_entry.py +test/test_user_profile.py +test/test_user_profile_app_auth.py +test/test_user_profile_edit.py +test/test_user_quota.py +test/test_user_simple.py +test/test_user_stats.py +test/test_user_status.py +test/test_value.py +test/test_value_parameters.py +test/test_values.py +test/test_variables.py +test/test_version.py +test/test_version_build.py +test/test_version_git.py +test/test_version_git_commit.py +test/test_version_maven.py +test/test_version_project.py +test/test_version_timestamp.py +test/test_website_information.py +test/test_widget_data_dto.py +test/test_workflow_event_dto.py +test/test_workflow_history.py +tox.ini diff --git a/edu_sharing_openapi/.openapi-generator/VERSION b/edu_sharing_openapi/.openapi-generator/VERSION new file mode 100644 index 00000000..6116b14d --- /dev/null +++ b/edu_sharing_openapi/.openapi-generator/VERSION @@ -0,0 +1 @@ +7.8.0-SNAPSHOT diff --git a/edu_sharing_openapi/.travis.yml b/edu_sharing_openapi/.travis.yml new file mode 100644 index 00000000..725f3395 --- /dev/null +++ b/edu_sharing_openapi/.travis.yml @@ -0,0 +1,17 @@ +# ref: https://docs.travis-ci.com/user/languages/python +language: python +python: + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + # uncomment the following if needed + #- "3.11-dev" # 3.11 development branch + #- "nightly" # nightly build +# command to install dependencies +install: + - "pip install -r requirements.txt" + - "pip install -r test-requirements.txt" +# command to run tests +script: pytest --cov=edu_sharing_client diff --git a/edu_sharing_openapi/README.md b/edu_sharing_openapi/README.md new file mode 100644 index 00000000..3aba2949 --- /dev/null +++ b/edu_sharing_openapi/README.md @@ -0,0 +1,672 @@ +# edu-sharing-client +The public restful API of the edu-sharing repository. + +This Python package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project: + +- API version: 1.1 +- Package version: 1.0.0 +- Generator version: 7.8.0-SNAPSHOT +- Build package: org.openapitools.codegen.languages.PythonClientCodegen + +## Requirements. 
+ +Python 3.7+ + +## Installation & Usage +### pip install + +If the python package is hosted on a repository, you can install directly using: + +```sh +pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git +``` +(you may need to run `pip` with root permission: `sudo pip install git+https://github.com/GIT_USER_ID/GIT_REPO_ID.git`) + +Then import the package: +```python +import edu_sharing_client +``` + +### Setuptools + +Install via [Setuptools](http://pypi.python.org/pypi/setuptools). + +```sh +python setup.py install --user +``` +(or `sudo python setup.py install` to install the package for all users) + +Then import the package: +```python +import edu_sharing_client +``` + +### Tests + +Execute `pytest` to run the tests. + +## Getting Started + +Please follow the [installation procedure](#installation--usage) and then run the following: + +```python + +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ABOUTApi(api_client) + + try: + # Discover the API. + api_response = api_instance.about() + print("The response of ABOUTApi->about:\n") + pprint(api_response) + except ApiException as e: + print("Exception when calling ABOUTApi->about: %s\n" % e) + +``` + +## Documentation for API Endpoints + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Class | Method | HTTP request | Description +------------ | ------------- | ------------- | ------------- +*ABOUTApi* | [**about**](docs/ABOUTApi.md#about) | **GET** /_about | Discover the API. +*ABOUTApi* | [**licenses**](docs/ABOUTApi.md#licenses) | **GET** /_about/licenses | License information. +*ABOUTApi* | [**status**](docs/ABOUTApi.md#status) | **GET** /_about/status/{mode} | status of repo services +*ADMINV1Api* | [**add_application**](docs/ADMINV1Api.md#add_application) | **PUT** /admin/v1/applications/xml | register/add an application via xml file +*ADMINV1Api* | [**add_application1**](docs/ADMINV1Api.md#add_application1) | **PUT** /admin/v1/applications | register/add an application +*ADMINV1Api* | [**add_toolpermission**](docs/ADMINV1Api.md#add_toolpermission) | **POST** /admin/v1/toolpermissions/add/{name} | add a new toolpermissions +*ADMINV1Api* | [**apply_template**](docs/ADMINV1Api.md#apply_template) | **POST** /admin/v1/applyTemplate | apply a folder template +*ADMINV1Api* | [**cancel_job**](docs/ADMINV1Api.md#cancel_job) | **DELETE** /admin/v1/jobs/{job} | cancel a running job +*ADMINV1Api* | [**change_logging**](docs/ADMINV1Api.md#change_logging) | **POST** /admin/v1/log/config | Change the loglevel for classes at runtime. +*ADMINV1Api* | [**clear_cache**](docs/ADMINV1Api.md#clear_cache) | **POST** /admin/v1/cache/clearCache | clear cache +*ADMINV1Api* | [**create_preview**](docs/ADMINV1Api.md#create_preview) | **GET** /admin/v1/nodes/preview/{node} | create preview. 
+*ADMINV1Api* | [**delete_person**](docs/ADMINV1Api.md#delete_person) | **PUT** /admin/v1/deletePersons | delete persons +*ADMINV1Api* | [**export_by_lucene**](docs/ADMINV1Api.md#export_by_lucene) | **GET** /admin/v1/lucene/export | Search for custom lucene query and choose specific properties to load +*ADMINV1Api* | [**export_lom**](docs/ADMINV1Api.md#export_lom) | **GET** /admin/v1/export/lom | Export Nodes with LOM Metadata Format +*ADMINV1Api* | [**get_all_jobs**](docs/ADMINV1Api.md#get_all_jobs) | **GET** /admin/v1/jobs/all | get all available jobs +*ADMINV1Api* | [**get_all_toolpermissions**](docs/ADMINV1Api.md#get_all_toolpermissions) | **GET** /admin/v1/toolpermissions/{authority} | get all toolpermissions for an authority +*ADMINV1Api* | [**get_application_xml**](docs/ADMINV1Api.md#get_application_xml) | **GET** /admin/v1/applications/{xml} | list any xml properties (like from homeApplication.properties.xml) +*ADMINV1Api* | [**get_applications**](docs/ADMINV1Api.md#get_applications) | **GET** /admin/v1/applications | list applications +*ADMINV1Api* | [**get_cache_entries**](docs/ADMINV1Api.md#get_cache_entries) | **GET** /admin/v1/cache/cacheEntries/{id} | Get entries of a cache +*ADMINV1Api* | [**get_cache_info**](docs/ADMINV1Api.md#get_cache_info) | **GET** /admin/v1/cache/cacheInfo/{id} | Get information about a cache +*ADMINV1Api* | [**get_catalina_out**](docs/ADMINV1Api.md#get_catalina_out) | **GET** /admin/v1/catalina | Get last info from catalina out +*ADMINV1Api* | [**get_cluster**](docs/ADMINV1Api.md#get_cluster) | **GET** /admin/v1/clusterInfo | Get information about the Cluster +*ADMINV1Api* | [**get_clusters**](docs/ADMINV1Api.md#get_clusters) | **GET** /admin/v1/clusterInfos | Get information about the Cluster +*ADMINV1Api* | [**get_config**](docs/ADMINV1Api.md#get_config) | **GET** /admin/v1/repositoryConfig | get the repository config object +*ADMINV1Api* | [**get_config_file**](docs/ADMINV1Api.md#get_config_file) | **GET** /admin/v1/configFile | get a base system config file (e.g. 
edu-sharing.conf) +*ADMINV1Api* | [**get_enabled_plugins**](docs/ADMINV1Api.md#get_enabled_plugins) | **GET** /admin/v1/plugins | get enabled system plugins +*ADMINV1Api* | [**get_global_groups**](docs/ADMINV1Api.md#get_global_groups) | **GET** /admin/v1/globalGroups | Get global groups +*ADMINV1Api* | [**get_jobs**](docs/ADMINV1Api.md#get_jobs) | **GET** /admin/v1/jobs | get all running jobs +*ADMINV1Api* | [**get_lightbend_config**](docs/ADMINV1Api.md#get_lightbend_config) | **GET** /admin/v1/config/merged | +*ADMINV1Api* | [**get_logging_runtime**](docs/ADMINV1Api.md#get_logging_runtime) | **GET** /admin/v1/log/config | get the logger config +*ADMINV1Api* | [**get_oai_classes**](docs/ADMINV1Api.md#get_oai_classes) | **GET** /admin/v1/import/oai/classes | Get OAI class names +*ADMINV1Api* | [**get_property_to_mds**](docs/ADMINV1Api.md#get_property_to_mds) | **GET** /admin/v1/propertyToMds | Get a Mds Valuespace for all values of the given properties +*ADMINV1Api* | [**get_statistics**](docs/ADMINV1Api.md#get_statistics) | **GET** /admin/v1/statistics | get statistics +*ADMINV1Api* | [**get_version**](docs/ADMINV1Api.md#get_version) | **GET** /admin/v1/version | get detailed version information +*ADMINV1Api* | [**import_collections**](docs/ADMINV1Api.md#import_collections) | **POST** /admin/v1/import/collections | import collections via a xml file +*ADMINV1Api* | [**import_excel**](docs/ADMINV1Api.md#import_excel) | **POST** /admin/v1/import/excel | Import excel data +*ADMINV1Api* | [**import_oai**](docs/ADMINV1Api.md#import_oai) | **POST** /admin/v1/import/oai | Import oai data +*ADMINV1Api* | [**import_oai_xml**](docs/ADMINV1Api.md#import_oai_xml) | **POST** /admin/v1/import/oai/xml | Import single xml via oai (for testing) +*ADMINV1Api* | [**refresh_app_info**](docs/ADMINV1Api.md#refresh_app_info) | **POST** /admin/v1/refreshAppInfo | refresh app info +*ADMINV1Api* | [**refresh_cache**](docs/ADMINV1Api.md#refresh_cache) | **POST** /admin/v1/import/refreshCache/{folder} | Refresh cache +*ADMINV1Api* | [**refresh_edu_group_cache**](docs/ADMINV1Api.md#refresh_edu_group_cache) | **POST** /admin/v1/cache/refreshEduGroupCache | Refresh the Edu Group Cache +*ADMINV1Api* | [**remove_application**](docs/ADMINV1Api.md#remove_application) | **DELETE** /admin/v1/applications/{id} | remove an application +*ADMINV1Api* | [**remove_cache_entry**](docs/ADMINV1Api.md#remove_cache_entry) | **POST** /admin/v1/cache/removeCacheEntry | remove cache entry +*ADMINV1Api* | [**remove_oai_imports**](docs/ADMINV1Api.md#remove_oai_imports) | **DELETE** /admin/v1/import/oai | Remove deleted imports +*ADMINV1Api* | [**search_by_elastic_dsl**](docs/ADMINV1Api.md#search_by_elastic_dsl) | **GET** /admin/v1/elastic | Search for custom elastic DSL query +*ADMINV1Api* | [**search_by_lucene**](docs/ADMINV1Api.md#search_by_lucene) | **GET** /admin/v1/lucene | Search for custom lucene query +*ADMINV1Api* | [**server_update_list**](docs/ADMINV1Api.md#server_update_list) | **GET** /admin/v1/serverUpdate/list | list available update tasks +*ADMINV1Api* | [**server_update_list1**](docs/ADMINV1Api.md#server_update_list1) | **POST** /admin/v1/serverUpdate/run/{id} | Run an update tasks +*ADMINV1Api* | [**set_config**](docs/ADMINV1Api.md#set_config) | **PUT** /admin/v1/repositoryConfig | set/update the repository config object +*ADMINV1Api* | [**set_toolpermissions**](docs/ADMINV1Api.md#set_toolpermissions) | **PUT** /admin/v1/toolpermissions/{authority} | set toolpermissions for an authority +*ADMINV1Api* | 
[**start_job**](docs/ADMINV1Api.md#start_job) | **POST** /admin/v1/job/{jobClass} | Start a Job. +*ADMINV1Api* | [**start_job_sync**](docs/ADMINV1Api.md#start_job_sync) | **POST** /admin/v1/job/{jobClass}/sync | Start a Job. +*ADMINV1Api* | [**switch_authority**](docs/ADMINV1Api.md#switch_authority) | **POST** /admin/v1/authenticate/{authorityName} | switch the session to a known authority name +*ADMINV1Api* | [**test_mail**](docs/ADMINV1Api.md#test_mail) | **POST** /admin/v1/mail/{receiver}/{template} | Test a mail template +*ADMINV1Api* | [**update_application_xml**](docs/ADMINV1Api.md#update_application_xml) | **PUT** /admin/v1/applications/{xml} | edit any properties xml (like homeApplication.properties.xml) +*ADMINV1Api* | [**update_config_file**](docs/ADMINV1Api.md#update_config_file) | **PUT** /admin/v1/configFile | update a base system config file (e.g. edu-sharing.conf) +*ADMINV1Api* | [**upload_temp**](docs/ADMINV1Api.md#upload_temp) | **PUT** /admin/v1/upload/temp/{name} | Upload a file +*ARCHIVEV1Api* | [**purge**](docs/ARCHIVEV1Api.md#purge) | **DELETE** /archive/v1/purge/{repository} | Searches for archive nodes. +*ARCHIVEV1Api* | [**restore**](docs/ARCHIVEV1Api.md#restore) | **POST** /archive/v1/restore/{repository} | restore archived nodes. +*ARCHIVEV1Api* | [**search_archive**](docs/ARCHIVEV1Api.md#search_archive) | **GET** /archive/v1/search/{repository}/{pattern} | Searches for archive nodes. +*ARCHIVEV1Api* | [**search_archive_person**](docs/ARCHIVEV1Api.md#search_archive_person) | **GET** /archive/v1/search/{repository}/{pattern}/{person} | Searches for archive nodes. +*AUTHENTICATIONV1Api* | [**authenticate**](docs/AUTHENTICATIONV1Api.md#authenticate) | **POST** /authentication/v1/appauth/{userId} | authenticate user of an registered application. +*AUTHENTICATIONV1Api* | [**has_access_to_scope**](docs/AUTHENTICATIONV1Api.md#has_access_to_scope) | **GET** /authentication/v1/hasAccessToScope | Returns true if the current user has access to the given scope +*AUTHENTICATIONV1Api* | [**login**](docs/AUTHENTICATIONV1Api.md#login) | **GET** /authentication/v1/validateSession | Validates the Basic Auth Credentials and check if the session is a logged in user +*AUTHENTICATIONV1Api* | [**login_to_scope**](docs/AUTHENTICATIONV1Api.md#login_to_scope) | **POST** /authentication/v1/loginToScope | Validates the Basic Auth Credentials and check if the session is a logged in user +*AUTHENTICATIONV1Api* | [**logout**](docs/AUTHENTICATIONV1Api.md#logout) | **GET** /authentication/v1/destroySession | Destroys the current session and logout the user +*BULKV1Api* | [**find**](docs/BULKV1Api.md#find) | **POST** /bulk/v1/find | gets a given node +*BULKV1Api* | [**sync**](docs/BULKV1Api.md#sync) | **PUT** /bulk/v1/sync/{group} | Create or update a given node +*CLIENTUTILSV1Api* | [**get_website_information**](docs/CLIENTUTILSV1Api.md#get_website_information) | **GET** /clientUtils/v1/getWebsiteInformation | Read generic information about a webpage +*COLLECTIONV1Api* | [**add_to_collection**](docs/COLLECTIONV1Api.md#add_to_collection) | **PUT** /collection/v1/collections/{repository}/{collection}/references/{node} | Add a node to a collection. +*COLLECTIONV1Api* | [**change_icon_of_collection**](docs/COLLECTIONV1Api.md#change_icon_of_collection) | **POST** /collection/v1/collections/{repository}/{collection}/icon | Writes Preview Image of a collection. 
+*COLLECTIONV1Api* | [**create_collection**](docs/COLLECTIONV1Api.md#create_collection) | **POST** /collection/v1/collections/{repository}/{collection}/children | Create a new collection. +*COLLECTIONV1Api* | [**delete_collection**](docs/COLLECTIONV1Api.md#delete_collection) | **DELETE** /collection/v1/collections/{repository}/{collection} | Delete a collection. +*COLLECTIONV1Api* | [**delete_from_collection**](docs/COLLECTIONV1Api.md#delete_from_collection) | **DELETE** /collection/v1/collections/{repository}/{collection}/references/{node} | Delete a node from a collection. +*COLLECTIONV1Api* | [**get_collection**](docs/COLLECTIONV1Api.md#get_collection) | **GET** /collection/v1/collections/{repository}/{collectionId} | Get a collection. +*COLLECTIONV1Api* | [**get_collections_containing_proposals**](docs/COLLECTIONV1Api.md#get_collections_containing_proposals) | **GET** /collection/v1/collections/{repository}/children/proposals/collections | Get all collections containing proposals with a given state (via search index) +*COLLECTIONV1Api* | [**get_collections_proposals**](docs/COLLECTIONV1Api.md#get_collections_proposals) | **GET** /collection/v1/collections/{repository}/{collection}/children/proposals | Get proposed objects for collection (requires edit permissions on collection). +*COLLECTIONV1Api* | [**get_collections_references**](docs/COLLECTIONV1Api.md#get_collections_references) | **GET** /collection/v1/collections/{repository}/{collection}/children/references | Get references objects for collection. +*COLLECTIONV1Api* | [**get_collections_subcollections**](docs/COLLECTIONV1Api.md#get_collections_subcollections) | **GET** /collection/v1/collections/{repository}/{collection}/children/collections | Get child collections for collection (or root). +*COLLECTIONV1Api* | [**remove_icon_of_collection**](docs/COLLECTIONV1Api.md#remove_icon_of_collection) | **DELETE** /collection/v1/collections/{repository}/{collection}/icon | Deletes Preview Image of a collection. +*COLLECTIONV1Api* | [**search_collections**](docs/COLLECTIONV1Api.md#search_collections) | **GET** /collection/v1/collections/{repository}/search | Search collections. +*COLLECTIONV1Api* | [**set_collection_order**](docs/COLLECTIONV1Api.md#set_collection_order) | **POST** /collection/v1/collections/{repository}/{collection}/order | Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection +*COLLECTIONV1Api* | [**set_pinned_collections**](docs/COLLECTIONV1Api.md#set_pinned_collections) | **POST** /collection/v1/collections/{repository}/pinning | Set pinned collections. +*COLLECTIONV1Api* | [**update_collection**](docs/COLLECTIONV1Api.md#update_collection) | **PUT** /collection/v1/collections/{repository}/{collection} | Update a collection. 
+*COMMENTV1Api* | [**add_comment**](docs/COMMENTV1Api.md#add_comment) | **PUT** /comment/v1/comments/{repository}/{node} | create a new comment +*COMMENTV1Api* | [**delete_comment**](docs/COMMENTV1Api.md#delete_comment) | **DELETE** /comment/v1/comments/{repository}/{comment} | delete a comment +*COMMENTV1Api* | [**edit_comment**](docs/COMMENTV1Api.md#edit_comment) | **POST** /comment/v1/comments/{repository}/{comment} | edit a comment +*COMMENTV1Api* | [**get_comments**](docs/COMMENTV1Api.md#get_comments) | **GET** /comment/v1/comments/{repository}/{node} | list comments +*CONFIGV1Api* | [**get_config1**](docs/CONFIGV1Api.md#get_config1) | **GET** /config/v1/values | get repository config values +*CONFIGV1Api* | [**get_dynamic_value**](docs/CONFIGV1Api.md#get_dynamic_value) | **GET** /config/v1/dynamic/{key} | Get a config entry (appropriate rights for the entry are required) +*CONFIGV1Api* | [**get_language**](docs/CONFIGV1Api.md#get_language) | **GET** /config/v1/language | get override strings for the current language +*CONFIGV1Api* | [**get_language_defaults**](docs/CONFIGV1Api.md#get_language_defaults) | **GET** /config/v1/language/defaults | get all inital language strings for angular +*CONFIGV1Api* | [**get_variables**](docs/CONFIGV1Api.md#get_variables) | **GET** /config/v1/variables | get global config variables +*CONFIGV1Api* | [**set_dynamic_value**](docs/CONFIGV1Api.md#set_dynamic_value) | **POST** /config/v1/dynamic/{key} | Set a config entry (admin rights required) +*CONNECTORV1Api* | [**list_connectors**](docs/CONNECTORV1Api.md#list_connectors) | **GET** /connector/v1/connectors/{repository}/list | List all available connectors +*FEEDBACKV1Api* | [**add_feedback**](docs/FEEDBACKV1Api.md#add_feedback) | **PUT** /feedback/v1/feedback/{repository}/{node}/add | Give feedback on a node +*FEEDBACKV1Api* | [**get_feedbacks**](docs/FEEDBACKV1Api.md#get_feedbacks) | **GET** /feedback/v1/feedback/{repository}/{node}/list | Get given feedback on a node +*IAMV1Api* | [**add_membership**](docs/IAMV1Api.md#add_membership) | **PUT** /iam/v1/groups/{repository}/{group}/members/{member} | Add member to the group. +*IAMV1Api* | [**add_node_list**](docs/IAMV1Api.md#add_node_list) | **PUT** /iam/v1/people/{repository}/{person}/nodeList/{list}/{node} | Add a node to node a list of a user +*IAMV1Api* | [**change_group_profile**](docs/IAMV1Api.md#change_group_profile) | **PUT** /iam/v1/groups/{repository}/{group}/profile | Set profile of the group. +*IAMV1Api* | [**change_user_avatar**](docs/IAMV1Api.md#change_user_avatar) | **PUT** /iam/v1/people/{repository}/{person}/avatar | Set avatar of the user. +*IAMV1Api* | [**change_user_password**](docs/IAMV1Api.md#change_user_password) | **PUT** /iam/v1/people/{repository}/{person}/credential | Change/Set password of the user. +*IAMV1Api* | [**change_user_profile**](docs/IAMV1Api.md#change_user_profile) | **PUT** /iam/v1/people/{repository}/{person}/profile | Set profile of the user. +*IAMV1Api* | [**confirm_signup**](docs/IAMV1Api.md#confirm_signup) | **PUT** /iam/v1/groups/{repository}/{group}/signup/list/{user} | put the pending user into the group +*IAMV1Api* | [**create_group**](docs/IAMV1Api.md#create_group) | **POST** /iam/v1/groups/{repository}/{group} | Create a new group. +*IAMV1Api* | [**create_user**](docs/IAMV1Api.md#create_user) | **POST** /iam/v1/people/{repository}/{person} | Create a new user. +*IAMV1Api* | [**delete_group**](docs/IAMV1Api.md#delete_group) | **DELETE** /iam/v1/groups/{repository}/{group} | Delete the group. 
+*IAMV1Api* | [**delete_membership**](docs/IAMV1Api.md#delete_membership) | **DELETE** /iam/v1/groups/{repository}/{group}/members/{member} | Delete member from the group. +*IAMV1Api* | [**delete_user**](docs/IAMV1Api.md#delete_user) | **DELETE** /iam/v1/people/{repository}/{person} | Delete the user. +*IAMV1Api* | [**get_group**](docs/IAMV1Api.md#get_group) | **GET** /iam/v1/groups/{repository}/{group} | Get the group. +*IAMV1Api* | [**get_membership**](docs/IAMV1Api.md#get_membership) | **GET** /iam/v1/groups/{repository}/{group}/members | Get all members of the group. +*IAMV1Api* | [**get_node_list**](docs/IAMV1Api.md#get_node_list) | **GET** /iam/v1/people/{repository}/{person}/nodeList/{list} | Get a specific node list for a user +*IAMV1Api* | [**get_preferences**](docs/IAMV1Api.md#get_preferences) | **GET** /iam/v1/people/{repository}/{person}/preferences | Get preferences stored for user +*IAMV1Api* | [**get_profile_settings**](docs/IAMV1Api.md#get_profile_settings) | **GET** /iam/v1/people/{repository}/{person}/profileSettings | Get profileSettings configuration +*IAMV1Api* | [**get_recently_invited**](docs/IAMV1Api.md#get_recently_invited) | **GET** /iam/v1/authorities/{repository}/recent | Get recently invited authorities. +*IAMV1Api* | [**get_subgroup_by_type**](docs/IAMV1Api.md#get_subgroup_by_type) | **GET** /iam/v1/groups/{repository}/{group}/type/{type} | Get a subgroup by the specified type +*IAMV1Api* | [**get_user**](docs/IAMV1Api.md#get_user) | **GET** /iam/v1/people/{repository}/{person} | Get the user. +*IAMV1Api* | [**get_user_groups**](docs/IAMV1Api.md#get_user_groups) | **GET** /iam/v1/people/{repository}/{person}/memberships | Get all groups the given user is member of. +*IAMV1Api* | [**get_user_stats**](docs/IAMV1Api.md#get_user_stats) | **GET** /iam/v1/people/{repository}/{person}/stats | Get the user stats. +*IAMV1Api* | [**reject_signup**](docs/IAMV1Api.md#reject_signup) | **DELETE** /iam/v1/groups/{repository}/{group}/signup/list/{user} | reject the pending user +*IAMV1Api* | [**remove_node_list**](docs/IAMV1Api.md#remove_node_list) | **DELETE** /iam/v1/people/{repository}/{person}/nodeList/{list}/{node} | Delete a node of a node list of a user +*IAMV1Api* | [**remove_user_avatar**](docs/IAMV1Api.md#remove_user_avatar) | **DELETE** /iam/v1/people/{repository}/{person}/avatar | Remove avatar of the user. +*IAMV1Api* | [**search_authorities**](docs/IAMV1Api.md#search_authorities) | **GET** /iam/v1/authorities/{repository} | Search authorities. +*IAMV1Api* | [**search_groups**](docs/IAMV1Api.md#search_groups) | **GET** /iam/v1/groups/{repository} | Search groups. +*IAMV1Api* | [**search_user**](docs/IAMV1Api.md#search_user) | **GET** /iam/v1/people/{repository} | Search users. 
+*IAMV1Api* | [**set_preferences**](docs/IAMV1Api.md#set_preferences) | **PUT** /iam/v1/people/{repository}/{person}/preferences | Set preferences for user +*IAMV1Api* | [**set_profile_settings**](docs/IAMV1Api.md#set_profile_settings) | **PUT** /iam/v1/people/{repository}/{person}/profileSettings | Set profileSettings Configuration +*IAMV1Api* | [**signup_group**](docs/IAMV1Api.md#signup_group) | **POST** /iam/v1/groups/{repository}/{group}/signup | let the current user signup to the given group +*IAMV1Api* | [**signup_group_details**](docs/IAMV1Api.md#signup_group_details) | **POST** /iam/v1/groups/{repository}/{group}/signup/config | requires admin rights +*IAMV1Api* | [**signup_group_list**](docs/IAMV1Api.md#signup_group_list) | **GET** /iam/v1/groups/{repository}/{group}/signup/list | list pending users that want to join this group +*IAMV1Api* | [**update_user_status**](docs/IAMV1Api.md#update_user_status) | **PUT** /iam/v1/people/{repository}/{person}/status/{status} | update the user status. +*KNOWLEDGEV1Api* | [**get_analyzing_job_status**](docs/KNOWLEDGEV1Api.md#get_analyzing_job_status) | **GET** /knowledge/v1/analyze/jobs/{job} | Get analyzing job status. +*KNOWLEDGEV1Api* | [**run_analyzing_job**](docs/KNOWLEDGEV1Api.md#run_analyzing_job) | **POST** /knowledge/v1/analyze/jobs | Run analyzing job. +*LTIPlatformV13Api* | [**auth**](docs/LTIPlatformV13Api.md#auth) | **GET** /ltiplatform/v13/auth | LTI Platform oidc endpoint. responds to a login authentication request +*LTIPlatformV13Api* | [**auth_token_endpoint**](docs/LTIPlatformV13Api.md#auth_token_endpoint) | **GET** /ltiplatform/v13/token | LTIPlatform auth token endpoint +*LTIPlatformV13Api* | [**change_content**](docs/LTIPlatformV13Api.md#change_content) | **POST** /ltiplatform/v13/content | Custom edu-sharing endpoint to change content of node. +*LTIPlatformV13Api* | [**convert_to_resourcelink**](docs/LTIPlatformV13Api.md#convert_to_resourcelink) | **POST** /ltiplatform/v13/convert2resourcelink | manual convertion of an io to an resource link without deeplinking +*LTIPlatformV13Api* | [**deep_linking_response**](docs/LTIPlatformV13Api.md#deep_linking_response) | **POST** /ltiplatform/v13/deeplinking-response | receiving deeplink response messages. +*LTIPlatformV13Api* | [**generate_login_initiation_form**](docs/LTIPlatformV13Api.md#generate_login_initiation_form) | **GET** /ltiplatform/v13/generateLoginInitiationForm | generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti deeplink flow. +*LTIPlatformV13Api* | [**generate_login_initiation_form_resource_link**](docs/LTIPlatformV13Api.md#generate_login_initiation_form_resource_link) | **GET** /ltiplatform/v13/generateLoginInitiationFormResourceLink | generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. +*LTIPlatformV13Api* | [**get_content**](docs/LTIPlatformV13Api.md#get_content) | **GET** /ltiplatform/v13/content | Custom edu-sharing endpoint to get content of node. +*LTIPlatformV13Api* | [**manual_registration**](docs/LTIPlatformV13Api.md#manual_registration) | **POST** /ltiplatform/v13/manual-registration | manual registration endpoint for registration of tools. +*LTIPlatformV13Api* | [**open_id_registration**](docs/LTIPlatformV13Api.md#open_id_registration) | **POST** /ltiplatform/v13/openid-registration | registration endpoint the tool uses to register at platform. 
+*LTIPlatformV13Api* | [**openid_configuration**](docs/LTIPlatformV13Api.md#openid_configuration) | **GET** /ltiplatform/v13/openid-configuration | LTIPlatform openid configuration +*LTIPlatformV13Api* | [**start_dynamic_registration**](docs/LTIPlatformV13Api.md#start_dynamic_registration) | **POST** /ltiplatform/v13/start-dynamic-registration | starts lti dynamic registration. +*LTIPlatformV13Api* | [**start_dynamic_registration_get**](docs/LTIPlatformV13Api.md#start_dynamic_registration_get) | **GET** /ltiplatform/v13/start-dynamic-registration | starts lti dynamic registration. +*LTIPlatformV13Api* | [**test_token**](docs/LTIPlatformV13Api.md#test_token) | **PUT** /ltiplatform/v13/testToken | test creates a token signed with homeapp. +*LTIPlatformV13Api* | [**tools**](docs/LTIPlatformV13Api.md#tools) | **GET** /ltiplatform/v13/tools | List of tools registered +*LTIV13Api* | [**generate_deep_linking_response**](docs/LTIV13Api.md#generate_deep_linking_response) | **GET** /lti/v13/generateDeepLinkingResponse | generate DeepLinkingResponse +*LTIV13Api* | [**get_details_snippet**](docs/LTIV13Api.md#get_details_snippet) | **GET** /lti/v13/details/{repository}/{node} | get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow +*LTIV13Api* | [**jwks_uri**](docs/LTIV13Api.md#jwks_uri) | **GET** /lti/v13/jwks | LTI - returns repository JSON Web Key Sets +*LTIV13Api* | [**login_initiations**](docs/LTIV13Api.md#login_initiations) | **POST** /lti/v13/oidc/login_initiations | lti authentication process preparation. +*LTIV13Api* | [**login_initiations_get**](docs/LTIV13Api.md#login_initiations_get) | **GET** /lti/v13/oidc/login_initiations | lti authentication process preparation. +*LTIV13Api* | [**lti**](docs/LTIV13Api.md#lti) | **POST** /lti/v13/lti13 | lti tool redirect. +*LTIV13Api* | [**lti_registration_dynamic**](docs/LTIV13Api.md#lti_registration_dynamic) | **GET** /lti/v13/registration/dynamic/{token} | LTI Dynamic Registration - Initiate registration +*LTIV13Api* | [**lti_registration_url**](docs/LTIV13Api.md#lti_registration_url) | **GET** /lti/v13/registration/url | LTI Dynamic Registration - generates url for platform +*LTIV13Api* | [**lti_target**](docs/LTIV13Api.md#lti_target) | **POST** /lti/v13/lti13/{nodeId} | lti tool resource link target. +*LTIV13Api* | [**register_by_type**](docs/LTIV13Api.md#register_by_type) | **POST** /lti/v13/registration/{type} | register LTI platform +*LTIV13Api* | [**register_test**](docs/LTIV13Api.md#register_test) | **POST** /lti/v13/registration/static | register LTI platform +*LTIV13Api* | [**remove_lti_registration_url**](docs/LTIV13Api.md#remove_lti_registration_url) | **DELETE** /lti/v13/registration/url/{token} | LTI Dynamic Regitration - delete url +*MDSV1Api* | [**get_metadata_set**](docs/MDSV1Api.md#get_metadata_set) | **GET** /mds/v1/metadatasets/{repository}/{metadataset} | Get metadata set new. +*MDSV1Api* | [**get_metadata_sets**](docs/MDSV1Api.md#get_metadata_sets) | **GET** /mds/v1/metadatasets/{repository} | Get metadata sets V2 of repository. +*MDSV1Api* | [**get_values**](docs/MDSV1Api.md#get_values) | **POST** /mds/v1/metadatasets/{repository}/{metadataset}/values | Get values. +*MDSV1Api* | [**get_values4_keys**](docs/MDSV1Api.md#get_values4_keys) | **POST** /mds/v1/metadatasets/{repository}/{metadataset}/values_for_keys | Get values for keys. 
+*MDSV1Api* | [**suggest_value**](docs/MDSV1Api.md#suggest_value) | **POST** /mds/v1/metadatasets/{repository}/{metadataset}/values/{widget}/suggest | Suggest a value. +*MEDIACENTERV1Api* | [**add_mediacenter_group**](docs/MEDIACENTERV1Api.md#add_mediacenter_group) | **PUT** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group} | add a group that is managed by the given mediacenter +*MEDIACENTERV1Api* | [**create_mediacenter**](docs/MEDIACENTERV1Api.md#create_mediacenter) | **POST** /mediacenter/v1/mediacenter/{repository}/{mediacenter} | create new mediacenter in repository. +*MEDIACENTERV1Api* | [**delete_mediacenter**](docs/MEDIACENTERV1Api.md#delete_mediacenter) | **DELETE** /mediacenter/v1/mediacenter/{repository}/{mediacenter} | delete a mediacenter group and it's admin group and proxy group +*MEDIACENTERV1Api* | [**edit_mediacenter**](docs/MEDIACENTERV1Api.md#edit_mediacenter) | **PUT** /mediacenter/v1/mediacenter/{repository}/{mediacenter} | edit a mediacenter in repository. +*MEDIACENTERV1Api* | [**export_mediacenter_licensed_nodes**](docs/MEDIACENTERV1Api.md#export_mediacenter_licensed_nodes) | **POST** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/licenses/export | get nodes that are licensed by the given mediacenter +*MEDIACENTERV1Api* | [**get_mediacenter_groups**](docs/MEDIACENTERV1Api.md#get_mediacenter_groups) | **GET** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages | get groups that are managed by the given mediacenter +*MEDIACENTERV1Api* | [**get_mediacenter_licensed_nodes**](docs/MEDIACENTERV1Api.md#get_mediacenter_licensed_nodes) | **POST** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/licenses | get nodes that are licensed by the given mediacenter +*MEDIACENTERV1Api* | [**get_mediacenters**](docs/MEDIACENTERV1Api.md#get_mediacenters) | **GET** /mediacenter/v1/mediacenter/{repository} | get mediacenters in the repository. +*MEDIACENTERV1Api* | [**import_mc_org_connections**](docs/MEDIACENTERV1Api.md#import_mc_org_connections) | **POST** /mediacenter/v1/import/mc_org | Import Mediacenter Organisation Connection +*MEDIACENTERV1Api* | [**import_mediacenters**](docs/MEDIACENTERV1Api.md#import_mediacenters) | **POST** /mediacenter/v1/import/mediacenters | Import mediacenters +*MEDIACENTERV1Api* | [**import_organisations**](docs/MEDIACENTERV1Api.md#import_organisations) | **POST** /mediacenter/v1/import/organisations | Import Organisations +*MEDIACENTERV1Api* | [**remove_mediacenter_group**](docs/MEDIACENTERV1Api.md#remove_mediacenter_group) | **DELETE** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group} | delete a group that is managed by the given mediacenter +*NETWORKV1Api* | [**add_service**](docs/NETWORKV1Api.md#add_service) | **POST** /network/v1/services | Register service. +*NETWORKV1Api* | [**get_repositories**](docs/NETWORKV1Api.md#get_repositories) | **GET** /network/v1/repositories | Get repositories. +*NETWORKV1Api* | [**get_service**](docs/NETWORKV1Api.md#get_service) | **GET** /network/v1/service | Get own service. +*NETWORKV1Api* | [**get_services**](docs/NETWORKV1Api.md#get_services) | **GET** /network/v1/services | Get services. +*NETWORKV1Api* | [**update_service**](docs/NETWORKV1Api.md#update_service) | **PUT** /network/v1/services/{id} | Update a service. +*NODEV1Api* | [**add_aspects**](docs/NODEV1Api.md#add_aspects) | **PUT** /node/v1/nodes/{repository}/{node}/aspects | Add aspect to node. 
+*NODEV1Api* | [**add_workflow_history**](docs/NODEV1Api.md#add_workflow_history) | **PUT** /node/v1/nodes/{repository}/{node}/workflow | Add workflow. +*NODEV1Api* | [**change_content1**](docs/NODEV1Api.md#change_content1) | **POST** /node/v1/nodes/{repository}/{node}/content | Change content of node. +*NODEV1Api* | [**change_content_as_text**](docs/NODEV1Api.md#change_content_as_text) | **POST** /node/v1/nodes/{repository}/{node}/textContent | Change content of node as text. +*NODEV1Api* | [**change_metadata**](docs/NODEV1Api.md#change_metadata) | **PUT** /node/v1/nodes/{repository}/{node}/metadata | Change metadata of node. +*NODEV1Api* | [**change_metadata_with_versioning**](docs/NODEV1Api.md#change_metadata_with_versioning) | **POST** /node/v1/nodes/{repository}/{node}/metadata | Change metadata of node (new version). +*NODEV1Api* | [**change_preview**](docs/NODEV1Api.md#change_preview) | **POST** /node/v1/nodes/{repository}/{node}/preview | Change preview of node. +*NODEV1Api* | [**change_template_metadata**](docs/NODEV1Api.md#change_template_metadata) | **PUT** /node/v1/nodes/{repository}/{node}/metadata/template | Set the metadata template for this folder. +*NODEV1Api* | [**copy_metadata**](docs/NODEV1Api.md#copy_metadata) | **PUT** /node/v1/nodes/{repository}/{node}/metadata/copy/{from} | Copy metadata from another node. +*NODEV1Api* | [**create_child**](docs/NODEV1Api.md#create_child) | **POST** /node/v1/nodes/{repository}/{node}/children | Create a new child. +*NODEV1Api* | [**create_child_by_copying**](docs/NODEV1Api.md#create_child_by_copying) | **POST** /node/v1/nodes/{repository}/{node}/children/_copy | Create a new child by copying. +*NODEV1Api* | [**create_child_by_moving**](docs/NODEV1Api.md#create_child_by_moving) | **POST** /node/v1/nodes/{repository}/{node}/children/_move | Create a new child by moving. +*NODEV1Api* | [**create_fork_of_node**](docs/NODEV1Api.md#create_fork_of_node) | **POST** /node/v1/nodes/{repository}/{node}/children/_fork | Create a copy of a node by creating a forked version (variant). +*NODEV1Api* | [**create_share**](docs/NODEV1Api.md#create_share) | **PUT** /node/v1/nodes/{repository}/{node}/shares | Create a share for a node. +*NODEV1Api* | [**delete**](docs/NODEV1Api.md#delete) | **DELETE** /node/v1/nodes/{repository}/{node} | Delete node. +*NODEV1Api* | [**delete_preview**](docs/NODEV1Api.md#delete_preview) | **DELETE** /node/v1/nodes/{repository}/{node}/preview | Delete preview of node. +*NODEV1Api* | [**get_assocs**](docs/NODEV1Api.md#get_assocs) | **GET** /node/v1/nodes/{repository}/{node}/assocs | Get related nodes. +*NODEV1Api* | [**get_children**](docs/NODEV1Api.md#get_children) | **GET** /node/v1/nodes/{repository}/{node}/children | Get children of node. +*NODEV1Api* | [**get_lrmi_data**](docs/NODEV1Api.md#get_lrmi_data) | **GET** /node/v1/nodes/{repository}/{node}/lrmi | Get lrmi data. +*NODEV1Api* | [**get_metadata**](docs/NODEV1Api.md#get_metadata) | **GET** /node/v1/nodes/{repository}/{node}/metadata | Get metadata of node. +*NODEV1Api* | [**get_nodes**](docs/NODEV1Api.md#get_nodes) | **POST** /node/v1/nodes/{repository} | Searching nodes. +*NODEV1Api* | [**get_notify_list**](docs/NODEV1Api.md#get_notify_list) | **GET** /node/v1/nodes/{repository}/{node}/notifys | Get notifys (sharing history) of the node. +*NODEV1Api* | [**get_parents**](docs/NODEV1Api.md#get_parents) | **GET** /node/v1/nodes/{repository}/{node}/parents | Get parents of node. 
+*NODEV1Api* | [**get_permission**](docs/NODEV1Api.md#get_permission) | **GET** /node/v1/nodes/{repository}/{node}/permissions | Get all permission of node. +*NODEV1Api* | [**get_published_copies**](docs/NODEV1Api.md#get_published_copies) | **GET** /node/v1/nodes/{repository}/{node}/publish | Publish +*NODEV1Api* | [**get_shares**](docs/NODEV1Api.md#get_shares) | **GET** /node/v1/nodes/{repository}/{node}/shares | Get shares of node. +*NODEV1Api* | [**get_stats**](docs/NODEV1Api.md#get_stats) | **GET** /node/v1/nodes/{repository}/{node}/stats | Get statistics of node. +*NODEV1Api* | [**get_template_metadata**](docs/NODEV1Api.md#get_template_metadata) | **GET** /node/v1/nodes/{repository}/{node}/metadata/template | Get the metadata template + status for this folder. +*NODEV1Api* | [**get_text_content**](docs/NODEV1Api.md#get_text_content) | **GET** /node/v1/nodes/{repository}/{node}/textContent | Get the text content of a document. +*NODEV1Api* | [**get_version_metadata**](docs/NODEV1Api.md#get_version_metadata) | **GET** /node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/metadata | Get metadata of node version. +*NODEV1Api* | [**get_versions**](docs/NODEV1Api.md#get_versions) | **GET** /node/v1/nodes/{repository}/{node}/versions | Get all versions of node. +*NODEV1Api* | [**get_versions1**](docs/NODEV1Api.md#get_versions1) | **GET** /node/v1/nodes/{repository}/{node}/versions/metadata | Get all versions of node, including it's metadata. +*NODEV1Api* | [**get_workflow_history**](docs/NODEV1Api.md#get_workflow_history) | **GET** /node/v1/nodes/{repository}/{node}/workflow | Get workflow history. +*NODEV1Api* | [**has_permission**](docs/NODEV1Api.md#has_permission) | **GET** /node/v1/nodes/{repository}/{node}/permissions/{user} | Which permissions has user/group for node. +*NODEV1Api* | [**import_node**](docs/NODEV1Api.md#import_node) | **POST** /node/v1/nodes/{repository}/{node}/import | Import node +*NODEV1Api* | [**islocked**](docs/NODEV1Api.md#islocked) | **GET** /node/v1/nodes/{repository}/{node}/lock/status | locked status of a node. +*NODEV1Api* | [**prepare_usage**](docs/NODEV1Api.md#prepare_usage) | **POST** /node/v1/nodes/{repository}/{node}/prepareUsage | create remote object and get properties. +*NODEV1Api* | [**publish_copy**](docs/NODEV1Api.md#publish_copy) | **POST** /node/v1/nodes/{repository}/{node}/publish | Publish +*NODEV1Api* | [**remove_share**](docs/NODEV1Api.md#remove_share) | **DELETE** /node/v1/nodes/{repository}/{node}/shares/{shareId} | Remove share of a node. +*NODEV1Api* | [**report_node**](docs/NODEV1Api.md#report_node) | **POST** /node/v1/nodes/{repository}/{node}/report | Report the node. +*NODEV1Api* | [**revert_version**](docs/NODEV1Api.md#revert_version) | **PUT** /node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/_revert | Revert to node version. +*NODEV1Api* | [**set_owner**](docs/NODEV1Api.md#set_owner) | **POST** /node/v1/nodes/{repository}/{node}/owner | Set owner of node. +*NODEV1Api* | [**set_permission**](docs/NODEV1Api.md#set_permission) | **POST** /node/v1/nodes/{repository}/{node}/permissions | Set local permissions of node. +*NODEV1Api* | [**set_property**](docs/NODEV1Api.md#set_property) | **POST** /node/v1/nodes/{repository}/{node}/property | Set single property of node. 
+*NODEV1Api* | [**store_x_api_data**](docs/NODEV1Api.md#store_x_api_data) | **POST** /node/v1/nodes/{repository}/{node}/xapi | Store xApi-Conform data for a given node +*NODEV1Api* | [**unlock**](docs/NODEV1Api.md#unlock) | **GET** /node/v1/nodes/{repository}/{node}/lock/unlock | unlock node. +*NODEV1Api* | [**update_share**](docs/NODEV1Api.md#update_share) | **POST** /node/v1/nodes/{repository}/{node}/shares/{shareId} | update share of a node. +*NOTIFICATIONV1Api* | [**delete_notification**](docs/NOTIFICATIONV1Api.md#delete_notification) | **DELETE** /notification/v1/notifications | Endpoint to delete notification by id +*NOTIFICATIONV1Api* | [**get_config2**](docs/NOTIFICATIONV1Api.md#get_config2) | **GET** /notification/v1/config | get the config for notifications of the current user +*NOTIFICATIONV1Api* | [**get_notifications**](docs/NOTIFICATIONV1Api.md#get_notifications) | **GET** /notification/v1/notifications | Retrieve stored notification, filtered by receiver and status +*NOTIFICATIONV1Api* | [**set_config1**](docs/NOTIFICATIONV1Api.md#set_config1) | **PUT** /notification/v1/config | Update the config for notifications of the current user +*NOTIFICATIONV1Api* | [**update_notification_status**](docs/NOTIFICATIONV1Api.md#update_notification_status) | **PUT** /notification/v1/notifications/status | Endpoint to update the notification status +*NOTIFICATIONV1Api* | [**update_notification_status_by_receiver_id**](docs/NOTIFICATIONV1Api.md#update_notification_status_by_receiver_id) | **PUT** /notification/v1/notifications/receiver/status | Endpoint to update the notification status +*ORGANIZATIONV1Api* | [**create_organizations**](docs/ORGANIZATIONV1Api.md#create_organizations) | **PUT** /organization/v1/organizations/{repository}/{organization} | create organization in repository. +*ORGANIZATIONV1Api* | [**delete_organizations**](docs/ORGANIZATIONV1Api.md#delete_organizations) | **DELETE** /organization/v1/organizations/{repository}/{organization} | Delete organization of repository. +*ORGANIZATIONV1Api* | [**get_organization**](docs/ORGANIZATIONV1Api.md#get_organization) | **GET** /organization/v1/organizations/{repository}/{organization} | Get organization by id. +*ORGANIZATIONV1Api* | [**get_organizations**](docs/ORGANIZATIONV1Api.md#get_organizations) | **GET** /organization/v1/organizations/{repository} | Get organizations of repository. +*ORGANIZATIONV1Api* | [**remove_from_organization**](docs/ORGANIZATIONV1Api.md#remove_from_organization) | **DELETE** /organization/v1/organizations/{repository}/{organization}/member/{member} | Remove member from organization. 
+*RATINGV1Api* | [**add_or_update_rating**](docs/RATINGV1Api.md#add_or_update_rating) | **PUT** /rating/v1/ratings/{repository}/{node} | create or update a rating +*RATINGV1Api* | [**delete_rating**](docs/RATINGV1Api.md#delete_rating) | **DELETE** /rating/v1/ratings/{repository}/{node} | delete a comment +*RATINGV1Api* | [**get_accumulated_ratings**](docs/RATINGV1Api.md#get_accumulated_ratings) | **GET** /rating/v1/ratings/{repository}/{node}/history | get the range of nodes which had tracked actions since a given timestamp +*RATINGV1Api* | [**get_nodes_altered_in_range**](docs/RATINGV1Api.md#get_nodes_altered_in_range) | **GET** /rating/v1/ratings/{repository}/nodes/altered | get the range of nodes which had tracked actions since a given timestamp +*REGISTERV1Api* | [**activate**](docs/REGISTERV1Api.md#activate) | **POST** /register/v1/activate/{key} | Activate a new user (by using a supplied key) +*REGISTERV1Api* | [**mail_exists**](docs/REGISTERV1Api.md#mail_exists) | **GET** /register/v1/exists/{mail} | Check if the given mail is already successfully registered +*REGISTERV1Api* | [**recover_password**](docs/REGISTERV1Api.md#recover_password) | **POST** /register/v1/recover/{mail} | Send a mail to recover/reset password +*REGISTERV1Api* | [**register**](docs/REGISTERV1Api.md#register) | **POST** /register/v1/register | Register a new user +*REGISTERV1Api* | [**resend_mail**](docs/REGISTERV1Api.md#resend_mail) | **POST** /register/v1/resend/{mail} | Resend a registration mail for a given mail address +*REGISTERV1Api* | [**reset_password**](docs/REGISTERV1Api.md#reset_password) | **POST** /register/v1/reset/{key}/{password} | Send a mail to recover/reset password +*RELATIONV1Api* | [**create_relation**](docs/RELATIONV1Api.md#create_relation) | **PUT** /relation/v1/relation/{repository}/{source}/{type}/{target} | create a relation between nodes +*RELATIONV1Api* | [**delete_relation**](docs/RELATIONV1Api.md#delete_relation) | **DELETE** /relation/v1/relation/{repository}/{source}/{type}/{target} | delete a relation between nodes +*RELATIONV1Api* | [**get_relations**](docs/RELATIONV1Api.md#get_relations) | **GET** /relation/v1/relation/{repository}/{node} | get all relation of the node +*RENDERINGV1Api* | [**get_details_snippet1**](docs/RENDERINGV1Api.md#get_details_snippet1) | **GET** /rendering/v1/details/{repository}/{node} | Get metadata of node. +*RENDERINGV1Api* | [**get_details_snippet_with_parameters**](docs/RENDERINGV1Api.md#get_details_snippet_with_parameters) | **POST** /rendering/v1/details/{repository}/{node} | Get metadata of node. +*SEARCHV1Api* | [**get_metdata**](docs/SEARCHV1Api.md#get_metdata) | **GET** /search/v1/metadata/{repository} | get nodes with metadata and collections +*SEARCHV1Api* | [**get_relevant_nodes**](docs/SEARCHV1Api.md#get_relevant_nodes) | **GET** /search/v1/relevant/{repository} | Get relevant nodes for the current user +*SEARCHV1Api* | [**load_save_search**](docs/SEARCHV1Api.md#load_save_search) | **GET** /search/v1/queries/load/{nodeId} | Load a saved search query. +*SEARCHV1Api* | [**save_search**](docs/SEARCHV1Api.md#save_search) | **POST** /search/v1/queries/{repository}/{metadataset}/{query}/save | Save a search query. +*SEARCHV1Api* | [**search**](docs/SEARCHV1Api.md#search) | **POST** /search/v1/queries/{repository}/{metadataset}/{query} | Perform queries based on metadata sets. 
+*SEARCHV1Api* | [**search_by_property**](docs/SEARCHV1Api.md#search_by_property) | **GET** /search/v1/custom/{repository} | Search for custom properties with custom values +*SEARCHV1Api* | [**search_contributor**](docs/SEARCHV1Api.md#search_contributor) | **GET** /search/v1/queries/{repository}/contributor | Search for contributors +*SEARCHV1Api* | [**search_facets**](docs/SEARCHV1Api.md#search_facets) | **POST** /search/v1/queries/{repository}/{metadataset}/{query}/facets | Search in facets. +*SEARCHV1Api* | [**search_fingerprint**](docs/SEARCHV1Api.md#search_fingerprint) | **POST** /search/v1/queries/{repository}/fingerprint/{nodeid} | Perform queries based on metadata sets. +*SEARCHV1Api* | [**search_lrmi**](docs/SEARCHV1Api.md#search_lrmi) | **POST** /search/v1/queries/{repository}/{metadataset}/{query}/lrmi | Perform queries based on metadata sets. +*SHARINGV1Api* | [**get_children1**](docs/SHARINGV1Api.md#get_children1) | **GET** /sharing/v1/sharing/{repository}/{node}/{share}/children | Get all children of this share. +*SHARINGV1Api* | [**get_info**](docs/SHARINGV1Api.md#get_info) | **GET** /sharing/v1/sharing/{repository}/{node}/{share} | Get general info of a share. +*STATISTICV1Api* | [**get**](docs/STATISTICV1Api.md#get) | **POST** /statistic/v1/facets/{context} | Get statistics of repository. +*STATISTICV1Api* | [**get_global_statistics**](docs/STATISTICV1Api.md#get_global_statistics) | **GET** /statistic/v1/public | Get stats. +*STATISTICV1Api* | [**get_node_data**](docs/STATISTICV1Api.md#get_node_data) | **GET** /statistic/v1/statistics/nodes/node/{id} | get the range of nodes which had tracked actions since a given timestamp +*STATISTICV1Api* | [**get_nodes_altered_in_range1**](docs/STATISTICV1Api.md#get_nodes_altered_in_range1) | **GET** /statistic/v1/statistics/nodes/altered | get the range of nodes which had tracked actions since a given timestamp +*STATISTICV1Api* | [**get_statistics_node**](docs/STATISTICV1Api.md#get_statistics_node) | **POST** /statistic/v1/statistics/nodes | get statistics for node actions +*STATISTICV1Api* | [**get_statistics_user**](docs/STATISTICV1Api.md#get_statistics_user) | **POST** /statistic/v1/statistics/users | get statistics for user actions (login, logout) +*STREAMV1Api* | [**add_entry**](docs/STREAMV1Api.md#add_entry) | **PUT** /stream/v1/add/{repository} | add a new stream object. +*STREAMV1Api* | [**can_access**](docs/STREAMV1Api.md#can_access) | **GET** /stream/v1/access/{repository}/{node} | test +*STREAMV1Api* | [**delete_entry**](docs/STREAMV1Api.md#delete_entry) | **DELETE** /stream/v1/delete/{repository}/{entry} | delete a stream object +*STREAMV1Api* | [**get_property_values**](docs/STREAMV1Api.md#get_property_values) | **GET** /stream/v1/properties/{repository}/{property} | Get top values for a property +*STREAMV1Api* | [**search1**](docs/STREAMV1Api.md#search1) | **POST** /stream/v1/search/{repository} | Get the stream content for the current user with the given status. +*STREAMV1Api* | [**update_entry**](docs/STREAMV1Api.md#update_entry) | **PUT** /stream/v1/status/{repository}/{entry} | update status for a stream object and authority +*TOOLV1Api* | [**create_tool_defintition**](docs/TOOLV1Api.md#create_tool_defintition) | **POST** /tool/v1/tools/{repository}/tooldefinitions | Create a new tool definition object. +*TOOLV1Api* | [**create_tool_instance**](docs/TOOLV1Api.md#create_tool_instance) | **POST** /tool/v1/tools/{repository}/{toolDefinition}/toolinstances | Create a new tool Instance object. 
+*TOOLV1Api* | [**create_tool_object**](docs/TOOLV1Api.md#create_tool_object) | **POST** /tool/v1/tools/{repository}/{toolinstance}/toolobject | Create a new tool object for a given tool instance. +*TOOLV1Api* | [**get_all_tool_definitions**](docs/TOOLV1Api.md#get_all_tool_definitions) | **GET** /tool/v1/tools/{repository}/tooldefinitions | Get all ToolDefinitions. +*TOOLV1Api* | [**get_instance**](docs/TOOLV1Api.md#get_instance) | **GET** /tool/v1/tools/{repository}/{nodeid}/toolinstance | Get Instances of a ToolDefinition. +*TOOLV1Api* | [**get_instances**](docs/TOOLV1Api.md#get_instances) | **GET** /tool/v1/tools/{repository}/{toolDefinition}/toolinstances | Get Instances of a ToolDefinition. +*TRACKINGV1Api* | [**track_event**](docs/TRACKINGV1Api.md#track_event) | **PUT** /tracking/v1/tracking/{repository}/{event} | Track a user interaction +*USAGEV1Api* | [**delete_usage**](docs/USAGEV1Api.md#delete_usage) | **DELETE** /usage/v1/usages/node/{nodeId}/{usageId} | Delete an usage of a node. +*USAGEV1Api* | [**get_usages**](docs/USAGEV1Api.md#get_usages) | **GET** /usage/v1/usages/{appId} | Get all usages of an application. +*USAGEV1Api* | [**get_usages1**](docs/USAGEV1Api.md#get_usages1) | **GET** /usage/v1/usages/repository/{repositoryId}/{nodeId} | +*USAGEV1Api* | [**get_usages_by_course**](docs/USAGEV1Api.md#get_usages_by_course) | **GET** /usage/v1/usages/course/{appId}/{courseId} | Get all usages of an course. +*USAGEV1Api* | [**get_usages_by_node**](docs/USAGEV1Api.md#get_usages_by_node) | **GET** /usage/v1/usages/node/{nodeId} | Get all usages of an node. +*USAGEV1Api* | [**get_usages_by_node_collections**](docs/USAGEV1Api.md#get_usages_by_node_collections) | **GET** /usage/v1/usages/node/{nodeId}/collections | Get all collections where this node is used. +*USAGEV1Api* | [**set_usage**](docs/USAGEV1Api.md#set_usage) | **POST** /usage/v1/usages/repository/{repositoryId} | Set a usage for a node. app signature headers and authenticated user required. 
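+
+All of the endpoint wrappers above follow the same pattern as the `ABOUTApi` call in the Getting Started section: instantiate the *Api class with an `ApiClient` and invoke the listed method. Below is a minimal sketch of an authenticated call; the `username`/`password` arguments on `Configuration`, the keyword parameter names `repository` and `node`, the placeholder node id, and the `-home-` repository alias are assumptions rather than taken from this README, while the method names `login()` and `get_metadata()` come from the table above.
+
+```python
+import edu_sharing_client
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Assumption: the repository is protected by HTTP basic auth and the generated
+# Configuration passes username/password through to it.
+configuration = edu_sharing_client.Configuration(
+    host="https://stable.demo.edu-sharing.net/edu-sharing/rest",
+    username="admin",   # placeholder credentials
+    password="admin",
+)
+
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    auth_api = edu_sharing_client.AUTHENTICATIONV1Api(api_client)
+    node_api = edu_sharing_client.NODEV1Api(api_client)
+    try:
+        # GET /authentication/v1/validateSession - validate the Basic Auth credentials.
+        login_info = auth_api.login()
+        pprint(login_info)
+
+        # GET /node/v1/nodes/{repository}/{node}/metadata - read metadata of a single node.
+        # "-home-" is commonly used as the alias of the local repository (assumption),
+        # and "some-node-id" is a placeholder for a real node UUID.
+        node_entry = node_api.get_metadata(repository="-home-", node="some-node-id")
+        pprint(node_entry)
+    except ApiException as e:
+        print("Exception when calling the edu-sharing API: %s\n" % e)
+```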
+ + +## Documentation For Models + + - [ACE](docs/ACE.md) + - [ACL](docs/ACL.md) + - [About](docs/About.md) + - [AboutService](docs/AboutService.md) + - [AbstractEntries](docs/AbstractEntries.md) + - [AddToCollectionEventDTO](docs/AddToCollectionEventDTO.md) + - [Admin](docs/Admin.md) + - [AdminStatistics](docs/AdminStatistics.md) + - [Application](docs/Application.md) + - [Audience](docs/Audience.md) + - [AuthenticationToken](docs/AuthenticationToken.md) + - [Authority](docs/Authority.md) + - [AuthorityEntries](docs/AuthorityEntries.md) + - [AvailableMds](docs/AvailableMds.md) + - [Banner](docs/Banner.md) + - [CacheCluster](docs/CacheCluster.md) + - [CacheInfo](docs/CacheInfo.md) + - [CacheMember](docs/CacheMember.md) + - [Catalog](docs/Catalog.md) + - [Collection](docs/Collection.md) + - [CollectionCounts](docs/CollectionCounts.md) + - [CollectionDTO](docs/CollectionDTO.md) + - [CollectionEntries](docs/CollectionEntries.md) + - [CollectionEntry](docs/CollectionEntry.md) + - [CollectionOptions](docs/CollectionOptions.md) + - [CollectionProposalEntries](docs/CollectionProposalEntries.md) + - [CollectionReference](docs/CollectionReference.md) + - [Collections](docs/Collections.md) + - [CollectionsResult](docs/CollectionsResult.md) + - [Comment](docs/Comment.md) + - [CommentEventDTO](docs/CommentEventDTO.md) + - [Comments](docs/Comments.md) + - [Condition](docs/Condition.md) + - [Config](docs/Config.md) + - [ConfigFrontpage](docs/ConfigFrontpage.md) + - [ConfigPrivacy](docs/ConfigPrivacy.md) + - [ConfigPublish](docs/ConfigPublish.md) + - [ConfigRating](docs/ConfigRating.md) + - [ConfigRemote](docs/ConfigRemote.md) + - [ConfigThemeColor](docs/ConfigThemeColor.md) + - [ConfigThemeColors](docs/ConfigThemeColors.md) + - [ConfigTutorial](docs/ConfigTutorial.md) + - [ConfigUpload](docs/ConfigUpload.md) + - [ConfigWorkflow](docs/ConfigWorkflow.md) + - [ConfigWorkflowList](docs/ConfigWorkflowList.md) + - [Connector](docs/Connector.md) + - [ConnectorFileType](docs/ConnectorFileType.md) + - [ConnectorList](docs/ConnectorList.md) + - [Content](docs/Content.md) + - [ContextMenuEntry](docs/ContextMenuEntry.md) + - [Contributor](docs/Contributor.md) + - [Counts](docs/Counts.md) + - [Create](docs/Create.md) + - [CreateUsage](docs/CreateUsage.md) + - [DeleteOption](docs/DeleteOption.md) + - [DynamicConfig](docs/DynamicConfig.md) + - [DynamicRegistrationToken](docs/DynamicRegistrationToken.md) + - [DynamicRegistrationTokens](docs/DynamicRegistrationTokens.md) + - [Element](docs/Element.md) + - [ErrorResponse](docs/ErrorResponse.md) + - [ExcelResult](docs/ExcelResult.md) + - [Facet](docs/Facet.md) + - [FeatureInfo](docs/FeatureInfo.md) + - [FeedbackData](docs/FeedbackData.md) + - [FeedbackResult](docs/FeedbackResult.md) + - [Filter](docs/Filter.md) + - [FilterEntry](docs/FilterEntry.md) + - [FontIcon](docs/FontIcon.md) + - [Frontpage](docs/Frontpage.md) + - [General](docs/General.md) + - [Geo](docs/Geo.md) + - [Group](docs/Group.md) + - [GroupEntries](docs/GroupEntries.md) + - [GroupEntry](docs/GroupEntry.md) + - [GroupProfile](docs/GroupProfile.md) + - [GroupSignupDetails](docs/GroupSignupDetails.md) + - [Guest](docs/Guest.md) + - [HandleParam](docs/HandleParam.md) + - [HelpMenuOptions](docs/HelpMenuOptions.md) + - [HomeFolderOptions](docs/HomeFolderOptions.md) + - [Icon](docs/Icon.md) + - [Image](docs/Image.md) + - [Interface](docs/Interface.md) + - [InviteEventDTO](docs/InviteEventDTO.md) + - [JSONObject](docs/JSONObject.md) + - [Job](docs/Job.md) + - [JobBuilder](docs/JobBuilder.md) + - 
[JobDataMap](docs/JobDataMap.md) + - [JobDescription](docs/JobDescription.md) + - [JobDetail](docs/JobDetail.md) + - [JobDetailJobDataMap](docs/JobDetailJobDataMap.md) + - [JobEntry](docs/JobEntry.md) + - [JobFieldDescription](docs/JobFieldDescription.md) + - [JobInfo](docs/JobInfo.md) + - [JobKey](docs/JobKey.md) + - [KeyValuePair](docs/KeyValuePair.md) + - [LTIPlatformConfiguration](docs/LTIPlatformConfiguration.md) + - [LTISession](docs/LTISession.md) + - [LTIToolConfiguration](docs/LTIToolConfiguration.md) + - [Language](docs/Language.md) + - [Level](docs/Level.md) + - [License](docs/License.md) + - [LicenseAgreement](docs/LicenseAgreement.md) + - [LicenseAgreementNode](docs/LicenseAgreementNode.md) + - [Licenses](docs/Licenses.md) + - [Location](docs/Location.md) + - [LogEntry](docs/LogEntry.md) + - [LoggerConfigResult](docs/LoggerConfigResult.md) + - [Login](docs/Login.md) + - [LoginCredentials](docs/LoginCredentials.md) + - [LogoutInfo](docs/LogoutInfo.md) + - [Mainnav](docs/Mainnav.md) + - [ManualRegistrationData](docs/ManualRegistrationData.md) + - [McOrgConnectResult](docs/McOrgConnectResult.md) + - [Mds](docs/Mds.md) + - [MdsColumn](docs/MdsColumn.md) + - [MdsEntries](docs/MdsEntries.md) + - [MdsGroup](docs/MdsGroup.md) + - [MdsList](docs/MdsList.md) + - [MdsQueryCriteria](docs/MdsQueryCriteria.md) + - [MdsSort](docs/MdsSort.md) + - [MdsSortColumn](docs/MdsSortColumn.md) + - [MdsSortDefault](docs/MdsSortDefault.md) + - [MdsSubwidget](docs/MdsSubwidget.md) + - [MdsValue](docs/MdsValue.md) + - [MdsView](docs/MdsView.md) + - [MdsWidget](docs/MdsWidget.md) + - [MdsWidgetCondition](docs/MdsWidgetCondition.md) + - [Mediacenter](docs/Mediacenter.md) + - [MediacenterProfileExtension](docs/MediacenterProfileExtension.md) + - [MediacentersImportResult](docs/MediacentersImportResult.md) + - [MenuEntry](docs/MenuEntry.md) + - [Message](docs/Message.md) + - [MetadataSetInfo](docs/MetadataSetInfo.md) + - [MetadataSuggestionEventDTO](docs/MetadataSuggestionEventDTO.md) + - [Node](docs/Node.md) + - [NodeCollectionProposalCount](docs/NodeCollectionProposalCount.md) + - [NodeData](docs/NodeData.md) + - [NodeDataDTO](docs/NodeDataDTO.md) + - [NodeEntries](docs/NodeEntries.md) + - [NodeEntry](docs/NodeEntry.md) + - [NodeIssueEventDTO](docs/NodeIssueEventDTO.md) + - [NodeLTIDeepLink](docs/NodeLTIDeepLink.md) + - [NodeLocked](docs/NodeLocked.md) + - [NodePermissionEntry](docs/NodePermissionEntry.md) + - [NodePermissions](docs/NodePermissions.md) + - [NodeRef](docs/NodeRef.md) + - [NodeRelation](docs/NodeRelation.md) + - [NodeRemote](docs/NodeRemote.md) + - [NodeShare](docs/NodeShare.md) + - [NodeStats](docs/NodeStats.md) + - [NodeText](docs/NodeText.md) + - [NodeVersion](docs/NodeVersion.md) + - [NodeVersionEntries](docs/NodeVersionEntries.md) + - [NodeVersionEntry](docs/NodeVersionEntry.md) + - [NodeVersionRef](docs/NodeVersionRef.md) + - [NodeVersionRefEntries](docs/NodeVersionRefEntries.md) + - [NotificationConfig](docs/NotificationConfig.md) + - [NotificationEventDTO](docs/NotificationEventDTO.md) + - [NotificationIntervals](docs/NotificationIntervals.md) + - [NotificationResponsePage](docs/NotificationResponsePage.md) + - [NotifyEntry](docs/NotifyEntry.md) + - [OpenIdConfiguration](docs/OpenIdConfiguration.md) + - [OpenIdRegistrationResult](docs/OpenIdRegistrationResult.md) + - [OrganisationsImportResult](docs/OrganisationsImportResult.md) + - [Organization](docs/Organization.md) + - [OrganizationEntries](docs/OrganizationEntries.md) + - [Pageable](docs/Pageable.md) + - 
[Pagination](docs/Pagination.md) + - [Parameters](docs/Parameters.md) + - [ParentEntries](docs/ParentEntries.md) + - [Person](docs/Person.md) + - [PersonDeleteOptions](docs/PersonDeleteOptions.md) + - [PersonDeleteResult](docs/PersonDeleteResult.md) + - [PersonReport](docs/PersonReport.md) + - [PluginInfo](docs/PluginInfo.md) + - [PluginStatus](docs/PluginStatus.md) + - [Preferences](docs/Preferences.md) + - [Preview](docs/Preview.md) + - [Profile](docs/Profile.md) + - [ProfileSettings](docs/ProfileSettings.md) + - [ProposeForCollectionEventDTO](docs/ProposeForCollectionEventDTO.md) + - [Provider](docs/Provider.md) + - [Query](docs/Query.md) + - [RatingData](docs/RatingData.md) + - [RatingDetails](docs/RatingDetails.md) + - [RatingEventDTO](docs/RatingEventDTO.md) + - [RatingHistory](docs/RatingHistory.md) + - [ReferenceEntries](docs/ReferenceEntries.md) + - [Register](docs/Register.md) + - [RegisterExists](docs/RegisterExists.md) + - [RegisterInformation](docs/RegisterInformation.md) + - [RegistrationUrl](docs/RegistrationUrl.md) + - [RelationData](docs/RelationData.md) + - [Remote](docs/Remote.md) + - [RemoteAuthDescription](docs/RemoteAuthDescription.md) + - [Rendering](docs/Rendering.md) + - [RenderingDetailsEntry](docs/RenderingDetailsEntry.md) + - [RenderingGdpr](docs/RenderingGdpr.md) + - [Repo](docs/Repo.md) + - [RepoEntries](docs/RepoEntries.md) + - [RepositoryConfig](docs/RepositoryConfig.md) + - [RepositoryVersionInfo](docs/RepositoryVersionInfo.md) + - [RestoreResult](docs/RestoreResult.md) + - [RestoreResults](docs/RestoreResults.md) + - [SearchParameters](docs/SearchParameters.md) + - [SearchParametersFacets](docs/SearchParametersFacets.md) + - [SearchResult](docs/SearchResult.md) + - [SearchResultElastic](docs/SearchResultElastic.md) + - [SearchResultLrmi](docs/SearchResultLrmi.md) + - [SearchResultNode](docs/SearchResultNode.md) + - [SearchVCard](docs/SearchVCard.md) + - [ServerUpdateInfo](docs/ServerUpdateInfo.md) + - [Service](docs/Service.md) + - [ServiceInstance](docs/ServiceInstance.md) + - [ServiceVersion](docs/ServiceVersion.md) + - [Services](docs/Services.md) + - [SharedFolderOptions](docs/SharedFolderOptions.md) + - [SharingInfo](docs/SharingInfo.md) + - [SimpleEdit](docs/SimpleEdit.md) + - [SimpleEditGlobalGroups](docs/SimpleEditGlobalGroups.md) + - [SimpleEditOrganization](docs/SimpleEditOrganization.md) + - [Sort](docs/Sort.md) + - [StatisticEntity](docs/StatisticEntity.md) + - [StatisticEntry](docs/StatisticEntry.md) + - [Statistics](docs/Statistics.md) + - [StatisticsGlobal](docs/StatisticsGlobal.md) + - [StatisticsGroup](docs/StatisticsGroup.md) + - [StatisticsKeyGroup](docs/StatisticsKeyGroup.md) + - [StatisticsSubGroup](docs/StatisticsSubGroup.md) + - [StatisticsUser](docs/StatisticsUser.md) + - [StoredService](docs/StoredService.md) + - [Stream](docs/Stream.md) + - [StreamEntry](docs/StreamEntry.md) + - [StreamEntryInput](docs/StreamEntryInput.md) + - [StreamList](docs/StreamList.md) + - [SubGroupItem](docs/SubGroupItem.md) + - [Suggest](docs/Suggest.md) + - [Suggestion](docs/Suggestion.md) + - [SuggestionParam](docs/SuggestionParam.md) + - [Suggestions](docs/Suggestions.md) + - [Tool](docs/Tool.md) + - [Tools](docs/Tools.md) + - [Tracking](docs/Tracking.md) + - [TrackingAuthority](docs/TrackingAuthority.md) + - [TrackingNode](docs/TrackingNode.md) + - [UploadResult](docs/UploadResult.md) + - [Usage](docs/Usage.md) + - [Usages](docs/Usages.md) + - [User](docs/User.md) + - [UserCredential](docs/UserCredential.md) + - [UserDataDTO](docs/UserDataDTO.md) + - 
[UserEntries](docs/UserEntries.md) + - [UserEntry](docs/UserEntry.md) + - [UserProfile](docs/UserProfile.md) + - [UserProfileAppAuth](docs/UserProfileAppAuth.md) + - [UserProfileEdit](docs/UserProfileEdit.md) + - [UserQuota](docs/UserQuota.md) + - [UserSimple](docs/UserSimple.md) + - [UserStats](docs/UserStats.md) + - [UserStatus](docs/UserStatus.md) + - [Value](docs/Value.md) + - [ValueParameters](docs/ValueParameters.md) + - [Values](docs/Values.md) + - [Variables](docs/Variables.md) + - [Version](docs/Version.md) + - [VersionBuild](docs/VersionBuild.md) + - [VersionGit](docs/VersionGit.md) + - [VersionGitCommit](docs/VersionGitCommit.md) + - [VersionMaven](docs/VersionMaven.md) + - [VersionProject](docs/VersionProject.md) + - [VersionTimestamp](docs/VersionTimestamp.md) + - [WebsiteInformation](docs/WebsiteInformation.md) + - [WidgetDataDTO](docs/WidgetDataDTO.md) + - [WorkflowEventDTO](docs/WorkflowEventDTO.md) + - [WorkflowHistory](docs/WorkflowHistory.md) + + + +## Documentation For Authorization + +Endpoints do not require authorization. + + +## Author + + + + diff --git a/edu_sharing_openapi/docs/ABOUTApi.md b/edu_sharing_openapi/docs/ABOUTApi.md new file mode 100644 index 00000000..d04580a8 --- /dev/null +++ b/edu_sharing_openapi/docs/ABOUTApi.md @@ -0,0 +1,216 @@ +# edu_sharing_client.ABOUTApi + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**about**](ABOUTApi.md#about) | **GET** /_about | Discover the API. +[**licenses**](ABOUTApi.md#licenses) | **GET** /_about/licenses | License information. +[**status**](ABOUTApi.md#status) | **GET** /_about/status/{mode} | status of repo services + + +# **about** +> About about() + +Discover the API. + +Get all services provided by this API. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.about import About +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ABOUTApi(api_client) + + try: + # Discover the API. + api_response = api_instance.about() + print("The response of ABOUTApi->about:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ABOUTApi->about: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**About**](About.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**401** | Authorization failed. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **licenses** +> Licenses licenses() + +License information. 
+ +Get information about used 3rd-party licenses. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.licenses import Licenses +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ABOUTApi(api_client) + + try: + # License information. + api_response = api_instance.licenses() + print("The response of ABOUTApi->licenses:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ABOUTApi->licenses: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**Licenses**](Licenses.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **status** +> str status(mode, timeout_seconds=timeout_seconds) + +status of repo services + +returns http status 200 when ok + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ABOUTApi(api_client) + mode = 'mode_example' # str | + timeout_seconds = 10 # int | (optional) (default to 10) + + try: + # status of repo services + api_response = api_instance.status(mode, timeout_seconds=timeout_seconds) + print("The response of ABOUTApi->status:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ABOUTApi->status: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **mode** | **str**| | + **timeout_seconds** | **int**| | [optional] [default to 10] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. 
| - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/ACE.md b/edu_sharing_openapi/docs/ACE.md new file mode 100644 index 00000000..28cd2ed6 --- /dev/null +++ b/edu_sharing_openapi/docs/ACE.md @@ -0,0 +1,33 @@ +# ACE + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**editable** | **bool** | | [optional] +**authority** | [**Authority**](Authority.md) | | +**user** | [**UserProfile**](UserProfile.md) | | [optional] +**group** | [**GroupProfile**](GroupProfile.md) | | [optional] +**permissions** | **List[str]** | | + +## Example + +```python +from edu_sharing_client.models.ace import ACE + +# TODO update the JSON string below +json = "{}" +# create an instance of ACE from a JSON string +ace_instance = ACE.from_json(json) +# print the JSON string representation of the object +print(ACE.to_json()) + +# convert the object into a dict +ace_dict = ace_instance.to_dict() +# create an instance of ACE from a dict +ace_from_dict = ACE.from_dict(ace_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ACL.md b/edu_sharing_openapi/docs/ACL.md new file mode 100644 index 00000000..4ef7f50a --- /dev/null +++ b/edu_sharing_openapi/docs/ACL.md @@ -0,0 +1,30 @@ +# ACL + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**inherited** | **bool** | | +**permissions** | [**List[ACE]**](ACE.md) | | + +## Example + +```python +from edu_sharing_client.models.acl import ACL + +# TODO update the JSON string below +json = "{}" +# create an instance of ACL from a JSON string +acl_instance = ACL.from_json(json) +# print the JSON string representation of the object +print(ACL.to_json()) + +# convert the object into a dict +acl_dict = acl_instance.to_dict() +# create an instance of ACL from a dict +acl_from_dict = ACL.from_dict(acl_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ADMINV1Api.md b/edu_sharing_openapi/docs/ADMINV1Api.md new file mode 100644 index 00000000..171daf1e --- /dev/null +++ b/edu_sharing_openapi/docs/ADMINV1Api.md @@ -0,0 +1,3955 @@ +# edu_sharing_client.ADMINV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_application**](ADMINV1Api.md#add_application) | **PUT** /admin/v1/applications/xml | register/add an application via xml file +[**add_application1**](ADMINV1Api.md#add_application1) | **PUT** /admin/v1/applications | register/add an application +[**add_toolpermission**](ADMINV1Api.md#add_toolpermission) | **POST** /admin/v1/toolpermissions/add/{name} | add a new toolpermissions +[**apply_template**](ADMINV1Api.md#apply_template) | **POST** /admin/v1/applyTemplate | apply a folder template +[**cancel_job**](ADMINV1Api.md#cancel_job) | **DELETE** /admin/v1/jobs/{job} | cancel a running job +[**change_logging**](ADMINV1Api.md#change_logging) | **POST** /admin/v1/log/config | Change the loglevel for 
classes at runtime. +[**clear_cache**](ADMINV1Api.md#clear_cache) | **POST** /admin/v1/cache/clearCache | clear cache +[**create_preview**](ADMINV1Api.md#create_preview) | **GET** /admin/v1/nodes/preview/{node} | create preview. +[**delete_person**](ADMINV1Api.md#delete_person) | **PUT** /admin/v1/deletePersons | delete persons +[**export_by_lucene**](ADMINV1Api.md#export_by_lucene) | **GET** /admin/v1/lucene/export | Search for custom lucene query and choose specific properties to load +[**export_lom**](ADMINV1Api.md#export_lom) | **GET** /admin/v1/export/lom | Export Nodes with LOM Metadata Format +[**get_all_jobs**](ADMINV1Api.md#get_all_jobs) | **GET** /admin/v1/jobs/all | get all available jobs +[**get_all_toolpermissions**](ADMINV1Api.md#get_all_toolpermissions) | **GET** /admin/v1/toolpermissions/{authority} | get all toolpermissions for an authority +[**get_application_xml**](ADMINV1Api.md#get_application_xml) | **GET** /admin/v1/applications/{xml} | list any xml properties (like from homeApplication.properties.xml) +[**get_applications**](ADMINV1Api.md#get_applications) | **GET** /admin/v1/applications | list applications +[**get_cache_entries**](ADMINV1Api.md#get_cache_entries) | **GET** /admin/v1/cache/cacheEntries/{id} | Get entries of a cache +[**get_cache_info**](ADMINV1Api.md#get_cache_info) | **GET** /admin/v1/cache/cacheInfo/{id} | Get information about a cache +[**get_catalina_out**](ADMINV1Api.md#get_catalina_out) | **GET** /admin/v1/catalina | Get last info from catalina out +[**get_cluster**](ADMINV1Api.md#get_cluster) | **GET** /admin/v1/clusterInfo | Get information about the Cluster +[**get_clusters**](ADMINV1Api.md#get_clusters) | **GET** /admin/v1/clusterInfos | Get information about the Cluster +[**get_config**](ADMINV1Api.md#get_config) | **GET** /admin/v1/repositoryConfig | get the repository config object +[**get_config_file**](ADMINV1Api.md#get_config_file) | **GET** /admin/v1/configFile | get a base system config file (e.g. 
edu-sharing.conf) +[**get_enabled_plugins**](ADMINV1Api.md#get_enabled_plugins) | **GET** /admin/v1/plugins | get enabled system plugins +[**get_global_groups**](ADMINV1Api.md#get_global_groups) | **GET** /admin/v1/globalGroups | Get global groups +[**get_jobs**](ADMINV1Api.md#get_jobs) | **GET** /admin/v1/jobs | get all running jobs +[**get_lightbend_config**](ADMINV1Api.md#get_lightbend_config) | **GET** /admin/v1/config/merged | +[**get_logging_runtime**](ADMINV1Api.md#get_logging_runtime) | **GET** /admin/v1/log/config | get the logger config +[**get_oai_classes**](ADMINV1Api.md#get_oai_classes) | **GET** /admin/v1/import/oai/classes | Get OAI class names +[**get_property_to_mds**](ADMINV1Api.md#get_property_to_mds) | **GET** /admin/v1/propertyToMds | Get a Mds Valuespace for all values of the given properties +[**get_statistics**](ADMINV1Api.md#get_statistics) | **GET** /admin/v1/statistics | get statistics +[**get_version**](ADMINV1Api.md#get_version) | **GET** /admin/v1/version | get detailed version information +[**import_collections**](ADMINV1Api.md#import_collections) | **POST** /admin/v1/import/collections | import collections via a xml file +[**import_excel**](ADMINV1Api.md#import_excel) | **POST** /admin/v1/import/excel | Import excel data +[**import_oai**](ADMINV1Api.md#import_oai) | **POST** /admin/v1/import/oai | Import oai data +[**import_oai_xml**](ADMINV1Api.md#import_oai_xml) | **POST** /admin/v1/import/oai/xml | Import single xml via oai (for testing) +[**refresh_app_info**](ADMINV1Api.md#refresh_app_info) | **POST** /admin/v1/refreshAppInfo | refresh app info +[**refresh_cache**](ADMINV1Api.md#refresh_cache) | **POST** /admin/v1/import/refreshCache/{folder} | Refresh cache +[**refresh_edu_group_cache**](ADMINV1Api.md#refresh_edu_group_cache) | **POST** /admin/v1/cache/refreshEduGroupCache | Refresh the Edu Group Cache +[**remove_application**](ADMINV1Api.md#remove_application) | **DELETE** /admin/v1/applications/{id} | remove an application +[**remove_cache_entry**](ADMINV1Api.md#remove_cache_entry) | **POST** /admin/v1/cache/removeCacheEntry | remove cache entry +[**remove_oai_imports**](ADMINV1Api.md#remove_oai_imports) | **DELETE** /admin/v1/import/oai | Remove deleted imports +[**search_by_elastic_dsl**](ADMINV1Api.md#search_by_elastic_dsl) | **GET** /admin/v1/elastic | Search for custom elastic DSL query +[**search_by_lucene**](ADMINV1Api.md#search_by_lucene) | **GET** /admin/v1/lucene | Search for custom lucene query +[**server_update_list**](ADMINV1Api.md#server_update_list) | **GET** /admin/v1/serverUpdate/list | list available update tasks +[**server_update_list1**](ADMINV1Api.md#server_update_list1) | **POST** /admin/v1/serverUpdate/run/{id} | Run an update tasks +[**set_config**](ADMINV1Api.md#set_config) | **PUT** /admin/v1/repositoryConfig | set/update the repository config object +[**set_toolpermissions**](ADMINV1Api.md#set_toolpermissions) | **PUT** /admin/v1/toolpermissions/{authority} | set toolpermissions for an authority +[**start_job**](ADMINV1Api.md#start_job) | **POST** /admin/v1/job/{jobClass} | Start a Job. +[**start_job_sync**](ADMINV1Api.md#start_job_sync) | **POST** /admin/v1/job/{jobClass}/sync | Start a Job. 
+[**switch_authority**](ADMINV1Api.md#switch_authority) | **POST** /admin/v1/authenticate/{authorityName} | switch the session to a known authority name +[**test_mail**](ADMINV1Api.md#test_mail) | **POST** /admin/v1/mail/{receiver}/{template} | Test a mail template +[**update_application_xml**](ADMINV1Api.md#update_application_xml) | **PUT** /admin/v1/applications/{xml} | edit any properties xml (like homeApplication.properties.xml) +[**update_config_file**](ADMINV1Api.md#update_config_file) | **PUT** /admin/v1/configFile | update a base system config file (e.g. edu-sharing.conf) +[**upload_temp**](ADMINV1Api.md#upload_temp) | **PUT** /admin/v1/upload/temp/{name} | Upload a file + + +# **add_application** +> str add_application(xml) + +register/add an application via xml file + +register the xml file provided. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + xml = None # object | XML file for app to register + + try: + # register/add an application via xml file + api_response = api_instance.add_application(xml) + print("The response of ADMINV1Api->add_application:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->add_application: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **xml** | [**object**](object.md)| XML file for app to register | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **add_application1** +> str add_application1(url) + +register/add an application + +register the specified application. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + url = 'url_example' # str | Remote application metadata url + + try: + # register/add an application + api_response = api_instance.add_application1(url) + print("The response of ADMINV1Api->add_application1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->add_application1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **url** | **str**| Remote application metadata url | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **add_toolpermission** +> Node add_toolpermission(name) + +add a new toolpermissions + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node import Node +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + name = 'name_example' # str | Name/ID of toolpermission + + try: + # add a new toolpermissions + api_response = api_instance.add_toolpermission(name) + print("The response of ADMINV1Api->add_toolpermission:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->add_toolpermission: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **str**| Name/ID of toolpermission | + +### Return type + +[**Node**](Node.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **apply_template** +> apply_template(template, group, folder=folder) + +apply a folder template + +apply a folder template. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + template = 'template_example' # str | Template Filename + group = 'group_example' # str | Group name (authority name) + folder = 'folder_example' # str | Folder name (optional) + + try: + # apply a folder template + api_instance.apply_template(template, group, folder=folder) + except Exception as e: + print("Exception when calling ADMINV1Api->apply_template: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **template** | **str**| Template Filename | + **group** | **str**| Group name (authority name) | + **folder** | **str**| Folder name | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **cancel_job** +> cancel_job(job, force=force) + +cancel a running job + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + job = 'job_example' # str | + force = True # bool | (optional) + + try: + # cancel a running job + api_instance.cancel_job(job, force=force) + except Exception as e: + print("Exception when calling ADMINV1Api->cancel_job: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **job** | **str**| | + **force** | **bool**| | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_logging** +> change_logging(name, loglevel, appender=appender) + +Change the loglevel for classes at runtime. + +Root appenders are used. Check the appender treshold. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + name = 'name_example' # str | name + loglevel = 'loglevel_example' # str | loglevel + appender = 'ConsoleAppender' # str | appender (optional) (default to 'ConsoleAppender') + + try: + # Change the loglevel for classes at runtime. + api_instance.change_logging(name, loglevel, appender=appender) + except Exception as e: + print("Exception when calling ADMINV1Api->change_logging: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **str**| name | + **loglevel** | **str**| loglevel | + **appender** | **str**| appender | [optional] [default to 'ConsoleAppender'] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. 
| - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **clear_cache** +> clear_cache(bean=bean) + +clear cache + +clear cache + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + bean = 'bean_example' # str | bean (optional) + + try: + # clear cache + api_instance.clear_cache(bean=bean) + except Exception as e: + print("Exception when calling ADMINV1Api->clear_cache: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **bean** | **str**| bean | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_preview** +> create_preview(node) + +create preview. + +create preview. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + node = 'node_example' # str | ID of node + + try: + # create preview. 
+ api_instance.create_preview(node) + except Exception as e: + print("Exception when calling ADMINV1Api->create_preview: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_person** +> PersonReport delete_person(username, person_delete_options=person_delete_options) + +delete persons + +delete the given persons. Their status must be set to \"todelete\" + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.person_delete_options import PersonDeleteOptions +from edu_sharing_client.models.person_report import PersonReport +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + username = ['username_example'] # List[str] | names of the users to delete + person_delete_options = edu_sharing_client.PersonDeleteOptions() # PersonDeleteOptions | options object what and how to delete user contents (optional) + + try: + # delete persons + api_response = api_instance.delete_person(username, person_delete_options=person_delete_options) + print("The response of ADMINV1Api->delete_person:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->delete_person: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **username** | [**List[str]**](str.md)| names of the users to delete | + **person_delete_options** | [**PersonDeleteOptions**](PersonDeleteOptions.md)| options object what and how to delete user contents | [optional] + +### Return type + +[**PersonReport**](PersonReport.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. 
| - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **export_by_lucene** +> str export_by_lucene(query=query, sort_properties=sort_properties, sort_ascending=sort_ascending, properties=properties, store=store, authority_scope=authority_scope) + +Search for custom lucene query and choose specific properties to load + +e.g. @cm\\:name:\"*\" + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + query = '@cm\\:name:"*"' # str | query (optional) (default to '@cm\\:name:"*"') + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + properties = ['properties_example'] # List[str] | properties to fetch, use parent:: to include parent property values (optional) + store = 'store_example' # str | store, workspace or archive (optional) + authority_scope = ['authority_scope_example'] # List[str] | authority scope to search for (optional) + + try: + # Search for custom lucene query and choose specific properties to load + api_response = api_instance.export_by_lucene(query=query, sort_properties=sort_properties, sort_ascending=sort_ascending, properties=properties, store=store, authority_scope=authority_scope) + print("The response of ADMINV1Api->export_by_lucene:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->export_by_lucene: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **query** | **str**| query | [optional] [default to '@cm\\:name:"*"'] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **properties** | [**List[str]**](str.md)| properties to fetch, use parent::<property> to include parent property values | [optional] + **store** | **str**| store, workspace or archive | [optional] + **authority_scope** | [**List[str]**](str.md)| authority scope to search for | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. 
| - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **export_lom** +> export_lom(filter_query, target_dir, sub_object_handler) + +Export Nodes with LOM Metadata Format + +Export Nodes with LOM Metadata Format. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + filter_query = 'filter_query_example' # str | filterQuery + target_dir = 'target_dir_example' # str | targetDir + sub_object_handler = True # bool | subObjectHandler + + try: + # Export Nodes with LOM Metadata Format + api_instance.export_lom(filter_query, target_dir, sub_object_handler) + except Exception as e: + print("Exception when calling ADMINV1Api->export_lom: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **filter_query** | **str**| filterQuery | + **target_dir** | **str**| targetDir | + **sub_object_handler** | **bool**| subObjectHandler | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_all_jobs** +> str get_all_jobs() + +get all available jobs + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # get all available jobs + api_response = api_instance.get_all_jobs() + print("The response of ADMINV1Api->get_all_jobs:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_all_jobs: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_all_toolpermissions** +> str get_all_toolpermissions(authority) + +get all toolpermissions for an authority + +Returns explicit (rights set for this authority) + effective (resulting rights for this authority) toolpermission + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + authority = 'authority_example' # str | Authority to load (user or group) + + try: + # get all toolpermissions for an authority + api_response = api_instance.get_all_toolpermissions(authority) + print("The response of ADMINV1Api->get_all_toolpermissions:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_all_toolpermissions: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **authority** | **str**| Authority to load (user or group) | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_application_xml** +> str get_application_xml(xml) + +list any xml properties (like from homeApplication.properties.xml) + +list any xml properties (like from homeApplication.properties.xml) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + xml = 'xml_example' # str | Properties Filename (*.xml) + + try: + # list any xml properties (like from homeApplication.properties.xml) + api_response = api_instance.get_application_xml(xml) + print("The response of ADMINV1Api->get_application_xml:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_application_xml: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **xml** | **str**| Properties Filename (*.xml) | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_applications** +> str get_applications() + +list applications + +List all registered applications. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # list applications + api_response = api_instance.get_applications() + print("The response of ADMINV1Api->get_applications:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_applications: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_cache_entries** +> str get_cache_entries(id) + +Get entries of a cache + +Get entries of a cache. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + id = 'id_example' # str | Id/bean name of the cache + + try: + # Get entries of a cache + api_response = api_instance.get_cache_entries(id) + print("The response of ADMINV1Api->get_cache_entries:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_cache_entries: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| Id/bean name of the cache | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_cache_info** +> CacheInfo get_cache_info(id) + +Get information about a cache + +Get information about a cache. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.cache_info import CacheInfo +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + id = 'id_example' # str | Id/bean name of the cache + + try: + # Get information about a cache + api_response = api_instance.get_cache_info(id) + print("The response of ADMINV1Api->get_cache_info:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_cache_info: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| Id/bean name of the cache | + +### Return type + +[**CacheInfo**](CacheInfo.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_catalina_out** +> str get_catalina_out() + +Get last info from catalina out + +Get catalina.out log. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # Get last info from catalina out + api_response = api_instance.get_catalina_out() + print("The response of ADMINV1Api->get_catalina_out:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_catalina_out: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **get_cluster**
+> CacheCluster get_cluster()
+
+Get information about the Cluster
+
+Get information about the Cluster.
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.cache_cluster import CacheCluster
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.ADMINV1Api(api_client)
+
+    try:
+        # Get information about the Cluster
+        api_response = api_instance.get_cluster()
+        print("The response of ADMINV1Api->get_cluster:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling ADMINV1Api->get_cluster: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+
+### Return type
+
+[**CacheCluster**](CacheCluster.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | OK. | - |
+**400** | Preconditions are not present. | - |
+**401** | Authorization failed. | - |
+**403** | Session user has insufficient rights to perform this operation. | - |
+**404** | Resources are not found. | - |
+**500** | Fatal error occurred. | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **get_clusters**
+> CacheCluster get_clusters()
+
+Get information about the Cluster
+
+Get information about the Cluster.
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.cache_cluster import CacheCluster
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.ADMINV1Api(api_client)
+
+    try:
+        # Get information about the Cluster
+        api_response = api_instance.get_clusters()
+        print("The response of ADMINV1Api->get_clusters:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling ADMINV1Api->get_clusters: %s\n" % e)
+```
+
+
+
+### Parameters
+
+This endpoint does not need any parameter.
+ +### Return type + +[**CacheCluster**](CacheCluster.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_config** +> RepositoryConfig get_config() + +get the repository config object + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.repository_config import RepositoryConfig +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # get the repository config object + api_response = api_instance.get_config() + print("The response of ADMINV1Api->get_config:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_config: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**RepositoryConfig**](RepositoryConfig.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_config_file** +> str get_config_file(filename, path_prefix) + +get a base system config file (e.g. edu-sharing.conf) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + filename = 'filename_example' # str | filename to fetch + path_prefix = 'path_prefix_example' # str | path prefix this file belongs to + + try: + # get a base system config file (e.g. edu-sharing.conf) + api_response = api_instance.get_config_file(filename, path_prefix) + print("The response of ADMINV1Api->get_config_file:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_config_file: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **filename** | **str**| filename to fetch | + **path_prefix** | **str**| path prefix this file belongs to | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_enabled_plugins** +> str get_enabled_plugins() + +get enabled system plugins + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # get enabled system plugins + api_response = api_instance.get_enabled_plugins() + print("The response of ADMINV1Api->get_enabled_plugins:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_enabled_plugins: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_global_groups** +> str get_global_groups() + +Get global groups + +Get global groups (groups across repositories). + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # Get global groups + api_response = api_instance.get_global_groups() + print("The response of ADMINV1Api->get_global_groups:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_global_groups: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_jobs** +> str get_jobs() + +get all running jobs + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # get all running jobs + api_response = api_instance.get_jobs() + print("The response of ADMINV1Api->get_jobs:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_jobs: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. 
| - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_lightbend_config** +> object get_lightbend_config() + + + +Get the fully merged & parsed (lightbend) backend config + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + api_response = api_instance.get_lightbend_config() + print("The response of ADMINV1Api->get_lightbend_config:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_lightbend_config: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**object** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_logging_runtime** +> LoggerConfigResult get_logging_runtime(filters=filters, only_config=only_config) + +get the logger config + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.logger_config_result import LoggerConfigResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + filters = ['filters_example'] # List[str] | filters (optional) + only_config = True # bool | onlyConfig if true only loggers defined in log4j.xml or at runtime are returned (optional) + + try: + # get the logger config + api_response = api_instance.get_logging_runtime(filters=filters, only_config=only_config) + print("The response of ADMINV1Api->get_logging_runtime:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_logging_runtime: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **filters** | [**List[str]**](str.md)| filters | [optional] + **only_config** | **bool**| onlyConfig if true only loggers defined in log4j.xml or at runtime are returned | [optional] + +### Return type + +[**LoggerConfigResult**](LoggerConfigResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_oai_classes** +> str get_oai_classes() + +Get OAI class names + +Get available importer classes for OAI import. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # Get OAI class names + api_response = api_instance.get_oai_classes() + print("The response of ADMINV1Api->get_oai_classes:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_oai_classes: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. 
| - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_property_to_mds** +> str get_property_to_mds(properties) + +Get a Mds Valuespace for all values of the given properties + +Get a Mds Valuespace for all values of the given properties. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + properties = ['properties_example'] # List[str] | one or more properties + + try: + # Get a Mds Valuespace for all values of the given properties + api_response = api_instance.get_property_to_mds(properties) + print("The response of ADMINV1Api->get_property_to_mds:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_property_to_mds: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **properties** | [**List[str]**](str.md)| one or more properties | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_statistics** +> AdminStatistics get_statistics() + +get statistics + +get statistics. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.admin_statistics import AdminStatistics +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # get statistics + api_response = api_instance.get_statistics() + print("The response of ADMINV1Api->get_statistics:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_statistics: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**AdminStatistics**](AdminStatistics.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_version** +> RepositoryVersionInfo get_version() + +get detailed version information + +detailed information about the running system version + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.repository_version_info import RepositoryVersionInfo +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # get detailed version information + api_response = api_instance.get_version() + print("The response of ADMINV1Api->get_version:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->get_version: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**RepositoryVersionInfo**](RepositoryVersionInfo.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **import_collections** +> CollectionsResult import_collections(xml, parent=parent) + +import collections via a xml file + +xml file must be structured as defined by the xsd standard + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.collections_result import CollectionsResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + xml = None # object | XML file to parse (or zip file containing exactly 1 xml file to parse) + parent = 'parent_example' # str | Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level (optional) + + try: + # import collections via a xml file + api_response = api_instance.import_collections(xml, parent=parent) + print("The response of ADMINV1Api->import_collections:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->import_collections: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **xml** | [**object**](object.md)| XML file to parse (or zip file containing exactly 1 xml file to parse) | + **parent** | **str**| Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level | [optional] + +### Return type + +[**CollectionsResult**](CollectionsResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **import_excel** +> ExcelResult import_excel(parent, add_to_collection, excel) + +Import excel data + +Import excel data. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.excel_result import ExcelResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + parent = 'parent_example' # str | parent + add_to_collection = False # bool | addToCollection (default to False) + excel = None # object | Excel file to import + + try: + # Import excel data + api_response = api_instance.import_excel(parent, add_to_collection, excel) + print("The response of ADMINV1Api->import_excel:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->import_excel: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **parent** | **str**| parent | + **add_to_collection** | **bool**| addToCollection | [default to False] + **excel** | [**object**](object.md)| Excel file to import | + +### Return type + +[**ExcelResult**](ExcelResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **import_oai** +> import_oai(base_url, set, metadata_prefix, class_name, metadataset=metadataset, importer_class_name=importer_class_name, record_handler_class_name=record_handler_class_name, binary_handler_class_name=binary_handler_class_name, persistent_handler_class_name=persistent_handler_class_name, file_url=file_url, oai_ids=oai_ids, force_update=force_update, var_from=var_from, until=until, period_in_days=period_in_days) + +Import oai data + +Import oai data. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.ADMINV1Api(api_client)
+    base_url = 'base_url_example' # str | base url
+    set = 'set_example' # str | set/catalog id
+    metadata_prefix = 'metadata_prefix_example' # str | metadata prefix
+    class_name = 'org.edu_sharing.repository.server.jobs.quartz.ImporterJob' # str | importer job class name (call /classes to obtain a list) (default to 'org.edu_sharing.repository.server.jobs.quartz.ImporterJob')
+    metadataset = 'metadataset_example' # str | id metadataset (optional)
+    importer_class_name = 'org.edu_sharing.repository.server.importer.OAIPMHLOMImporter' # str | importer class name (call /classes to obtain a list) (optional) (default to 'org.edu_sharing.repository.server.importer.OAIPMHLOMImporter')
+    record_handler_class_name = 'org.edu_sharing.repository.server.importer.RecordHandlerLOM' # str | RecordHandler class name (optional) (default to 'org.edu_sharing.repository.server.importer.RecordHandlerLOM')
+    binary_handler_class_name = 'binary_handler_class_name_example' # str | BinaryHandler class name (may be empty for none) (optional)
+    persistent_handler_class_name = 'persistent_handler_class_name_example' # str | PersistentHandler class name (may be empty for none) (optional)
+    file_url = 'file_url_example' # str | url to file (optional)
+    oai_ids = 'oai_ids_example' # str | OAI Ids to import; can be null, in which case the whole set will be imported (optional)
+    force_update = False # bool | force Update of all entries (optional) (default to False)
+    var_from = 'var_from_example' # str | from: datestring yyyy-MM-dd (optional)
+    until = 'until_example' # str | until: datestring yyyy-MM-dd (optional)
+    period_in_days = 'period_in_days_example' # str | periodInDays: internally sets from and until; only effective if from/until are not set (optional)
+
+    try:
+        # Import oai data
+        api_instance.import_oai(base_url, set, metadata_prefix, class_name, metadataset=metadataset, importer_class_name=importer_class_name, record_handler_class_name=record_handler_class_name, binary_handler_class_name=binary_handler_class_name, persistent_handler_class_name=persistent_handler_class_name, file_url=file_url, oai_ids=oai_ids, force_update=force_update, var_from=var_from, until=until, period_in_days=period_in_days)
+    except Exception as e:
+        print("Exception when calling ADMINV1Api->import_oai: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **base_url** | **str**| base url |
+ **set** | **str**| set/catalog id |
+ **metadata_prefix** | **str**| metadata prefix |
+ **class_name** | **str**| importer job class name (call /classes to obtain a list) | [default to 'org.edu_sharing.repository.server.jobs.quartz.ImporterJob']
+ **metadataset** | **str**| id metadataset | [optional]
+ **importer_class_name** | **str**| importer class name (call /classes to obtain a list) | [optional] [default to 'org.edu_sharing.repository.server.importer.OAIPMHLOMImporter']
+ **record_handler_class_name** | **str**| RecordHandler class name | [optional] [default to 'org.edu_sharing.repository.server.importer.RecordHandlerLOM']
+ **binary_handler_class_name** | **str**| BinaryHandler class name (may be empty for none) | [optional]
+ **persistent_handler_class_name** | **str**| PersistentHandler class name (may be empty for none) | [optional]
+ **file_url** | **str**| url to file | [optional]
+ **oai_ids** | **str**| OAI Ids to import; can be null, in which case the whole set will be imported | [optional]
+ **force_update** | **bool**| force Update of all entries | [optional] [default to False]
+ **var_from** | **str**| from: datestring yyyy-MM-dd | [optional]
+ **until** | **str**| until: datestring yyyy-MM-dd | [optional]
+ **period_in_days** | **str**| periodInDays: internally sets from and until; only effective if from/until are not set | [optional]
+
+### Return type
+
+void (empty response body)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | OK. | - |
+**400** | Preconditions are not present. | - |
+**401** | Authorization failed. | - |
+**403** | Session user has insufficient rights to perform this operation. | - |
+**404** | Resources are not found. | - |
+**500** | Fatal error occurred. | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **import_oai_xml**
+> Node import_oai_xml(record_handler_class_name=record_handler_class_name, binary_handler_class_name=binary_handler_class_name, xml=xml)
+
+Import single xml via oai (for testing)
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.node import Node
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + record_handler_class_name = 'org.edu_sharing.repository.server.importer.RecordHandlerLOM' # str | RecordHandler class name (optional) (default to 'org.edu_sharing.repository.server.importer.RecordHandlerLOM') + binary_handler_class_name = 'binary_handler_class_name_example' # str | BinaryHandler class name (may be empty for none) (optional) + xml = None # object | (optional) + + try: + # Import single xml via oai (for testing) + api_response = api_instance.import_oai_xml(record_handler_class_name=record_handler_class_name, binary_handler_class_name=binary_handler_class_name, xml=xml) + print("The response of ADMINV1Api->import_oai_xml:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->import_oai_xml: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **record_handler_class_name** | **str**| RecordHandler class name | [optional] [default to 'org.edu_sharing.repository.server.importer.RecordHandlerLOM'] + **binary_handler_class_name** | **str**| BinaryHandler class name (may be empty for none) | [optional] + **xml** | [**object**](object.md)| | [optional] + +### Return type + +[**Node**](Node.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **refresh_app_info** +> refresh_app_info() + +refresh app info + +Refresh the application info. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # refresh app info + api_instance.refresh_app_info() + except Exception as e: + print("Exception when calling ADMINV1Api->refresh_app_info: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **refresh_cache** +> refresh_cache(folder, sticky) + +Refresh cache + +Refresh importer cache. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + folder = '-userhome-' # str | refresh cache root folder id (default to '-userhome-') + sticky = False # bool | sticky (default to False) + + try: + # Refresh cache + api_instance.refresh_cache(folder, sticky) + except Exception as e: + print("Exception when calling ADMINV1Api->refresh_cache: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **folder** | **str**| refresh cache root folder id | [default to '-userhome-'] + **sticky** | **bool**| sticky | [default to False] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **refresh_edu_group_cache** +> refresh_edu_group_cache(keep_existing=keep_existing) + +Refresh the Edu Group Cache + +Refresh the Edu Group Cache. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + keep_existing = False # bool | keep existing (optional) (default to False) + + try: + # Refresh the Edu Group Cache + api_instance.refresh_edu_group_cache(keep_existing=keep_existing) + except Exception as e: + print("Exception when calling ADMINV1Api->refresh_edu_group_cache: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **keep_existing** | **bool**| keep existing | [optional] [default to False] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_application** +> remove_application(id) + +remove an application + +remove the specified application. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + id = 'id_example' # str | Application id + + try: + # remove an application + api_instance.remove_application(id) + except Exception as e: + print("Exception when calling ADMINV1Api->remove_application: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| Application id | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_cache_entry** +> remove_cache_entry(cache_index=cache_index, bean=bean) + +remove cache entry + +remove cache entry + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + cache_index = 56 # int | cacheIndex (optional) + bean = 'bean_example' # str | bean (optional) + + try: + # remove cache entry + api_instance.remove_cache_entry(cache_index=cache_index, bean=bean) + except Exception as e: + print("Exception when calling ADMINV1Api->remove_cache_entry: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **cache_index** | **int**| cacheIndex | [optional] + **bean** | **str**| bean | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_oai_imports** +> remove_oai_imports(base_url, set, metadata_prefix) + +Remove deleted imports + +Remove deleted imports. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + base_url = 'base_url_example' # str | base url + set = 'set_example' # str | set/catalog id + metadata_prefix = 'metadata_prefix_example' # str | metadata prefix + + try: + # Remove deleted imports + api_instance.remove_oai_imports(base_url, set, metadata_prefix) + except Exception as e: + print("Exception when calling ADMINV1Api->remove_oai_imports: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **base_url** | **str**| base url | + **set** | **str**| set/catalog id | + **metadata_prefix** | **str**| metadata prefix | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_by_elastic_dsl** +> SearchResultElastic search_by_elastic_dsl(dsl=dsl) + +Search for custom elastic DSL query + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result_elastic import SearchResultElastic +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + dsl = 'dsl_example' # str | dsl query (json encoded) (optional) + + try: + # Search for custom elastic DSL query + api_response = api_instance.search_by_elastic_dsl(dsl=dsl) + print("The response of ADMINV1Api->search_by_elastic_dsl:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->search_by_elastic_dsl: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **dsl** | **str**| dsl query (json encoded) | [optional] + +### Return type + +[**SearchResultElastic**](SearchResultElastic.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_by_lucene** +> SearchResult search_by_lucene(query=query, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter, store=store, authority_scope=authority_scope) + +Search for custom lucene query + +e.g. @cm\\:name:\"*\" + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + query = '@cm\\:name:"*"' # str | query (optional) (default to '@cm\\:name:"*"') + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + store = 'store_example' # str | store, workspace or archive (optional) + authority_scope = ['authority_scope_example'] # List[str] | authority scope to search for (optional) + + try: + # Search for custom lucene query + api_response = api_instance.search_by_lucene(query=query, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter, store=store, authority_scope=authority_scope) + print("The response of ADMINV1Api->search_by_lucene:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->search_by_lucene: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **query** | **str**| query | [optional] [default to '@cm\\:name:"*"'] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + **store** | **str**| store, workspace or archive | [optional] + **authority_scope** | [**List[str]**](str.md)| authority scope to search for | [optional] + +### Return type + +[**SearchResult**](SearchResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **server_update_list** +> str server_update_list() + +list available update tasks + +list available update tasks + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + + try: + # list available update tasks + api_response = api_instance.server_update_list() + print("The response of ADMINV1Api->server_update_list:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->server_update_list: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **server_update_list1** +> str server_update_list1(id, execute) + +Run an update tasks + +Run a specific update task (test or full update). + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
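+# Editor note (sketch): the id passed to this call is expected to match one of the task
+# ids returned by server_update_list() above; keeping execute=False performs a dry run
+# only, as described in the parameter table below.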
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + id = 'id_example' # str | Id of the update task + execute = False # bool | Actually execute (if false, just runs in test mode) (default to False) + + try: + # Run an update tasks + api_response = api_instance.server_update_list1(id, execute) + print("The response of ADMINV1Api->server_update_list1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->server_update_list1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| Id of the update task | + **execute** | **bool**| Actually execute (if false, just runs in test mode) | [default to False] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_config** +> set_config(repository_config=repository_config) + +set/update the repository config object + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.repository_config import RepositoryConfig +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + repository_config = edu_sharing_client.RepositoryConfig() # RepositoryConfig | (optional) + + try: + # set/update the repository config object + api_instance.set_config(repository_config=repository_config) + except Exception as e: + print("Exception when calling ADMINV1Api->set_config: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository_config** | [**RepositoryConfig**](RepositoryConfig.md)| | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_toolpermissions** +> str set_toolpermissions(authority, request_body=request_body) + +set toolpermissions for an authority + +If a toolpermission has status UNDEFINED, it will remove explicit permissions for the authority + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + authority = 'authority_example' # str | Authority to set (user or group) + request_body = {'key': 'request_body_example'} # Dict[str, str] | (optional) + + try: + # set toolpermissions for an authority + api_response = api_instance.set_toolpermissions(authority, request_body=request_body) + print("The response of ADMINV1Api->set_toolpermissions:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->set_toolpermissions: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **authority** | **str**| Authority to set (user or group) | + **request_body** | [**Dict[str, str]**](str.md)| | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_job** +> start_job(job_class, request_body) + +Start a Job. + +Start a Job. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
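+# Editor note (assumption, not from the generated docs): job_class is expected to be the
+# fully qualified class name of a repository job and request_body a dict with that job's
+# parameters, e.g. roughly:
+#   job_class = "org.edu_sharing.repository.server.jobs.quartz.SomeJob"  # placeholder class name
+#   request_body = {"someParam": "someValue"}                            # placeholder params
+# Both values here are illustrative only and will differ per instance.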
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + job_class = 'job_class_example' # str | jobClass + request_body = None # Dict[str, object] | params + + try: + # Start a Job. + api_instance.start_job(job_class, request_body) + except Exception as e: + print("Exception when calling ADMINV1Api->start_job: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **job_class** | **str**| jobClass | + **request_body** | [**Dict[str, object]**](object.md)| params | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_job_sync** +> object start_job_sync(job_class, request_body) + +Start a Job. + +Start a Job. Wait for the result synchronously + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + job_class = 'job_class_example' # str | jobClass + request_body = None # Dict[str, object] | params + + try: + # Start a Job. + api_response = api_instance.start_job_sync(job_class, request_body) + print("The response of ADMINV1Api->start_job_sync:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->start_job_sync: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **job_class** | **str**| jobClass | + **request_body** | [**Dict[str, object]**](object.md)| params | + +### Return type + +**object** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. 
| - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **switch_authority** +> switch_authority(authority_name) + +switch the session to a known authority name + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + authority_name = 'authority_name_example' # str | the authority to use (must be a person) + + try: + # switch the session to a known authority name + api_instance.switch_authority(authority_name) + except Exception as e: + print("Exception when calling ADMINV1Api->switch_authority: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **authority_name** | **str**| the authority to use (must be a person) | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **test_mail** +> test_mail(receiver, template) + +Test a mail template + +Sends the given template as a test to the given receiver. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + receiver = 'receiver_example' # str | + template = 'template_example' # str | + + try: + # Test a mail template + api_instance.test_mail(receiver, template) + except Exception as e: + print("Exception when calling ADMINV1Api->test_mail: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **receiver** | **str**| | + **template** | **str**| | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_application_xml** +> update_application_xml(xml, request_body=request_body) + +edit any properties xml (like homeApplication.properties.xml) + +if the key exists, it will be overwritten. Otherwise, it will be created. You only need to transfer keys you want to edit + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + xml = 'xml_example' # str | Properties Filename (*.xml) + request_body = {'key': 'request_body_example'} # Dict[str, str] | (optional) + + try: + # edit any properties xml (like homeApplication.properties.xml) + api_instance.update_application_xml(xml, request_body=request_body) + except Exception as e: + print("Exception when calling ADMINV1Api->update_application_xml: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **xml** | **str**| Properties Filename (*.xml) | + **request_body** | [**Dict[str, str]**](str.md)| | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_config_file** +> update_config_file(filename, path_prefix, body=body) + +update a base system config file (e.g. edu-sharing.conf) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + filename = 'filename_example' # str | filename to fetch + path_prefix = 'path_prefix_example' # str | path prefix this file belongs to + body = 'body_example' # str | (optional) + + try: + # update a base system config file (e.g. edu-sharing.conf) + api_instance.update_config_file(filename, path_prefix, body=body) + except Exception as e: + print("Exception when calling ADMINV1Api->update_config_file: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **filename** | **str**| filename to fetch | + **path_prefix** | **str**| path prefix this file belongs to | + **body** | **str**| | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **upload_temp** +> UploadResult upload_temp(name, file) + +Upload a file + +Upload a file to tomcat temp directory, to use it on the server (e.g. an update) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.upload_result import UploadResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
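+# Editor note (sketch): in the generated snippet below `file` is left as None; for a real
+# upload it should carry the file content, e.g. something like
+#   file = open("update.zip", "rb").read()   # placeholder file name
+# depending on how your version of the generated client handles multipart uploads.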
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ADMINV1Api(api_client) + name = 'name_example' # str | filename + file = None # object | file to upload + + try: + # Upload a file + api_response = api_instance.upload_temp(name, file) + print("The response of ADMINV1Api->upload_temp:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ADMINV1Api->upload_temp: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **name** | **str**| filename | + **file** | [**object**](object.md)| file to upload | + +### Return type + +[**UploadResult**](UploadResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/ARCHIVEV1Api.md b/edu_sharing_openapi/docs/ARCHIVEV1Api.md new file mode 100644 index 00000000..c64005ae --- /dev/null +++ b/edu_sharing_openapi/docs/ARCHIVEV1Api.md @@ -0,0 +1,335 @@ +# edu_sharing_client.ARCHIVEV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**purge**](ARCHIVEV1Api.md#purge) | **DELETE** /archive/v1/purge/{repository} | Searches for archive nodes. +[**restore**](ARCHIVEV1Api.md#restore) | **POST** /archive/v1/restore/{repository} | restore archived nodes. +[**search_archive**](ARCHIVEV1Api.md#search_archive) | **GET** /archive/v1/search/{repository}/{pattern} | Searches for archive nodes. +[**search_archive_person**](ARCHIVEV1Api.md#search_archive_person) | **GET** /archive/v1/search/{repository}/{pattern}/{person} | Searches for archive nodes. + + +# **purge** +> str purge(repository, archived_node_ids) + +Searches for archive nodes. + +Searches for archive nodes. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ARCHIVEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + archived_node_ids = ['archived_node_ids_example'] # List[str] | archived node + + try: + # Searches for archive nodes. + api_response = api_instance.purge(repository, archived_node_ids) + print("The response of ARCHIVEV1Api->purge:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ARCHIVEV1Api->purge: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **archived_node_ids** | [**List[str]**](str.md)| archived node | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **restore** +> RestoreResults restore(repository, archived_node_ids, target=target) + +restore archived nodes. + +restores archived nodes. restoreStatus can have the following values: FALLBACK_PARENT_NOT_EXISTS, FALLBACK_PARENT_NO_PERMISSION, DUPLICATENAME, FINE + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.restore_results import RestoreResults +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ARCHIVEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + archived_node_ids = ['archived_node_ids_example'] # List[str] | archived nodes + target = 'target_example' # str | to target (optional) + + try: + # restore archived nodes. 
+ api_response = api_instance.restore(repository, archived_node_ids, target=target) + print("The response of ARCHIVEV1Api->restore:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ARCHIVEV1Api->restore: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **archived_node_ids** | [**List[str]**](str.md)| archived nodes | + **target** | **str**| to target | [optional] + +### Return type + +[**RestoreResults**](RestoreResults.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_archive** +> SearchResult search_archive(repository, pattern, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Searches for archive nodes. + +Searches for archive nodes. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ARCHIVEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + pattern = 'pattern_example' # str | search pattern + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Searches for archive nodes. 
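+        # Editor note (sketch): the node ids contained in the returned SearchResult are the
+        # ones that can be passed on to restore() or purge() above as archived_node_ids.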
+ api_response = api_instance.search_archive(repository, pattern, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of ARCHIVEV1Api->search_archive:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ARCHIVEV1Api->search_archive: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **pattern** | **str**| search pattern | + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**SearchResult**](SearchResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_archive_person** +> SearchResult search_archive_person(repository, pattern, person, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Searches for archive nodes. + +Searches for archive nodes. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ARCHIVEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + pattern = 'pattern_example' # str | search pattern + person = '-me-' # str | person (default to '-me-') + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Searches for archive nodes. + api_response = api_instance.search_archive_person(repository, pattern, person, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of ARCHIVEV1Api->search_archive_person:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ARCHIVEV1Api->search_archive_person: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **pattern** | **str**| search pattern | + **person** | **str**| person | [default to '-me-'] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**SearchResult**](SearchResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/AUTHENTICATIONV1Api.md b/edu_sharing_openapi/docs/AUTHENTICATIONV1Api.md new file mode 100644 index 00000000..4480ece7 --- /dev/null +++ b/edu_sharing_openapi/docs/AUTHENTICATIONV1Api.md @@ -0,0 +1,346 @@ +# edu_sharing_client.AUTHENTICATIONV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**authenticate**](AUTHENTICATIONV1Api.md#authenticate) | **POST** /authentication/v1/appauth/{userId} | authenticate user of an registered application. +[**has_access_to_scope**](AUTHENTICATIONV1Api.md#has_access_to_scope) | **GET** /authentication/v1/hasAccessToScope | Returns true if the current user has access to the given scope +[**login**](AUTHENTICATIONV1Api.md#login) | **GET** /authentication/v1/validateSession | Validates the Basic Auth Credentials and check if the session is a logged in user +[**login_to_scope**](AUTHENTICATIONV1Api.md#login_to_scope) | **POST** /authentication/v1/loginToScope | Validates the Basic Auth Credentials and check if the session is a logged in user +[**logout**](AUTHENTICATIONV1Api.md#logout) | **GET** /authentication/v1/destroySession | Destroys the current session and logout the user + + +# **authenticate** +> AuthenticationToken authenticate(user_id, user_profile_app_auth=user_profile_app_auth) + +authenticate user of an registered application. + +headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.authentication_token import AuthenticationToken +from edu_sharing_client.models.user_profile_app_auth import UserProfileAppAuth +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.AUTHENTICATIONV1Api(api_client) + user_id = 'user_id_example' # str | User Id + user_profile_app_auth = edu_sharing_client.UserProfileAppAuth() # UserProfileAppAuth | User Profile (optional) + + try: + # authenticate user of an registered application. 
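+        # Editor note (sketch, values are placeholders): as stated in the endpoint
+        # description, the signature headers must be present on the request. They could
+        # be attached via the api_client before this call, for example:
+        #   api_client.set_default_header("X-Edu-App-Id", "my-app-id")
+        #   api_client.set_default_header("X-Edu-App-Ts", timestamp)
+        #   api_client.set_default_header("X-Edu-App-Signed", signed_data)
+        #   api_client.set_default_header("X-Edu-App-Sig", signature)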
+ api_response = api_instance.authenticate(user_id, user_profile_app_auth=user_profile_app_auth) + print("The response of AUTHENTICATIONV1Api->authenticate:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling AUTHENTICATIONV1Api->authenticate: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **user_id** | **str**| User Id | + **user_profile_app_auth** | [**UserProfileAppAuth**](UserProfileAppAuth.md)| User Profile | [optional] + +### Return type + +[**AuthenticationToken**](AuthenticationToken.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **has_access_to_scope** +> has_access_to_scope(scope) + +Returns true if the current user has access to the given scope + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.AUTHENTICATIONV1Api(api_client) + scope = 'scope_example' # str | scope + + try: + # Returns true if the current user has access to the given scope + api_instance.has_access_to_scope(scope) + except Exception as e: + print("Exception when calling AUTHENTICATIONV1Api->has_access_to_scope: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **scope** | **str**| scope | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **login** +> Login login() + +Validates the Basic Auth Credentials and check if the session is a logged in user + +Use the Basic auth header field to transfer the credentials + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.login import Login +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.AUTHENTICATIONV1Api(api_client) + + try: + # Validates the Basic Auth Credentials and check if the session is a logged in user + api_response = api_instance.login() + print("The response of AUTHENTICATIONV1Api->login:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling AUTHENTICATIONV1Api->login: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**Login**](Login.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **login_to_scope** +> Login login_to_scope(login_credentials) + +Validates the Basic Auth Credentials and check if the session is a logged in user + +Use the Basic auth header field to transfer the credentials + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.login import Login +from edu_sharing_client.models.login_credentials import LoginCredentials +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
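+# Editor note (assumption): LoginCredentials is expected to carry the user name, password
+# and the scope to log in to, e.g. roughly
+#   edu_sharing_client.LoginCredentials(user_name="test", password="test", scope="safe")
+# Field names and the "safe" scope value are assumptions about typical edu-sharing setups,
+# not taken from this generated documentation.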
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.AUTHENTICATIONV1Api(api_client) + login_credentials = edu_sharing_client.LoginCredentials() # LoginCredentials | credentials, example: test,test + + try: + # Validates the Basic Auth Credentials and check if the session is a logged in user + api_response = api_instance.login_to_scope(login_credentials) + print("The response of AUTHENTICATIONV1Api->login_to_scope:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling AUTHENTICATIONV1Api->login_to_scope: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **login_credentials** | [**LoginCredentials**](LoginCredentials.md)| credentials, example: test,test | + +### Return type + +[**Login**](Login.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **logout** +> logout() + +Destroys the current session and logout the user + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.AUTHENTICATIONV1Api(api_client) + + try: + # Destroys the current session and logout the user + api_instance.logout() + except Exception as e: + print("Exception when calling AUTHENTICATIONV1Api->logout: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/About.md b/edu_sharing_openapi/docs/About.md new file mode 100644 index 00000000..3d73cea9 --- /dev/null +++ b/edu_sharing_openapi/docs/About.md @@ -0,0 +1,34 @@ +# About + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**plugins** | [**List[PluginInfo]**](PluginInfo.md) | | [optional] +**features** | [**List[FeatureInfo]**](FeatureInfo.md) | | [optional] +**themes_url** | **str** | | [optional] +**last_cache_update** | **int** | | [optional] +**version** | [**ServiceVersion**](ServiceVersion.md) | | +**services** | [**List[AboutService]**](AboutService.md) | | + +## Example + +```python +from edu_sharing_client.models.about import About + +# TODO update the JSON string below +json = "{}" +# create an instance of About from a JSON string +about_instance = About.from_json(json) +# print the JSON string representation of the object +print(About.to_json()) + +# convert the object into a dict +about_dict = about_instance.to_dict() +# create an instance of About from a dict +about_from_dict = About.from_dict(about_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/AboutService.md b/edu_sharing_openapi/docs/AboutService.md new file mode 100644 index 00000000..501cccb7 --- /dev/null +++ b/edu_sharing_openapi/docs/AboutService.md @@ -0,0 +1,30 @@ +# AboutService + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**instances** | [**List[ServiceInstance]**](ServiceInstance.md) | | + +## Example + +```python +from edu_sharing_client.models.about_service import AboutService + +# TODO update the JSON string below +json = "{}" +# create an instance of AboutService from a JSON string +about_service_instance = AboutService.from_json(json) +# print the JSON string representation of the object +print(AboutService.to_json()) + +# convert the object into a dict +about_service_dict = about_service_instance.to_dict() +# create an instance of AboutService from a dict +about_service_from_dict = AboutService.from_dict(about_service_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/AbstractEntries.md b/edu_sharing_openapi/docs/AbstractEntries.md new file mode 100644 index 00000000..74dc1ddc --- /dev/null +++ b/edu_sharing_openapi/docs/AbstractEntries.md @@ -0,0 +1,30 @@ +# AbstractEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**nodes** | **List[object]** | | +**pagination** | [**Pagination**](Pagination.md) | | + +## Example + +```python +from edu_sharing_client.models.abstract_entries import AbstractEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of AbstractEntries from a JSON string +abstract_entries_instance = AbstractEntries.from_json(json) +# print the JSON string representation of the object +print(AbstractEntries.to_json()) + +# convert the object into a dict +abstract_entries_dict = 
abstract_entries_instance.to_dict() +# create an instance of AbstractEntries from a dict +abstract_entries_from_dict = AbstractEntries.from_dict(abstract_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/AddToCollectionEventDTO.md b/edu_sharing_openapi/docs/AddToCollectionEventDTO.md new file mode 100644 index 00000000..e6f5f80d --- /dev/null +++ b/edu_sharing_openapi/docs/AddToCollectionEventDTO.md @@ -0,0 +1,30 @@ +# AddToCollectionEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**collection** | [**CollectionDTO**](CollectionDTO.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.add_to_collection_event_dto import AddToCollectionEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of AddToCollectionEventDTO from a JSON string +add_to_collection_event_dto_instance = AddToCollectionEventDTO.from_json(json) +# print the JSON string representation of the object +print(AddToCollectionEventDTO.to_json()) + +# convert the object into a dict +add_to_collection_event_dto_dict = add_to_collection_event_dto_instance.to_dict() +# create an instance of AddToCollectionEventDTO from a dict +add_to_collection_event_dto_from_dict = AddToCollectionEventDTO.from_dict(add_to_collection_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Admin.md b/edu_sharing_openapi/docs/Admin.md new file mode 100644 index 00000000..732fa65d --- /dev/null +++ b/edu_sharing_openapi/docs/Admin.md @@ -0,0 +1,30 @@ +# Admin + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**statistics** | [**Statistics**](Statistics.md) | | [optional] +**editor_type** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.admin import Admin + +# TODO update the JSON string below +json = "{}" +# create an instance of Admin from a JSON string +admin_instance = Admin.from_json(json) +# print the JSON string representation of the object +print(Admin.to_json()) + +# convert the object into a dict +admin_dict = admin_instance.to_dict() +# create an instance of Admin from a dict +admin_from_dict = Admin.from_dict(admin_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/AdminStatistics.md b/edu_sharing_openapi/docs/AdminStatistics.md new file mode 100644 index 00000000..4ef3641b --- /dev/null +++ b/edu_sharing_openapi/docs/AdminStatistics.md @@ -0,0 +1,34 @@ +# AdminStatistics + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**active_sessions** | **int** | | [optional] +**number_of_previews** | **int** | | [optional] +**max_memory** | **int** | | [optional] +**allocated_memory** | **int** | | [optional] +**preview_cache_size** | **int** | | [optional] +**active_locks** | [**List[Node]**](Node.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.admin_statistics 
import AdminStatistics + +# TODO update the JSON string below +json = "{}" +# create an instance of AdminStatistics from a JSON string +admin_statistics_instance = AdminStatistics.from_json(json) +# print the JSON string representation of the object +print(AdminStatistics.to_json()) + +# convert the object into a dict +admin_statistics_dict = admin_statistics_instance.to_dict() +# create an instance of AdminStatistics from a dict +admin_statistics_from_dict = AdminStatistics.from_dict(admin_statistics_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Application.md b/edu_sharing_openapi/docs/Application.md new file mode 100644 index 00000000..0f8a66e0 --- /dev/null +++ b/edu_sharing_openapi/docs/Application.md @@ -0,0 +1,39 @@ +# Application + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**title** | **str** | | [optional] +**webserver_url** | **str** | | [optional] +**client_base_url** | **str** | | [optional] +**type** | **str** | | [optional] +**subtype** | **str** | | [optional] +**repository_type** | **str** | | [optional] +**xml** | **str** | | [optional] +**file** | **str** | | [optional] +**content_url** | **str** | | [optional] +**config_url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.application import Application + +# TODO update the JSON string below +json = "{}" +# create an instance of Application from a JSON string +application_instance = Application.from_json(json) +# print the JSON string representation of the object +print(Application.to_json()) + +# convert the object into a dict +application_dict = application_instance.to_dict() +# create an instance of Application from a dict +application_from_dict = Application.from_dict(application_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Audience.md b/edu_sharing_openapi/docs/Audience.md new file mode 100644 index 00000000..542ec462 --- /dev/null +++ b/edu_sharing_openapi/docs/Audience.md @@ -0,0 +1,29 @@ +# Audience + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.audience import Audience + +# TODO update the JSON string below +json = "{}" +# create an instance of Audience from a JSON string +audience_instance = Audience.from_json(json) +# print the JSON string representation of the object +print(Audience.to_json()) + +# convert the object into a dict +audience_dict = audience_instance.to_dict() +# create an instance of Audience from a dict +audience_from_dict = Audience.from_dict(audience_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/AuthenticationToken.md b/edu_sharing_openapi/docs/AuthenticationToken.md new file mode 100644 index 00000000..a2c36ad1 --- /dev/null +++ b/edu_sharing_openapi/docs/AuthenticationToken.md @@ -0,0 +1,30 @@ +# AuthenticationToken + + +## Properties + +Name | Type | Description | Notes +------------ | 
------------- | ------------- | ------------- +**user_id** | **str** | | [optional] +**ticket** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.authentication_token import AuthenticationToken + +# TODO update the JSON string below +json = "{}" +# create an instance of AuthenticationToken from a JSON string +authentication_token_instance = AuthenticationToken.from_json(json) +# print the JSON string representation of the object +print(AuthenticationToken.to_json()) + +# convert the object into a dict +authentication_token_dict = authentication_token_instance.to_dict() +# create an instance of AuthenticationToken from a dict +authentication_token_from_dict = AuthenticationToken.from_dict(authentication_token_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Authority.md b/edu_sharing_openapi/docs/Authority.md new file mode 100644 index 00000000..04e5087b --- /dev/null +++ b/edu_sharing_openapi/docs/Authority.md @@ -0,0 +1,32 @@ +# Authority + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**properties** | **Dict[str, List[str]]** | | [optional] +**editable** | **bool** | | [optional] +**authority_name** | **str** | | +**authority_type** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.authority import Authority + +# TODO update the JSON string below +json = "{}" +# create an instance of Authority from a JSON string +authority_instance = Authority.from_json(json) +# print the JSON string representation of the object +print(Authority.to_json()) + +# convert the object into a dict +authority_dict = authority_instance.to_dict() +# create an instance of Authority from a dict +authority_from_dict = Authority.from_dict(authority_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/AuthorityEntries.md b/edu_sharing_openapi/docs/AuthorityEntries.md new file mode 100644 index 00000000..69d43dde --- /dev/null +++ b/edu_sharing_openapi/docs/AuthorityEntries.md @@ -0,0 +1,30 @@ +# AuthorityEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**authorities** | [**List[Authority]**](Authority.md) | | +**pagination** | [**Pagination**](Pagination.md) | | + +## Example + +```python +from edu_sharing_client.models.authority_entries import AuthorityEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of AuthorityEntries from a JSON string +authority_entries_instance = AuthorityEntries.from_json(json) +# print the JSON string representation of the object +print(AuthorityEntries.to_json()) + +# convert the object into a dict +authority_entries_dict = authority_entries_instance.to_dict() +# create an instance of AuthorityEntries from a dict +authority_entries_from_dict = AuthorityEntries.from_dict(authority_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/AvailableMds.md b/edu_sharing_openapi/docs/AvailableMds.md new file mode 100644 index 00000000..11f2f820 --- /dev/null +++ 
b/edu_sharing_openapi/docs/AvailableMds.md @@ -0,0 +1,30 @@ +# AvailableMds + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repository** | **str** | | [optional] +**mds** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.available_mds import AvailableMds + +# TODO update the JSON string below +json = "{}" +# create an instance of AvailableMds from a JSON string +available_mds_instance = AvailableMds.from_json(json) +# print the JSON string representation of the object +print(AvailableMds.to_json()) + +# convert the object into a dict +available_mds_dict = available_mds_instance.to_dict() +# create an instance of AvailableMds from a dict +available_mds_from_dict = AvailableMds.from_dict(available_mds_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/BULKV1Api.md b/edu_sharing_openapi/docs/BULKV1Api.md new file mode 100644 index 00000000..1b21162d --- /dev/null +++ b/edu_sharing_openapi/docs/BULKV1Api.md @@ -0,0 +1,172 @@ +# edu_sharing_client.BULKV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**find**](BULKV1Api.md#find) | **POST** /bulk/v1/find | gets a given node +[**sync**](BULKV1Api.md#sync) | **PUT** /bulk/v1/sync/{group} | Create or update a given node + + +# **find** +> NodeEntry find(request_body, resolve_node=resolve_node) + +gets a given node + +Get a given node based on the posted, multiple criteria. Make sure that they'll provide an unique result + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.BULKV1Api(api_client) + request_body = None # Dict[str, List[str]] | properties that must match (with \"AND\" concatenated) + resolve_node = True # bool | Return the full node. If you don't need the data, set to false to only return the id (will improve performance) (optional) (default to True) + + try: + # gets a given node + api_response = api_instance.find(request_body, resolve_node=resolve_node) + print("The response of BULKV1Api->find:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling BULKV1Api->find: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **request_body** | [**Dict[str, List[str]]**](List.md)| properties that must match (with \"AND\" concatenated) | + **resolve_node** | **bool**| Return the full node. 
If you don't need the data, set to false to only return the id (will improve performance) | [optional] [default to True] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **sync** +> NodeEntry sync(group, match, type, request_body, group_by=group_by, aspects=aspects, resolve_node=resolve_node, reset_version=reset_version) + +Create or update a given node + +Depending on the given \"match\" properties either a new node will be created or the existing one will be updated + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.BULKV1Api(api_client) + group = 'group_example' # str | The group to which this node belongs to. Used for internal structuring. Please use simple names only + match = ['match_example'] # List[str] | The properties that must match to identify if this node exists. Multiple properties will be and combined and compared + type = 'type_example' # str | type of node. If the node already exists, this will not change the type afterwards + request_body = None # Dict[str, List[str]] | properties, they'll not get filtered via mds, so be careful what you add here + group_by = ['group_by_example'] # List[str] | The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) (optional) + aspects = ['aspects_example'] # List[str] | aspects of node (optional) + resolve_node = True # bool | Return the generated or updated node. 
If you don't need the data, set to false to only return the id (will improve performance) (optional) (default to True) + reset_version = True # bool | reset all versions (like a complete reimport), all data inside edu-sharing will be lost (optional) + + try: + # Create or update a given node + api_response = api_instance.sync(group, match, type, request_body, group_by=group_by, aspects=aspects, resolve_node=resolve_node, reset_version=reset_version) + print("The response of BULKV1Api->sync:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling BULKV1Api->sync: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **group** | **str**| The group to which this node belongs to. Used for internal structuring. Please use simple names only | + **match** | [**List[str]**](str.md)| The properties that must match to identify if this node exists. Multiple properties will be and combined and compared | + **type** | **str**| type of node. If the node already exists, this will not change the type afterwards | + **request_body** | [**Dict[str, List[str]]**](List.md)| properties, they'll not get filtered via mds, so be careful what you add here | + **group_by** | [**List[str]**](str.md)| The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) | [optional] + **aspects** | [**List[str]**](str.md)| aspects of node | [optional] + **resolve_node** | **bool**| Return the generated or updated node. If you don't need the data, set to false to only return the id (will improve performance) | [optional] [default to True] + **reset_version** | **bool**| reset all versions (like a complete reimport), all data inside edu-sharing will be lost | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/Banner.md b/edu_sharing_openapi/docs/Banner.md new file mode 100644 index 00000000..ab510896 --- /dev/null +++ b/edu_sharing_openapi/docs/Banner.md @@ -0,0 +1,31 @@ +# Banner + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] +**href** | **str** | | [optional] +**components** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.banner import Banner + +# TODO update the JSON string below +json = "{}" +# create an instance of Banner from a JSON string +banner_instance = Banner.from_json(json) +# print the JSON string representation of the object +print(Banner.to_json()) + +# convert the object into a dict +banner_dict = banner_instance.to_dict() +# create an instance of Banner from a dict +banner_from_dict = Banner.from_dict(banner_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CLIENTUTILSV1Api.md b/edu_sharing_openapi/docs/CLIENTUTILSV1Api.md new file mode 100644 index 00000000..304854a4 --- /dev/null +++ b/edu_sharing_openapi/docs/CLIENTUTILSV1Api.md @@ -0,0 +1,80 @@ +# edu_sharing_client.CLIENTUTILSV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_website_information**](CLIENTUTILSV1Api.md#get_website_information) | **GET** /clientUtils/v1/getWebsiteInformation | Read generic information about a webpage + + +# **get_website_information** +> WebsiteInformation get_website_information(url=url) + +Read generic information about a webpage + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.website_information import WebsiteInformation +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
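+# Note: this generated example lists "No authorization required", but a real
+# edu-sharing instance usually expects credentials. If HTTP basic auth is
+# enabled on the server, they can presumably be passed to the generated
+# Configuration, e.g. (hypothetical account):
+#   configuration = edu_sharing_client.Configuration(
+#       host = "https://stable.demo.edu-sharing.net/edu-sharing/rest",
+#       username = "crawler-user", password = "crawler-secret")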
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CLIENTUTILSV1Api(api_client) + url = 'url_example' # str | full url with http or https (optional) + + try: + # Read generic information about a webpage + api_response = api_instance.get_website_information(url=url) + print("The response of CLIENTUTILSV1Api->get_website_information:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CLIENTUTILSV1Api->get_website_information: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **url** | **str**| full url with http or https | [optional] + +### Return type + +[**WebsiteInformation**](WebsiteInformation.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/COLLECTIONV1Api.md b/edu_sharing_openapi/docs/COLLECTIONV1Api.md new file mode 100644 index 00000000..81911268 --- /dev/null +++ b/edu_sharing_openapi/docs/COLLECTIONV1Api.md @@ -0,0 +1,1191 @@ +# edu_sharing_client.COLLECTIONV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_to_collection**](COLLECTIONV1Api.md#add_to_collection) | **PUT** /collection/v1/collections/{repository}/{collection}/references/{node} | Add a node to a collection. +[**change_icon_of_collection**](COLLECTIONV1Api.md#change_icon_of_collection) | **POST** /collection/v1/collections/{repository}/{collection}/icon | Writes Preview Image of a collection. +[**create_collection**](COLLECTIONV1Api.md#create_collection) | **POST** /collection/v1/collections/{repository}/{collection}/children | Create a new collection. +[**delete_collection**](COLLECTIONV1Api.md#delete_collection) | **DELETE** /collection/v1/collections/{repository}/{collection} | Delete a collection. +[**delete_from_collection**](COLLECTIONV1Api.md#delete_from_collection) | **DELETE** /collection/v1/collections/{repository}/{collection}/references/{node} | Delete a node from a collection. +[**get_collection**](COLLECTIONV1Api.md#get_collection) | **GET** /collection/v1/collections/{repository}/{collectionId} | Get a collection. 
+[**get_collections_containing_proposals**](COLLECTIONV1Api.md#get_collections_containing_proposals) | **GET** /collection/v1/collections/{repository}/children/proposals/collections | Get all collections containing proposals with a given state (via search index) +[**get_collections_proposals**](COLLECTIONV1Api.md#get_collections_proposals) | **GET** /collection/v1/collections/{repository}/{collection}/children/proposals | Get proposed objects for collection (requires edit permissions on collection). +[**get_collections_references**](COLLECTIONV1Api.md#get_collections_references) | **GET** /collection/v1/collections/{repository}/{collection}/children/references | Get references objects for collection. +[**get_collections_subcollections**](COLLECTIONV1Api.md#get_collections_subcollections) | **GET** /collection/v1/collections/{repository}/{collection}/children/collections | Get child collections for collection (or root). +[**remove_icon_of_collection**](COLLECTIONV1Api.md#remove_icon_of_collection) | **DELETE** /collection/v1/collections/{repository}/{collection}/icon | Deletes Preview Image of a collection. +[**search_collections**](COLLECTIONV1Api.md#search_collections) | **GET** /collection/v1/collections/{repository}/search | Search collections. +[**set_collection_order**](COLLECTIONV1Api.md#set_collection_order) | **POST** /collection/v1/collections/{repository}/{collection}/order | Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection +[**set_pinned_collections**](COLLECTIONV1Api.md#set_pinned_collections) | **POST** /collection/v1/collections/{repository}/pinning | Set pinned collections. +[**update_collection**](COLLECTIONV1Api.md#update_collection) | **PUT** /collection/v1/collections/{repository}/{collection} | Update a collection. + + +# **add_to_collection** +> NodeEntry add_to_collection(repository, collection, node, source_repo=source_repo, allow_duplicate=allow_duplicate, as_proposal=as_proposal) + +Add a node to a collection. + +Add a node to a collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of collection + node = 'node_example' # str | ID of node + source_repo = 'source_repo_example' # str | ID of source repository (optional) + allow_duplicate = False # bool | Allow that a node that already is inside the collection can be added again (optional) (default to False) + as_proposal = False # bool | Mark this node only as a proposal (not really adding but just marking it). This can also be used for collections where you don't have permissions (optional) (default to False) + + try: + # Add a node to a collection. 
+ api_response = api_instance.add_to_collection(repository, collection, node, source_repo=source_repo, allow_duplicate=allow_duplicate, as_proposal=as_proposal) + print("The response of COLLECTIONV1Api->add_to_collection:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->add_to_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of collection | + **node** | **str**| ID of node | + **source_repo** | **str**| ID of source repository | [optional] + **allow_duplicate** | **bool**| Allow that a node that already is inside the collection can be added again | [optional] [default to False] + **as_proposal** | **bool**| Mark this node only as a proposal (not really adding but just marking it). This can also be used for collections where you don't have permissions | [optional] [default to False] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_icon_of_collection** +> CollectionEntry change_icon_of_collection(repository, collection, mimetype, file=file) + +Writes Preview Image of a collection. + +Writes Preview Image of a collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.collection_entry import CollectionEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of collection + mimetype = 'mimetype_example' # str | MIME-Type + file = None # object | (optional) + + try: + # Writes Preview Image of a collection. 
+ api_response = api_instance.change_icon_of_collection(repository, collection, mimetype, file=file) + print("The response of COLLECTIONV1Api->change_icon_of_collection:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->change_icon_of_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of collection | + **mimetype** | **str**| MIME-Type | + **file** | [**object**](object.md)| | [optional] + +### Return type + +[**CollectionEntry**](CollectionEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_collection** +> CollectionEntry create_collection(repository, collection, node) + +Create a new collection. + +Create a new collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.collection_entry import CollectionEntry +from edu_sharing_client.models.node import Node +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of parent collection (or \"-root-\" for level0 collections) + node = edu_sharing_client.Node() # Node | collection + + try: + # Create a new collection. 
+ api_response = api_instance.create_collection(repository, collection, node) + print("The response of COLLECTIONV1Api->create_collection:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->create_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of parent collection (or \"-root-\" for level0 collections) | + **node** | [**Node**](Node.md)| collection | + +### Return type + +[**CollectionEntry**](CollectionEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_collection** +> delete_collection(repository, collection) + +Delete a collection. + +Delete a collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of collection + + try: + # Delete a collection. + api_instance.delete_collection(repository, collection) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->delete_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of collection | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_from_collection** +> delete_from_collection(repository, collection, node) + +Delete a node from a collection. + +Delete a node from a collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of collection + node = 'node_example' # str | ID of node + + try: + # Delete a node from a collection. + api_instance.delete_from_collection(repository, collection, node) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->delete_from_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of collection | + **node** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_collection** +> CollectionEntry get_collection(repository, collection_id, track=track) + +Get a collection. + +Get a collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.collection_entry import CollectionEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
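+# Note: "track" counts this request as a view of the collection (default: true).
+# For automated access, e.g. an ETL/crawler run, passing track=False is
+# presumably preferable so that harvesting does not inflate view statistics.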
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection_id = 'collection_id_example' # str | ID of collection + track = True # bool | track this as a view of the collection (default: true) (optional) + + try: + # Get a collection. + api_response = api_instance.get_collection(repository, collection_id, track=track) + print("The response of COLLECTIONV1Api->get_collection:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->get_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection_id** | **str**| ID of collection | + **track** | **bool**| track this as a view of the collection (default: true) | [optional] + +### Return type + +[**CollectionEntry**](CollectionEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_collections_containing_proposals** +> CollectionProposalEntries get_collections_containing_proposals(repository, status=status, fetch_counts=fetch_counts, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Get all collections containing proposals with a given state (via search index) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.collection_proposal_entries import CollectionProposalEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + status = PENDING # str | status of the proposals to search for (optional) (default to PENDING) + fetch_counts = True # bool | fetch counts of collections (materials and subcollections). 
This parameter will decrease performance so only enable if if you need this data (optional) (default to True) + max_items = 50 # int | maximum items per page (optional) (default to 50) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Get all collections containing proposals with a given state (via search index) + api_response = api_instance.get_collections_containing_proposals(repository, status=status, fetch_counts=fetch_counts, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of COLLECTIONV1Api->get_collections_containing_proposals:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->get_collections_containing_proposals: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **status** | **str**| status of the proposals to search for | [optional] [default to PENDING] + **fetch_counts** | **bool**| fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data | [optional] [default to True] + **max_items** | **int**| maximum items per page | [optional] [default to 50] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**CollectionProposalEntries**](CollectionProposalEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_collections_proposals** +> AbstractEntries get_collections_proposals(repository, collection, status) + +Get proposed objects for collection (requires edit permissions on collection). + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.abstract_entries import AbstractEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
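+# Note: the accepted values for "status" are not listed here; judging from the
+# related get_collections_containing_proposals endpoint, which defaults to
+# PENDING, a value such as 'PENDING' is presumably expected.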
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of parent collection + status = 'status_example' # str | Only show elements with given status + + try: + # Get proposed objects for collection (requires edit permissions on collection). + api_response = api_instance.get_collections_proposals(repository, collection, status) + print("The response of COLLECTIONV1Api->get_collections_proposals:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->get_collections_proposals: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of parent collection | + **status** | **str**| Only show elements with given status | + +### Return type + +[**AbstractEntries**](AbstractEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_collections_references** +> ReferenceEntries get_collections_references(repository, collection, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Get references objects for collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.reference_entries import ReferenceEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of parent collection + max_items = 500 # int | maximum items per page (optional) (default to 500) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Get references objects for collection. + api_response = api_instance.get_collections_references(repository, collection, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of COLLECTIONV1Api->get_collections_references:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->get_collections_references: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of parent collection | + **max_items** | **int**| maximum items per page | [optional] [default to 500] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**ReferenceEntries**](ReferenceEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_collections_subcollections** +> CollectionEntries get_collections_subcollections(repository, collection, scope, fetch_counts=fetch_counts, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Get child collections for collection (or root). 
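+
+Because results are capped at max_items entries per call (default 500) and are
+returned page by page, larger collection trees have to be fetched in a loop. A
+minimal paging sketch is shown below; it assumes that the returned
+CollectionEntries exposes its child collections as a `collections` list, an
+attribute name that is not shown in this document.
+
+```python
+import edu_sharing_client
+
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    api = edu_sharing_client.COLLECTIONV1Api(api_client)
+    page_size = 100
+    skip = 0
+    all_children = []
+    while True:
+        page = api.get_collections_subcollections(
+            '-home-', '-root-', 'MY',
+            fetch_counts=False,        # counts are not needed for plain listing
+            max_items=page_size, skip_count=skip)
+        children = page.collections or []  # assumed attribute name
+        all_children.extend(children)
+        if len(children) < page_size:      # last (possibly partial) page
+            break
+        skip += page_size
+```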
+ +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.collection_entries import CollectionEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of parent collection (or \"-root-\" for level0 collections) + scope = MY # str | scope (only relevant if parent == -root-) (default to MY) + fetch_counts = True # bool | fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data (optional) (default to True) + max_items = 500 # int | maximum items per page (optional) (default to 500) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Get child collections for collection (or root). + api_response = api_instance.get_collections_subcollections(repository, collection, scope, fetch_counts=fetch_counts, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of COLLECTIONV1Api->get_collections_subcollections:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->get_collections_subcollections: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of parent collection (or \"-root-\" for level0 collections) | + **scope** | **str**| scope (only relevant if parent == -root-) | [default to MY] + **fetch_counts** | **bool**| fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data | [optional] [default to True] + **max_items** | **int**| maximum items per page | [optional] [default to 500] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**CollectionEntries**](CollectionEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_icon_of_collection** +> remove_icon_of_collection(repository, collection) + +Deletes Preview Image of a collection. + +Deletes Preview Image of a collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of collection + + try: + # Deletes Preview Image of a collection. + api_instance.remove_icon_of_collection(repository, collection) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->remove_icon_of_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of collection | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_collections** +> CollectionEntries search_collections(repository, query, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Search collections. 
+ +Search collections. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.collection_entries import CollectionEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + query = 'query_example' # str | query string + max_items = 500 # int | maximum items per page (optional) (default to 500) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Search collections. + api_response = api_instance.search_collections(repository, query, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of COLLECTIONV1Api->search_collections:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->search_collections: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **query** | **str**| query string | + **max_items** | **int**| maximum items per page | [optional] [default to 500] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**CollectionEntries**](CollectionEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_collection_order** +> set_collection_order(repository, collection, request_body=request_body) + +Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection + +Current order will be overriden. 
Requires full permissions for the parent collection + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of collection + request_body = ['request_body_example'] # List[str] | List of nodes in the order to be saved. If empty, custom order of the collection will be disabled (optional) + + try: + # Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection + api_instance.set_collection_order(repository, collection, request_body=request_body) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->set_collection_order: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of collection | + **request_body** | [**List[str]**](str.md)| List of nodes in the order to be saved. If empty, custom order of the collection will be disabled | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_pinned_collections** +> set_pinned_collections(repository, request_body) + +Set pinned collections. + +Remove all currently pinned collections and set them in the order send. Requires TOOLPERMISSION_COLLECTION_PINNING + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
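+# Note: a single Configuration (and the ApiClient built from it) can be reused for
+# every COLLECTIONV1Api call shown in this document; each example recreates it only
+# so that the snippet stays self-contained.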
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + request_body = ['request_body_example'] # List[str] | List of collections that should be pinned + + try: + # Set pinned collections. + api_instance.set_pinned_collections(repository, request_body) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->set_pinned_collections: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **request_body** | [**List[str]**](str.md)| List of collections that should be pinned | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_collection** +> update_collection(repository, collection, node) + +Update a collection. + +Update a collection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node import Node +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COLLECTIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + collection = 'collection_example' # str | ID of collection + node = edu_sharing_client.Node() # Node | collection node + + try: + # Update a collection. 
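+        # Note: Node() above is only a placeholder; in practice the node object sent
+        # here would carry the updated collection properties (for example a new title).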
+ api_instance.update_collection(repository, collection, node) + except Exception as e: + print("Exception when calling COLLECTIONV1Api->update_collection: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **collection** | **str**| ID of collection | + **node** | [**Node**](Node.md)| collection node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/COMMENTV1Api.md b/edu_sharing_openapi/docs/COMMENTV1Api.md new file mode 100644 index 00000000..513bdce1 --- /dev/null +++ b/edu_sharing_openapi/docs/COMMENTV1Api.md @@ -0,0 +1,309 @@ +# edu_sharing_client.COMMENTV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_comment**](COMMENTV1Api.md#add_comment) | **PUT** /comment/v1/comments/{repository}/{node} | create a new comment +[**delete_comment**](COMMENTV1Api.md#delete_comment) | **DELETE** /comment/v1/comments/{repository}/{comment} | delete a comment +[**edit_comment**](COMMENTV1Api.md#edit_comment) | **POST** /comment/v1/comments/{repository}/{comment} | edit a comment +[**get_comments**](COMMENTV1Api.md#get_comments) | **GET** /comment/v1/comments/{repository}/{node} | list comments + + +# **add_comment** +> add_comment(repository, node, body, comment_reference=comment_reference) + +create a new comment + +Adds a comment to the given node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COMMENTV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + body = 'body_example' # str | Text content of comment + comment_reference = 'comment_reference_example' # str | In reply to an other comment, can be null (optional) + + try: + # create a new comment + api_instance.add_comment(repository, node, body, comment_reference=comment_reference) + except Exception as e: + print("Exception when calling COMMENTV1Api->add_comment: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **body** | **str**| Text content of comment | + **comment_reference** | **str**| In reply to an other comment, can be null | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_comment** +> delete_comment(repository, comment) + +delete a comment + +Delete the comment with the given id + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COMMENTV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + comment = 'comment_example' # str | id of the comment to delete + + try: + # delete a comment + api_instance.delete_comment(repository, comment) + except Exception as e: + print("Exception when calling COMMENTV1Api->delete_comment: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **comment** | **str**| id of the comment to delete | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **edit_comment** +> edit_comment(repository, comment, body) + +edit a comment + +Edit the comment with the given id + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COMMENTV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + comment = 'comment_example' # str | id of the comment to edit + body = 'body_example' # str | Text content of comment + + try: + # edit a comment + api_instance.edit_comment(repository, comment, body) + except Exception as e: + print("Exception when calling COMMENTV1Api->edit_comment: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **comment** | **str**| id of the comment to edit | + **body** | **str**| Text content of comment | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_comments** +> Comments get_comments(repository, node) + +list comments + +List all comments + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.comments import Comments +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.COMMENTV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # list comments + api_response = api_instance.get_comments(repository, node) + print("The response of COMMENTV1Api->get_comments:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling COMMENTV1Api->get_comments: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**Comments**](Comments.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/CONFIGV1Api.md b/edu_sharing_openapi/docs/CONFIGV1Api.md new file mode 100644 index 00000000..1b176541 --- /dev/null +++ b/edu_sharing_openapi/docs/CONFIGV1Api.md @@ -0,0 +1,435 @@ +# edu_sharing_client.CONFIGV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_config1**](CONFIGV1Api.md#get_config1) | **GET** /config/v1/values | get repository config values +[**get_dynamic_value**](CONFIGV1Api.md#get_dynamic_value) | **GET** /config/v1/dynamic/{key} | Get a config entry (appropriate rights for the entry are required) +[**get_language**](CONFIGV1Api.md#get_language) | **GET** /config/v1/language | get override strings for the current language +[**get_language_defaults**](CONFIGV1Api.md#get_language_defaults) | **GET** /config/v1/language/defaults | get all inital language strings for angular +[**get_variables**](CONFIGV1Api.md#get_variables) | **GET** /config/v1/variables | get global config variables +[**set_dynamic_value**](CONFIGV1Api.md#set_dynamic_value) | **POST** /config/v1/dynamic/{key} | Set a config entry (admin rights required) + + +# **get_config1** +> Config get_config1() + +get repository config values + +Current is the actual (context-based) active config. 
Global is the default global config if no context is active (may be identical to the current) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.config import Config +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CONFIGV1Api(api_client) + + try: + # get repository config values + api_response = api_instance.get_config1() + print("The response of CONFIGV1Api->get_config1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CONFIGV1Api->get_config1: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**Config**](Config.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_dynamic_value** +> DynamicConfig get_dynamic_value(key) + +Get a config entry (appropriate rights for the entry are required) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.dynamic_config import DynamicConfig +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
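+# Note: whether a key can be read here depends on how it was stored; set_dynamic_value()
+# marks each entry as either publicly readable or restricted (see its 'public' parameter).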
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CONFIGV1Api(api_client) + key = 'key_example' # str | Key of the config value that should be fetched + + try: + # Get a config entry (appropriate rights for the entry are required) + api_response = api_instance.get_dynamic_value(key) + print("The response of CONFIGV1Api->get_dynamic_value:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CONFIGV1Api->get_dynamic_value: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **key** | **str**| Key of the config value that should be fetched | + +### Return type + +[**DynamicConfig**](DynamicConfig.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_language** +> Language get_language() + +get override strings for the current language + +Language strings + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.language import Language +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CONFIGV1Api(api_client) + + try: + # get override strings for the current language + api_response = api_instance.get_language() + print("The response of CONFIGV1Api->get_language:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CONFIGV1Api->get_language: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**Language**](Language.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_language_defaults** +> str get_language_defaults() + +get all inital language strings for angular + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CONFIGV1Api(api_client) + + try: + # get all inital language strings for angular + api_response = api_instance.get_language_defaults() + print("The response of CONFIGV1Api->get_language_defaults:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CONFIGV1Api->get_language_defaults: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_variables** +> Variables get_variables() + +get global config variables + +global config variables + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.variables import Variables +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CONFIGV1Api(api_client) + + try: + # get global config variables + api_response = api_instance.get_variables() + print("The response of CONFIGV1Api->get_variables:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CONFIGV1Api->get_variables: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +[**Variables**](Variables.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_dynamic_value** +> DynamicConfig set_dynamic_value(key, public, body) + +Set a config entry (admin rights required) + +the body must be a json encapsulated string + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.dynamic_config import DynamicConfig +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CONFIGV1Api(api_client) + key = 'key_example' # str | Key of the config value that should be fetched + public = True # bool | Is everyone allowed to read the value + body = 'body_example' # str | Must be a json-encapsulated string + + try: + # Set a config entry (admin rights required) + api_response = api_instance.set_dynamic_value(key, public, body) + print("The response of CONFIGV1Api->set_dynamic_value:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CONFIGV1Api->set_dynamic_value: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **key** | **str**| Key of the config value that should be fetched | + **public** | **bool**| Is everyone allowed to read the value | + **body** | **str**| Must be a json-encapsulated string | + +### Return type + +[**DynamicConfig**](DynamicConfig.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/CONNECTORV1Api.md b/edu_sharing_openapi/docs/CONNECTORV1Api.md new file mode 100644 index 00000000..5b9283d3 --- /dev/null +++ b/edu_sharing_openapi/docs/CONNECTORV1Api.md @@ -0,0 +1,80 @@ +# edu_sharing_client.CONNECTORV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**list_connectors**](CONNECTORV1Api.md#list_connectors) | **GET** /connector/v1/connectors/{repository}/list | List all available connectors + + +# **list_connectors** +> ConnectorList list_connectors(repository) + +List all available connectors + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.connector_list import ConnectorList +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.CONNECTORV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + + try: + # List all available connectors + api_response = api_instance.list_connectors(repository) + print("The response of CONNECTORV1Api->list_connectors:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling CONNECTORV1Api->list_connectors: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + +### Return type + +[**ConnectorList**](ConnectorList.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/CacheCluster.md b/edu_sharing_openapi/docs/CacheCluster.md new file mode 100644 index 00000000..5144f1e4 --- /dev/null +++ b/edu_sharing_openapi/docs/CacheCluster.md @@ -0,0 +1,37 @@ +# CacheCluster + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**instances** | [**List[CacheMember]**](CacheMember.md) | | [optional] +**cache_infos** | [**List[CacheInfo]**](CacheInfo.md) | | [optional] +**local_member** | **str** | | [optional] +**free_memory** | **int** | | [optional] +**total_memory** | **int** | | [optional] +**max_memory** | **int** | | [optional] +**available_processors** | **int** | | [optional] +**time_stamp** | **datetime** | | [optional] +**group_name** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.cache_cluster import CacheCluster + +# TODO update the JSON string below +json = "{}" +# create an instance of CacheCluster from a JSON string +cache_cluster_instance = CacheCluster.from_json(json) +# print the JSON string representation of the object +print(CacheCluster.to_json()) + +# convert the object into a dict +cache_cluster_dict = cache_cluster_instance.to_dict() +# create an instance of CacheCluster from a dict +cache_cluster_from_dict = CacheCluster.from_dict(cache_cluster_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CacheInfo.md b/edu_sharing_openapi/docs/CacheInfo.md new file mode 100644 index 00000000..760a40bb --- /dev/null +++ b/edu_sharing_openapi/docs/CacheInfo.md @@ -0,0 +1,41 @@ +# CacheInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**size** | **int** | | [optional] +**statistic_hits** | **int** | | [optional] +**name** | **str** | | [optional] +**backup_count** | **int** | | [optional] +**backup_entry_count** | **int** | | [optional] +**backup_entry_memory_cost** | **int** | | [optional] +**heap_cost** | **int** | | [optional] +**owned_entry_count** | **int** | | [optional] +**get_owned_entry_memory_cost** | **int** | | [optional] +**size_in_memory** | **int** | | [optional] +**member** | **str** | | [optional] +**group_name** | **str** | | [optional] +**max_size** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.cache_info import CacheInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of CacheInfo from a JSON string +cache_info_instance = CacheInfo.from_json(json) +# print the JSON string representation of the object +print(CacheInfo.to_json()) + +# convert the object into a dict +cache_info_dict = cache_info_instance.to_dict() +# create an instance of CacheInfo from a dict +cache_info_from_dict = CacheInfo.from_dict(cache_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CacheMember.md b/edu_sharing_openapi/docs/CacheMember.md new file mode 100644 index 00000000..c44ac9ef --- /dev/null +++ b/edu_sharing_openapi/docs/CacheMember.md @@ -0,0 +1,29 @@ +# 
CacheMember + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.cache_member import CacheMember + +# TODO update the JSON string below +json = "{}" +# create an instance of CacheMember from a JSON string +cache_member_instance = CacheMember.from_json(json) +# print the JSON string representation of the object +print(CacheMember.to_json()) + +# convert the object into a dict +cache_member_dict = cache_member_instance.to_dict() +# create an instance of CacheMember from a dict +cache_member_from_dict = CacheMember.from_dict(cache_member_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Catalog.md b/edu_sharing_openapi/docs/Catalog.md new file mode 100644 index 00000000..504bb726 --- /dev/null +++ b/edu_sharing_openapi/docs/Catalog.md @@ -0,0 +1,30 @@ +# Catalog + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.catalog import Catalog + +# TODO update the JSON string below +json = "{}" +# create an instance of Catalog from a JSON string +catalog_instance = Catalog.from_json(json) +# print the JSON string representation of the object +print(Catalog.to_json()) + +# convert the object into a dict +catalog_dict = catalog_instance.to_dict() +# create an instance of Catalog from a dict +catalog_from_dict = Catalog.from_dict(catalog_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Collection.md b/edu_sharing_openapi/docs/Collection.md new file mode 100644 index 00000000..a63e880a --- /dev/null +++ b/edu_sharing_openapi/docs/Collection.md @@ -0,0 +1,45 @@ +# Collection + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**scope** | **str** | | [optional] +**author_freetext** | **str** | | [optional] +**order_ascending** | **bool** | | [optional] +**level0** | **bool** | false | +**title** | **str** | | +**description** | **str** | | [optional] +**type** | **str** | | +**viewtype** | **str** | | +**order_mode** | **str** | | [optional] +**x** | **int** | | [optional] +**y** | **int** | | [optional] +**z** | **int** | | [optional] +**color** | **str** | | [optional] +**from_user** | **bool** | false | +**pinned** | **bool** | | [optional] +**child_collections_count** | **int** | | [optional] +**child_references_count** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.collection import Collection + +# TODO update the JSON string below +json = "{}" +# create an instance of Collection from a JSON string +collection_instance = Collection.from_json(json) +# print the JSON string representation of the object +print(Collection.to_json()) + +# convert the object into a dict +collection_dict = collection_instance.to_dict() +# create an instance of Collection from a dict +collection_from_dict = Collection.from_dict(collection_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionCounts.md b/edu_sharing_openapi/docs/CollectionCounts.md new file mode 100644 index 00000000..3de424ef --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionCounts.md @@ -0,0 +1,30 @@ +# CollectionCounts + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**refs** | [**List[Element]**](Element.md) | | [optional] +**collections** | [**List[Element]**](Element.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.collection_counts import CollectionCounts + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionCounts from a JSON string +collection_counts_instance = CollectionCounts.from_json(json) +# print the JSON string representation of the object +print(CollectionCounts.to_json()) + +# convert the object into a dict +collection_counts_dict = collection_counts_instance.to_dict() +# create an instance of CollectionCounts from a dict +collection_counts_from_dict = CollectionCounts.from_dict(collection_counts_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionDTO.md b/edu_sharing_openapi/docs/CollectionDTO.md new file mode 100644 index 00000000..2fa1005b --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionDTO.md @@ -0,0 +1,31 @@ +# CollectionDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type** | **str** | | [optional] +**aspects** | **List[str]** | | [optional] +**properties** | **Dict[str, object]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.collection_dto import CollectionDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionDTO from a JSON string +collection_dto_instance = CollectionDTO.from_json(json) +# print the JSON string representation of the object +print(CollectionDTO.to_json()) + +# convert the object into a dict +collection_dto_dict = collection_dto_instance.to_dict() +# create an instance of CollectionDTO from a dict +collection_dto_from_dict = CollectionDTO.from_dict(collection_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionEntries.md b/edu_sharing_openapi/docs/CollectionEntries.md new file mode 100644 index 00000000..359c9359 --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionEntries.md @@ -0,0 +1,30 @@ +# CollectionEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pagination** | [**Pagination**](Pagination.md) | | [optional] +**collections** | [**List[Node]**](Node.md) | | + +## Example + +```python +from edu_sharing_client.models.collection_entries import CollectionEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionEntries from a JSON string +collection_entries_instance = CollectionEntries.from_json(json) +# print the JSON string representation of the object +print(CollectionEntries.to_json()) + +# convert the object into a dict +collection_entries_dict = collection_entries_instance.to_dict() +# 
create an instance of CollectionEntries from a dict +collection_entries_from_dict = CollectionEntries.from_dict(collection_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionEntry.md b/edu_sharing_openapi/docs/CollectionEntry.md new file mode 100644 index 00000000..292cd86c --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionEntry.md @@ -0,0 +1,29 @@ +# CollectionEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**collection** | [**Node**](Node.md) | | + +## Example + +```python +from edu_sharing_client.models.collection_entry import CollectionEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionEntry from a JSON string +collection_entry_instance = CollectionEntry.from_json(json) +# print the JSON string representation of the object +print(CollectionEntry.to_json()) + +# convert the object into a dict +collection_entry_dict = collection_entry_instance.to_dict() +# create an instance of CollectionEntry from a dict +collection_entry_from_dict = CollectionEntry.from_dict(collection_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionOptions.md b/edu_sharing_openapi/docs/CollectionOptions.md new file mode 100644 index 00000000..af40a615 --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionOptions.md @@ -0,0 +1,30 @@ +# CollectionOptions + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**private_collections** | **str** | | [optional] +**public_collections** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.collection_options import CollectionOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionOptions from a JSON string +collection_options_instance = CollectionOptions.from_json(json) +# print the JSON string representation of the object +print(CollectionOptions.to_json()) + +# convert the object into a dict +collection_options_dict = collection_options_instance.to_dict() +# create an instance of CollectionOptions from a dict +collection_options_from_dict = CollectionOptions.from_dict(collection_options_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionProposalEntries.md b/edu_sharing_openapi/docs/CollectionProposalEntries.md new file mode 100644 index 00000000..5741b007 --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionProposalEntries.md @@ -0,0 +1,30 @@ +# CollectionProposalEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pagination** | [**Pagination**](Pagination.md) | | [optional] +**collections** | [**List[NodeCollectionProposalCount]**](NodeCollectionProposalCount.md) | | + +## Example + +```python +from edu_sharing_client.models.collection_proposal_entries import CollectionProposalEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionProposalEntries from a JSON string 
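+# (per the properties table above, 'collections' is required, so the '{}' placeholder
+# would need at least a '{"collections": []}' entry to describe a valid instance)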
+collection_proposal_entries_instance = CollectionProposalEntries.from_json(json) +# print the JSON string representation of the object +print(CollectionProposalEntries.to_json()) + +# convert the object into a dict +collection_proposal_entries_dict = collection_proposal_entries_instance.to_dict() +# create an instance of CollectionProposalEntries from a dict +collection_proposal_entries_from_dict = CollectionProposalEntries.from_dict(collection_proposal_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionReference.md b/edu_sharing_openapi/docs/CollectionReference.md new file mode 100644 index 00000000..568c578a --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionReference.md @@ -0,0 +1,64 @@ +# CollectionReference + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node_lti_deep_link** | [**NodeLTIDeepLink**](NodeLTIDeepLink.md) | | [optional] +**remote** | [**Remote**](Remote.md) | | [optional] +**content** | [**Content**](Content.md) | | [optional] +**license** | [**License**](License.md) | | [optional] +**is_directory** | **bool** | | [optional] +**comment_count** | **int** | | [optional] +**rating** | [**RatingDetails**](RatingDetails.md) | | [optional] +**used_in_collections** | [**List[Node]**](Node.md) | | [optional] +**relations** | [**Dict[str, Node]**](Node.md) | | [optional] +**contributors** | [**List[Contributor]**](Contributor.md) | | [optional] +**access_original** | **List[str]** | | [optional] +**original_restricted_access** | **bool** | | [optional] +**ref** | [**NodeRef**](NodeRef.md) | | +**parent** | [**NodeRef**](NodeRef.md) | | [optional] +**type** | **str** | | [optional] +**aspects** | **List[str]** | | [optional] +**name** | **str** | | +**title** | **str** | | [optional] +**metadataset** | **str** | | [optional] +**repository_type** | **str** | | [optional] +**created_at** | **datetime** | | +**created_by** | [**Person**](Person.md) | | +**modified_at** | **datetime** | | [optional] +**modified_by** | [**Person**](Person.md) | | [optional] +**access** | **List[str]** | | +**download_url** | **str** | | +**properties** | **Dict[str, List[str]]** | | [optional] +**mimetype** | **str** | | [optional] +**mediatype** | **str** | | [optional] +**size** | **str** | | [optional] +**preview** | [**Preview**](Preview.md) | | [optional] +**icon_url** | **str** | | [optional] +**collection** | [**Collection**](Collection.md) | | +**owner** | [**Person**](Person.md) | | +**original_id** | **str** | | [optional] +**is_public** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.collection_reference import CollectionReference + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionReference from a JSON string +collection_reference_instance = CollectionReference.from_json(json) +# print the JSON string representation of the object +print(CollectionReference.to_json()) + +# convert the object into a dict +collection_reference_dict = collection_reference_instance.to_dict() +# create an instance of CollectionReference from a dict +collection_reference_from_dict = CollectionReference.from_dict(collection_reference_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Collections.md b/edu_sharing_openapi/docs/Collections.md new file mode 100644 index 00000000..aa8a2092 --- /dev/null +++ b/edu_sharing_openapi/docs/Collections.md @@ -0,0 +1,29 @@ +# Collections + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**colors** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.collections import Collections + +# TODO update the JSON string below +json = "{}" +# create an instance of Collections from a JSON string +collections_instance = Collections.from_json(json) +# print the JSON string representation of the object +print(Collections.to_json()) + +# convert the object into a dict +collections_dict = collections_instance.to_dict() +# create an instance of Collections from a dict +collections_from_dict = Collections.from_dict(collections_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CollectionsResult.md b/edu_sharing_openapi/docs/CollectionsResult.md new file mode 100644 index 00000000..6591b2b4 --- /dev/null +++ b/edu_sharing_openapi/docs/CollectionsResult.md @@ -0,0 +1,29 @@ +# CollectionsResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**count** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.collections_result import CollectionsResult + +# TODO update the JSON string below +json = "{}" +# create an instance of CollectionsResult from a JSON string +collections_result_instance = CollectionsResult.from_json(json) +# print the JSON string representation of the object +print(CollectionsResult.to_json()) + +# convert the object into a dict +collections_result_dict = collections_result_instance.to_dict() +# create an instance of CollectionsResult from a dict +collections_result_from_dict = CollectionsResult.from_dict(collections_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Comment.md b/edu_sharing_openapi/docs/Comment.md new file mode 100644 index 00000000..d5bd3d6a --- /dev/null +++ b/edu_sharing_openapi/docs/Comment.md @@ -0,0 +1,33 @@ +# Comment + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ref** | [**NodeRef**](NodeRef.md) | | [optional] +**reply_to** | [**NodeRef**](NodeRef.md) | | [optional] +**creator** | [**UserSimple**](UserSimple.md) | | [optional] +**created** | **int** | | [optional] +**comment** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.comment import Comment + +# TODO update the JSON string below +json = "{}" +# create an instance of Comment from a JSON string +comment_instance = Comment.from_json(json) +# print the JSON string representation of the object +print(Comment.to_json()) + +# convert the object into a dict +comment_dict = comment_instance.to_dict() +# create an instance of Comment from a dict +comment_from_dict = Comment.from_dict(comment_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/CommentEventDTO.md b/edu_sharing_openapi/docs/CommentEventDTO.md new file mode 100644 index 00000000..b0759c04 --- /dev/null +++ b/edu_sharing_openapi/docs/CommentEventDTO.md @@ -0,0 +1,32 @@ +# CommentEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**comment_content** | **str** | | [optional] +**comment_reference** | **str** | | [optional] +**event** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.comment_event_dto import CommentEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of CommentEventDTO from a JSON string +comment_event_dto_instance = CommentEventDTO.from_json(json) +# print the JSON string representation of the object +print(CommentEventDTO.to_json()) + +# convert the object into a dict +comment_event_dto_dict = comment_event_dto_instance.to_dict() +# create an instance of CommentEventDTO from a dict +comment_event_dto_from_dict = CommentEventDTO.from_dict(comment_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Comments.md b/edu_sharing_openapi/docs/Comments.md new file mode 100644 index 00000000..d956738f --- /dev/null +++ b/edu_sharing_openapi/docs/Comments.md @@ -0,0 +1,29 @@ +# Comments + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**comments** | [**List[Comment]**](Comment.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.comments import Comments + +# TODO update the JSON string below +json = "{}" +# create an instance of Comments from a JSON string +comments_instance = Comments.from_json(json) +# print the JSON string representation of the object +print(Comments.to_json()) + +# convert the object into a dict +comments_dict = comments_instance.to_dict() +# create an instance of Comments from a dict +comments_from_dict = Comments.from_dict(comments_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Condition.md b/edu_sharing_openapi/docs/Condition.md new file mode 100644 index 00000000..27c8d409 --- /dev/null +++ b/edu_sharing_openapi/docs/Condition.md @@ -0,0 +1,31 @@ +# Condition + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type** | **str** | | [optional] +**negate** | **bool** | | [optional] +**value** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.condition import Condition + +# TODO update the JSON string below +json = "{}" +# create an instance of Condition from a JSON string +condition_instance = Condition.from_json(json) +# print the JSON string representation of the object +print(Condition.to_json()) + +# convert the object into a dict +condition_dict = condition_instance.to_dict() +# create an instance of Condition from a dict +condition_from_dict = Condition.from_dict(condition_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Config.md b/edu_sharing_openapi/docs/Config.md new file mode 100644 index 00000000..c3394245 --- /dev/null +++ b/edu_sharing_openapi/docs/Config.md @@ -0,0 +1,31 @@ +# Config + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**current** | [**Values**](Values.md) | | [optional] +**var_global** | [**Values**](Values.md) | | [optional] +**language** | [**Language**](Language.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.config import Config + +# TODO update the JSON string below +json = "{}" +# create an instance of Config from a JSON string +config_instance = Config.from_json(json) +# print the JSON string representation of the object +print(Config.to_json()) + +# convert the object into a dict +config_dict = config_instance.to_dict() +# create an instance of Config from a dict +config_from_dict = Config.from_dict(config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigFrontpage.md b/edu_sharing_openapi/docs/ConfigFrontpage.md new file mode 100644 index 00000000..9d734847 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigFrontpage.md @@ -0,0 +1,29 @@ +# ConfigFrontpage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**enabled** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_frontpage import ConfigFrontpage + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigFrontpage from a JSON string +config_frontpage_instance = ConfigFrontpage.from_json(json) +# print the JSON string representation of the object +print(ConfigFrontpage.to_json()) + +# convert the object into a dict +config_frontpage_dict = config_frontpage_instance.to_dict() +# create an instance of ConfigFrontpage from a dict +config_frontpage_from_dict = ConfigFrontpage.from_dict(config_frontpage_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigPrivacy.md b/edu_sharing_openapi/docs/ConfigPrivacy.md new file mode 100644 index 00000000..f92d0fb7 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigPrivacy.md @@ -0,0 +1,29 @@ +# ConfigPrivacy + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**cookie_disclaimer** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_privacy import ConfigPrivacy + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigPrivacy from a JSON string +config_privacy_instance = ConfigPrivacy.from_json(json) +# print the JSON string representation of the object +print(ConfigPrivacy.to_json()) + +# convert the object into a dict +config_privacy_dict = config_privacy_instance.to_dict() +# create an instance of ConfigPrivacy from a dict +config_privacy_from_dict = ConfigPrivacy.from_dict(config_privacy_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigPublish.md 
b/edu_sharing_openapi/docs/ConfigPublish.md new file mode 100644 index 00000000..0b3c9bb4 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigPublish.md @@ -0,0 +1,30 @@ +# ConfigPublish + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**license_mandatory** | **bool** | | [optional] +**author_mandatory** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_publish import ConfigPublish + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigPublish from a JSON string +config_publish_instance = ConfigPublish.from_json(json) +# print the JSON string representation of the object +print(ConfigPublish.to_json()) + +# convert the object into a dict +config_publish_dict = config_publish_instance.to_dict() +# create an instance of ConfigPublish from a dict +config_publish_from_dict = ConfigPublish.from_dict(config_publish_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigRating.md b/edu_sharing_openapi/docs/ConfigRating.md new file mode 100644 index 00000000..0bfd3868 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigRating.md @@ -0,0 +1,29 @@ +# ConfigRating + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**mode** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_rating import ConfigRating + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigRating from a JSON string +config_rating_instance = ConfigRating.from_json(json) +# print the JSON string representation of the object +print(ConfigRating.to_json()) + +# convert the object into a dict +config_rating_dict = config_rating_instance.to_dict() +# create an instance of ConfigRating from a dict +config_rating_from_dict = ConfigRating.from_dict(config_rating_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigRemote.md b/edu_sharing_openapi/docs/ConfigRemote.md new file mode 100644 index 00000000..877e6141 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigRemote.md @@ -0,0 +1,29 @@ +# ConfigRemote + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**rocketchat** | **object** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_remote import ConfigRemote + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigRemote from a JSON string +config_remote_instance = ConfigRemote.from_json(json) +# print the JSON string representation of the object +print(ConfigRemote.to_json()) + +# convert the object into a dict +config_remote_dict = config_remote_instance.to_dict() +# create an instance of ConfigRemote from a dict +config_remote_from_dict = ConfigRemote.from_dict(config_remote_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigThemeColor.md b/edu_sharing_openapi/docs/ConfigThemeColor.md new file mode 100644 index 00000000..a41ff431 --- /dev/null +++ 
b/edu_sharing_openapi/docs/ConfigThemeColor.md @@ -0,0 +1,30 @@ +# ConfigThemeColor + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**variable** | **str** | | [optional] +**value** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_theme_color import ConfigThemeColor + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigThemeColor from a JSON string +config_theme_color_instance = ConfigThemeColor.from_json(json) +# print the JSON string representation of the object +print(ConfigThemeColor.to_json()) + +# convert the object into a dict +config_theme_color_dict = config_theme_color_instance.to_dict() +# create an instance of ConfigThemeColor from a dict +config_theme_color_from_dict = ConfigThemeColor.from_dict(config_theme_color_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigThemeColors.md b/edu_sharing_openapi/docs/ConfigThemeColors.md new file mode 100644 index 00000000..a1ad0d58 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigThemeColors.md @@ -0,0 +1,29 @@ +# ConfigThemeColors + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**color** | [**List[ConfigThemeColor]**](ConfigThemeColor.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_theme_colors import ConfigThemeColors + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigThemeColors from a JSON string +config_theme_colors_instance = ConfigThemeColors.from_json(json) +# print the JSON string representation of the object +print(ConfigThemeColors.to_json()) + +# convert the object into a dict +config_theme_colors_dict = config_theme_colors_instance.to_dict() +# create an instance of ConfigThemeColors from a dict +config_theme_colors_from_dict = ConfigThemeColors.from_dict(config_theme_colors_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigTutorial.md b/edu_sharing_openapi/docs/ConfigTutorial.md new file mode 100644 index 00000000..a052200d --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigTutorial.md @@ -0,0 +1,29 @@ +# ConfigTutorial + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**enabled** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_tutorial import ConfigTutorial + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigTutorial from a JSON string +config_tutorial_instance = ConfigTutorial.from_json(json) +# print the JSON string representation of the object +print(ConfigTutorial.to_json()) + +# convert the object into a dict +config_tutorial_dict = config_tutorial_instance.to_dict() +# create an instance of ConfigTutorial from a dict +config_tutorial_from_dict = ConfigTutorial.from_dict(config_tutorial_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigUpload.md b/edu_sharing_openapi/docs/ConfigUpload.md new 
file mode 100644 index 00000000..f263e150 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigUpload.md @@ -0,0 +1,29 @@ +# ConfigUpload + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**post_dialog** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_upload import ConfigUpload + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigUpload from a JSON string +config_upload_instance = ConfigUpload.from_json(json) +# print the JSON string representation of the object +print(ConfigUpload.to_json()) + +# convert the object into a dict +config_upload_dict = config_upload_instance.to_dict() +# create an instance of ConfigUpload from a dict +config_upload_from_dict = ConfigUpload.from_dict(config_upload_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigWorkflow.md b/edu_sharing_openapi/docs/ConfigWorkflow.md new file mode 100644 index 00000000..6f26c4a3 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigWorkflow.md @@ -0,0 +1,32 @@ +# ConfigWorkflow + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**default_receiver** | **str** | | [optional] +**default_status** | **str** | | [optional] +**comment_required** | **bool** | | [optional] +**workflows** | [**List[ConfigWorkflowList]**](ConfigWorkflowList.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_workflow import ConfigWorkflow + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigWorkflow from a JSON string +config_workflow_instance = ConfigWorkflow.from_json(json) +# print the JSON string representation of the object +print(ConfigWorkflow.to_json()) + +# convert the object into a dict +config_workflow_dict = config_workflow_instance.to_dict() +# create an instance of ConfigWorkflow from a dict +config_workflow_from_dict = ConfigWorkflow.from_dict(config_workflow_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConfigWorkflowList.md b/edu_sharing_openapi/docs/ConfigWorkflowList.md new file mode 100644 index 00000000..c270ad18 --- /dev/null +++ b/edu_sharing_openapi/docs/ConfigWorkflowList.md @@ -0,0 +1,32 @@ +# ConfigWorkflowList + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**color** | **str** | | [optional] +**has_receiver** | **bool** | | [optional] +**next** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.config_workflow_list import ConfigWorkflowList + +# TODO update the JSON string below +json = "{}" +# create an instance of ConfigWorkflowList from a JSON string +config_workflow_list_instance = ConfigWorkflowList.from_json(json) +# print the JSON string representation of the object +print(ConfigWorkflowList.to_json()) + +# convert the object into a dict +config_workflow_list_dict = config_workflow_list_instance.to_dict() +# create an instance of ConfigWorkflowList from a dict +config_workflow_list_from_dict = ConfigWorkflowList.from_dict(config_workflow_list_dict) +``` +[[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Connector.md b/edu_sharing_openapi/docs/Connector.md new file mode 100644 index 00000000..bf6f0b3f --- /dev/null +++ b/edu_sharing_openapi/docs/Connector.md @@ -0,0 +1,35 @@ +# Connector + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**icon** | **str** | | [optional] +**show_new** | **bool** | false | +**parameters** | **List[str]** | | [optional] +**filetypes** | [**List[ConnectorFileType]**](ConnectorFileType.md) | | [optional] +**only_desktop** | **bool** | | [optional] +**has_view_mode** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.connector import Connector + +# TODO update the JSON string below +json = "{}" +# create an instance of Connector from a JSON string +connector_instance = Connector.from_json(json) +# print the JSON string representation of the object +print(Connector.to_json()) + +# convert the object into a dict +connector_dict = connector_instance.to_dict() +# create an instance of Connector from a dict +connector_from_dict = Connector.from_dict(connector_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConnectorFileType.md b/edu_sharing_openapi/docs/ConnectorFileType.md new file mode 100644 index 00000000..3250dd75 --- /dev/null +++ b/edu_sharing_openapi/docs/ConnectorFileType.md @@ -0,0 +1,36 @@ +# ConnectorFileType + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ccressourceversion** | **str** | | [optional] +**ccressourcetype** | **str** | | [optional] +**ccresourcesubtype** | **str** | | [optional] +**editor_type** | **str** | | [optional] +**mimetype** | **str** | | [optional] +**filetype** | **str** | | [optional] +**creatable** | **bool** | | [optional] +**editable** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.connector_file_type import ConnectorFileType + +# TODO update the JSON string below +json = "{}" +# create an instance of ConnectorFileType from a JSON string +connector_file_type_instance = ConnectorFileType.from_json(json) +# print the JSON string representation of the object +print(ConnectorFileType.to_json()) + +# convert the object into a dict +connector_file_type_dict = connector_file_type_instance.to_dict() +# create an instance of ConnectorFileType from a dict +connector_file_type_from_dict = ConnectorFileType.from_dict(connector_file_type_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ConnectorList.md b/edu_sharing_openapi/docs/ConnectorList.md new file mode 100644 index 00000000..9aa5ae8d --- /dev/null +++ b/edu_sharing_openapi/docs/ConnectorList.md @@ -0,0 +1,30 @@ +# ConnectorList + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] +**connectors** | [**List[Connector]**](Connector.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.connector_list import 
ConnectorList + +# TODO update the JSON string below +json = "{}" +# create an instance of ConnectorList from a JSON string +connector_list_instance = ConnectorList.from_json(json) +# print the JSON string representation of the object +print(ConnectorList.to_json()) + +# convert the object into a dict +connector_list_dict = connector_list_instance.to_dict() +# create an instance of ConnectorList from a dict +connector_list_from_dict = ConnectorList.from_dict(connector_list_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Content.md b/edu_sharing_openapi/docs/Content.md new file mode 100644 index 00000000..4be03d3a --- /dev/null +++ b/edu_sharing_openapi/docs/Content.md @@ -0,0 +1,31 @@ +# Content + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] +**hash** | **str** | | [optional] +**version** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.content import Content + +# TODO update the JSON string below +json = "{}" +# create an instance of Content from a JSON string +content_instance = Content.from_json(json) +# print the JSON string representation of the object +print(Content.to_json()) + +# convert the object into a dict +content_dict = content_instance.to_dict() +# create an instance of Content from a dict +content_from_dict = Content.from_dict(content_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ContextMenuEntry.md b/edu_sharing_openapi/docs/ContextMenuEntry.md new file mode 100644 index 00000000..ab6fa155 --- /dev/null +++ b/edu_sharing_openapi/docs/ContextMenuEntry.md @@ -0,0 +1,48 @@ +# ContextMenuEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**position** | **int** | | [optional] +**icon** | **str** | | [optional] +**name** | **str** | | [optional] +**url** | **str** | | [optional] +**is_disabled** | **bool** | | [optional] +**open_in_new** | **bool** | | [optional] +**is_separate** | **bool** | | [optional] +**is_separate_bottom** | **bool** | | [optional] +**only_desktop** | **bool** | | [optional] +**only_web** | **bool** | | [optional] +**mode** | **str** | | [optional] +**scopes** | **List[str]** | | [optional] +**ajax** | **bool** | | [optional] +**group** | **str** | | [optional] +**permission** | **str** | | [optional] +**toolpermission** | **str** | | [optional] +**is_directory** | **bool** | | [optional] +**show_as_action** | **bool** | | [optional] +**multiple** | **bool** | | [optional] +**change_strategy** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.context_menu_entry import ContextMenuEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of ContextMenuEntry from a JSON string +context_menu_entry_instance = ContextMenuEntry.from_json(json) +# print the JSON string representation of the object +print(ContextMenuEntry.to_json()) + +# convert the object into a dict +context_menu_entry_dict = context_menu_entry_instance.to_dict() +# create an instance of ContextMenuEntry from a dict +context_menu_entry_from_dict = ContextMenuEntry.from_dict(context_menu_entry_dict) 
+``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Contributor.md b/edu_sharing_openapi/docs/Contributor.md new file mode 100644 index 00000000..8236bba3 --- /dev/null +++ b/edu_sharing_openapi/docs/Contributor.md @@ -0,0 +1,34 @@ +# Contributor + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**var_property** | **str** | | [optional] +**firstname** | **str** | | [optional] +**lastname** | **str** | | [optional] +**email** | **str** | | [optional] +**vcard** | **str** | | [optional] +**org** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.contributor import Contributor + +# TODO update the JSON string below +json = "{}" +# create an instance of Contributor from a JSON string +contributor_instance = Contributor.from_json(json) +# print the JSON string representation of the object +print(Contributor.to_json()) + +# convert the object into a dict +contributor_dict = contributor_instance.to_dict() +# create an instance of Contributor from a dict +contributor_from_dict = Contributor.from_dict(contributor_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Counts.md b/edu_sharing_openapi/docs/Counts.md new file mode 100644 index 00000000..b7bd774a --- /dev/null +++ b/edu_sharing_openapi/docs/Counts.md @@ -0,0 +1,29 @@ +# Counts + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**elements** | [**List[Element]**](Element.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.counts import Counts + +# TODO update the JSON string below +json = "{}" +# create an instance of Counts from a JSON string +counts_instance = Counts.from_json(json) +# print the JSON string representation of the object +print(Counts.to_json()) + +# convert the object into a dict +counts_dict = counts_instance.to_dict() +# create an instance of Counts from a dict +counts_from_dict = Counts.from_dict(counts_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Create.md b/edu_sharing_openapi/docs/Create.md new file mode 100644 index 00000000..1c4c06f6 --- /dev/null +++ b/edu_sharing_openapi/docs/Create.md @@ -0,0 +1,29 @@ +# Create + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**only_metadata** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.create import Create + +# TODO update the JSON string below +json = "{}" +# create an instance of Create from a JSON string +create_instance = Create.from_json(json) +# print the JSON string representation of the object +print(Create.to_json()) + +# convert the object into a dict +create_dict = create_instance.to_dict() +# create an instance of Create from a dict +create_from_dict = Create.from_dict(create_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/edu_sharing_openapi/docs/CreateUsage.md b/edu_sharing_openapi/docs/CreateUsage.md new file mode 100644 index 00000000..fbeb7ec9 --- /dev/null +++ b/edu_sharing_openapi/docs/CreateUsage.md @@ -0,0 +1,33 @@ +# CreateUsage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**app_id** | **str** | | [optional] +**course_id** | **str** | | [optional] +**resource_id** | **str** | | [optional] +**node_id** | **str** | | [optional] +**node_version** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.create_usage import CreateUsage + +# TODO update the JSON string below +json = "{}" +# create an instance of CreateUsage from a JSON string +create_usage_instance = CreateUsage.from_json(json) +# print the JSON string representation of the object +print(CreateUsage.to_json()) + +# convert the object into a dict +create_usage_dict = create_usage_instance.to_dict() +# create an instance of CreateUsage from a dict +create_usage_from_dict = CreateUsage.from_dict(create_usage_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/DeleteOption.md b/edu_sharing_openapi/docs/DeleteOption.md new file mode 100644 index 00000000..1848f2bd --- /dev/null +++ b/edu_sharing_openapi/docs/DeleteOption.md @@ -0,0 +1,29 @@ +# DeleteOption + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**delete** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.delete_option import DeleteOption + +# TODO update the JSON string below +json = "{}" +# create an instance of DeleteOption from a JSON string +delete_option_instance = DeleteOption.from_json(json) +# print the JSON string representation of the object +print(DeleteOption.to_json()) + +# convert the object into a dict +delete_option_dict = delete_option_instance.to_dict() +# create an instance of DeleteOption from a dict +delete_option_from_dict = DeleteOption.from_dict(delete_option_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/DynamicConfig.md b/edu_sharing_openapi/docs/DynamicConfig.md new file mode 100644 index 00000000..95b773bd --- /dev/null +++ b/edu_sharing_openapi/docs/DynamicConfig.md @@ -0,0 +1,30 @@ +# DynamicConfig + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node_id** | **str** | | [optional] +**value** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.dynamic_config import DynamicConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of DynamicConfig from a JSON string +dynamic_config_instance = DynamicConfig.from_json(json) +# print the JSON string representation of the object +print(DynamicConfig.to_json()) + +# convert the object into a dict +dynamic_config_dict = dynamic_config_instance.to_dict() +# create an instance of DynamicConfig from a dict +dynamic_config_from_dict = DynamicConfig.from_dict(dynamic_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/edu_sharing_openapi/docs/DynamicRegistrationToken.md b/edu_sharing_openapi/docs/DynamicRegistrationToken.md new file mode 100644 index 00000000..6380d9c0 --- /dev/null +++ b/edu_sharing_openapi/docs/DynamicRegistrationToken.md @@ -0,0 +1,34 @@ +# DynamicRegistrationToken + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**token** | **str** | | [optional] +**url** | **str** | | [optional] +**registered_app_id** | **str** | | [optional] +**ts_created** | **int** | | [optional] +**ts_expiry** | **int** | | [optional] +**valid** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.dynamic_registration_token import DynamicRegistrationToken + +# TODO update the JSON string below +json = "{}" +# create an instance of DynamicRegistrationToken from a JSON string +dynamic_registration_token_instance = DynamicRegistrationToken.from_json(json) +# print the JSON string representation of the object +print(DynamicRegistrationToken.to_json()) + +# convert the object into a dict +dynamic_registration_token_dict = dynamic_registration_token_instance.to_dict() +# create an instance of DynamicRegistrationToken from a dict +dynamic_registration_token_from_dict = DynamicRegistrationToken.from_dict(dynamic_registration_token_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/DynamicRegistrationTokens.md b/edu_sharing_openapi/docs/DynamicRegistrationTokens.md new file mode 100644 index 00000000..3bfc0998 --- /dev/null +++ b/edu_sharing_openapi/docs/DynamicRegistrationTokens.md @@ -0,0 +1,29 @@ +# DynamicRegistrationTokens + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**registration_links** | [**List[DynamicRegistrationToken]**](DynamicRegistrationToken.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.dynamic_registration_tokens import DynamicRegistrationTokens + +# TODO update the JSON string below +json = "{}" +# create an instance of DynamicRegistrationTokens from a JSON string +dynamic_registration_tokens_instance = DynamicRegistrationTokens.from_json(json) +# print the JSON string representation of the object +print(DynamicRegistrationTokens.to_json()) + +# convert the object into a dict +dynamic_registration_tokens_dict = dynamic_registration_tokens_instance.to_dict() +# create an instance of DynamicRegistrationTokens from a dict +dynamic_registration_tokens_from_dict = DynamicRegistrationTokens.from_dict(dynamic_registration_tokens_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Element.md b/edu_sharing_openapi/docs/Element.md new file mode 100644 index 00000000..a6287af4 --- /dev/null +++ b/edu_sharing_openapi/docs/Element.md @@ -0,0 +1,31 @@ +# Element + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**name** | **str** | | [optional] +**type** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.element import Element + +# TODO update the JSON string below +json = "{}" +# create an instance of Element from a JSON string +element_instance = 
Element.from_json(json) +# print the JSON string representation of the object +print(Element.to_json()) + +# convert the object into a dict +element_dict = element_instance.to_dict() +# create an instance of Element from a dict +element_from_dict = Element.from_dict(element_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ErrorResponse.md b/edu_sharing_openapi/docs/ErrorResponse.md new file mode 100644 index 00000000..df466c82 --- /dev/null +++ b/edu_sharing_openapi/docs/ErrorResponse.md @@ -0,0 +1,34 @@ +# ErrorResponse + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**stacktrace** | **str** | | [optional] +**details** | **Dict[str, object]** | | [optional] +**error** | **str** | | +**message** | **str** | | +**log_level** | **str** | | [optional] +**stacktrace_array** | **List[str]** | | + +## Example + +```python +from edu_sharing_client.models.error_response import ErrorResponse + +# TODO update the JSON string below +json = "{}" +# create an instance of ErrorResponse from a JSON string +error_response_instance = ErrorResponse.from_json(json) +# print the JSON string representation of the object +print(ErrorResponse.to_json()) + +# convert the object into a dict +error_response_dict = error_response_instance.to_dict() +# create an instance of ErrorResponse from a dict +error_response_from_dict = ErrorResponse.from_dict(error_response_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ExcelResult.md b/edu_sharing_openapi/docs/ExcelResult.md new file mode 100644 index 00000000..3d5c8d90 --- /dev/null +++ b/edu_sharing_openapi/docs/ExcelResult.md @@ -0,0 +1,29 @@ +# ExcelResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**rows** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.excel_result import ExcelResult + +# TODO update the JSON string below +json = "{}" +# create an instance of ExcelResult from a JSON string +excel_result_instance = ExcelResult.from_json(json) +# print the JSON string representation of the object +print(ExcelResult.to_json()) + +# convert the object into a dict +excel_result_dict = excel_result_instance.to_dict() +# create an instance of ExcelResult from a dict +excel_result_from_dict = ExcelResult.from_dict(excel_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/FEEDBACKV1Api.md b/edu_sharing_openapi/docs/FEEDBACKV1Api.md new file mode 100644 index 00000000..ba4eb948 --- /dev/null +++ b/edu_sharing_openapi/docs/FEEDBACKV1Api.md @@ -0,0 +1,162 @@ +# edu_sharing_client.FEEDBACKV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_feedback**](FEEDBACKV1Api.md#add_feedback) | **PUT** /feedback/v1/feedback/{repository}/{node}/add | Give feedback on a node +[**get_feedbacks**](FEEDBACKV1Api.md#get_feedbacks) | **GET** /feedback/v1/feedback/{repository}/{node}/list | Get 
given feedback on a node
+
+
+# **add_feedback**
+> FeedbackResult add_feedback(repository, node, request_body)
+
+Give feedback on a node
+
+Adds feedback to the given node. Depending on the internal config, the current user will be obscured to prevent back-tracing to the original id
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.feedback_result import FeedbackResult
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.FEEDBACKV1Api(api_client)
+    repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-')
+    node = 'node_example' # str | ID of node
+    request_body = None # Dict[str, List[str]] | feedback data, key/value pairs
+
+    try:
+        # Give feedback on a node
+        api_response = api_instance.add_feedback(repository, node, request_body)
+        print("The response of FEEDBACKV1Api->add_feedback:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling FEEDBACKV1Api->add_feedback: %s\n" % e)
+```
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-']
+ **node** | **str**| ID of node | 
+ **request_body** | [**Dict[str, List[str]]**](List.md)| feedback data, key/value pairs | 
+
+### Return type
+
+[**FeedbackResult**](FeedbackResult.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | OK. | - |
+**400** | Preconditions are not present. | - |
+**401** | Authorization failed. | - |
+**403** | Session user has insufficient rights to perform this operation. | - |
+**404** | Resources are not found. | - |
+**500** | Fatal error occurred. | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **get_feedbacks**
+> List[FeedbackData] get_feedbacks(repository, node)
+
+Get given feedback on a node
+
+Get all given feedback for a node. Requires Coordinator permissions on node
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.feedback_data import FeedbackData
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.FEEDBACKV1Api(api_client)
+    repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-')
+    node = 'node_example' # str | ID of node
+
+    try:
+        # Get given feedback on a node
+        api_response = api_instance.get_feedbacks(repository, node)
+        print("The response of FEEDBACKV1Api->get_feedbacks:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling FEEDBACKV1Api->get_feedbacks: %s\n" % e)
+```
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-']
+ **node** | **str**| ID of node | 
+
+### Return type
+
+[**List[FeedbackData]**](FeedbackData.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | OK. | - |
+**400** | Preconditions are not present. | - |
+**401** | Authorization failed. | - |
+**403** | Session user has insufficient rights to perform this operation. | - |
+**404** | Resources are not found. | - |
+**500** | Fatal error occurred. | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/edu_sharing_openapi/docs/Facet.md b/edu_sharing_openapi/docs/Facet.md
new file mode 100644
index 00000000..0cc45139
--- /dev/null
+++ b/edu_sharing_openapi/docs/Facet.md
@@ -0,0 +1,31 @@
+# Facet
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**var_property** | **str** | | 
+**values** | [**List[Value]**](Value.md) | | 
+**sum_other_doc_count** | **int** | | [optional] 
+
+## Example
+
+```python
+from edu_sharing_client.models.facet import Facet
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Facet from a JSON string
+facet_instance = Facet.from_json(json)
+# print the JSON string representation of the object
+print(Facet.to_json())
+
+# convert the object into a dict
+facet_dict = facet_instance.to_dict()
+# create an instance of Facet from a dict
+facet_from_dict = Facet.from_dict(facet_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/edu_sharing_openapi/docs/FeatureInfo.md b/edu_sharing_openapi/docs/FeatureInfo.md
new file mode 100644
index 00000000..fa898e09
--- /dev/null
+++ b/edu_sharing_openapi/docs/FeatureInfo.md
@@ -0,0 +1,29 @@
+# FeatureInfo
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**id** | **str** | | [optional] 
+
+## Example
+
+```python
+from edu_sharing_client.models.feature_info import FeatureInfo
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of FeatureInfo from a JSON string
+feature_info_instance =
FeatureInfo.from_json(json) +# print the JSON string representation of the object +print(FeatureInfo.to_json()) + +# convert the object into a dict +feature_info_dict = feature_info_instance.to_dict() +# create an instance of FeatureInfo from a dict +feature_info_from_dict = FeatureInfo.from_dict(feature_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/FeedbackData.md b/edu_sharing_openapi/docs/FeedbackData.md new file mode 100644 index 00000000..6d56ad63 --- /dev/null +++ b/edu_sharing_openapi/docs/FeedbackData.md @@ -0,0 +1,32 @@ +# FeedbackData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**authority** | **str** | | [optional] +**data** | **Dict[str, List[str]]** | | [optional] +**created_at** | **datetime** | | [optional] +**modified_at** | **datetime** | | [optional] + +## Example + +```python +from edu_sharing_client.models.feedback_data import FeedbackData + +# TODO update the JSON string below +json = "{}" +# create an instance of FeedbackData from a JSON string +feedback_data_instance = FeedbackData.from_json(json) +# print the JSON string representation of the object +print(FeedbackData.to_json()) + +# convert the object into a dict +feedback_data_dict = feedback_data_instance.to_dict() +# create an instance of FeedbackData from a dict +feedback_data_from_dict = FeedbackData.from_dict(feedback_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/FeedbackResult.md b/edu_sharing_openapi/docs/FeedbackResult.md new file mode 100644 index 00000000..6b498bcf --- /dev/null +++ b/edu_sharing_openapi/docs/FeedbackResult.md @@ -0,0 +1,30 @@ +# FeedbackResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node_id** | **str** | | [optional] +**was_updated** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.feedback_result import FeedbackResult + +# TODO update the JSON string below +json = "{}" +# create an instance of FeedbackResult from a JSON string +feedback_result_instance = FeedbackResult.from_json(json) +# print the JSON string representation of the object +print(FeedbackResult.to_json()) + +# convert the object into a dict +feedback_result_dict = feedback_result_instance.to_dict() +# create an instance of FeedbackResult from a dict +feedback_result_from_dict = FeedbackResult.from_dict(feedback_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Filter.md b/edu_sharing_openapi/docs/Filter.md new file mode 100644 index 00000000..247d5466 --- /dev/null +++ b/edu_sharing_openapi/docs/Filter.md @@ -0,0 +1,29 @@ +# Filter + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**entries** | [**List[FilterEntry]**](FilterEntry.md) | | + +## Example + +```python +from edu_sharing_client.models.filter import Filter + +# TODO update the JSON string below +json = "{}" +# create an instance of Filter from a JSON string +filter_instance = 
Filter.from_json(json) +# print the JSON string representation of the object +print(Filter.to_json()) + +# convert the object into a dict +filter_dict = filter_instance.to_dict() +# create an instance of Filter from a dict +filter_from_dict = Filter.from_dict(filter_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/FilterEntry.md b/edu_sharing_openapi/docs/FilterEntry.md new file mode 100644 index 00000000..afd4fbb9 --- /dev/null +++ b/edu_sharing_openapi/docs/FilterEntry.md @@ -0,0 +1,30 @@ +# FilterEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**var_property** | **str** | | +**values** | **List[str]** | | + +## Example + +```python +from edu_sharing_client.models.filter_entry import FilterEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of FilterEntry from a JSON string +filter_entry_instance = FilterEntry.from_json(json) +# print the JSON string representation of the object +print(FilterEntry.to_json()) + +# convert the object into a dict +filter_entry_dict = filter_entry_instance.to_dict() +# create an instance of FilterEntry from a dict +filter_entry_from_dict = FilterEntry.from_dict(filter_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/FontIcon.md b/edu_sharing_openapi/docs/FontIcon.md new file mode 100644 index 00000000..f103752f --- /dev/null +++ b/edu_sharing_openapi/docs/FontIcon.md @@ -0,0 +1,31 @@ +# FontIcon + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**original** | **str** | | [optional] +**replace** | **str** | | [optional] +**css_class** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.font_icon import FontIcon + +# TODO update the JSON string below +json = "{}" +# create an instance of FontIcon from a JSON string +font_icon_instance = FontIcon.from_json(json) +# print the JSON string representation of the object +print(FontIcon.to_json()) + +# convert the object into a dict +font_icon_dict = font_icon_instance.to_dict() +# create an instance of FontIcon from a dict +font_icon_from_dict = FontIcon.from_dict(font_icon_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Frontpage.md b/edu_sharing_openapi/docs/Frontpage.md new file mode 100644 index 00000000..dd23ed83 --- /dev/null +++ b/edu_sharing_openapi/docs/Frontpage.md @@ -0,0 +1,35 @@ +# Frontpage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**total_count** | **int** | | [optional] +**display_count** | **int** | | [optional] +**mode** | **str** | | [optional] +**timespan** | **int** | | [optional] +**timespan_all** | **bool** | | [optional] +**queries** | [**List[Query]**](Query.md) | | [optional] +**collection** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.frontpage import Frontpage + +# TODO update the JSON string below +json = "{}" +# create an instance of Frontpage from a JSON string 
+frontpage_instance = Frontpage.from_json(json) +# print the JSON string representation of the object +print(Frontpage.to_json()) + +# convert the object into a dict +frontpage_dict = frontpage_instance.to_dict() +# create an instance of Frontpage from a dict +frontpage_from_dict = Frontpage.from_dict(frontpage_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/General.md b/edu_sharing_openapi/docs/General.md new file mode 100644 index 00000000..bfa4271d --- /dev/null +++ b/edu_sharing_openapi/docs/General.md @@ -0,0 +1,31 @@ +# General + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**referenced_in_name** | **str** | | [optional] +**referenced_in_type** | **str** | | [optional] +**referenced_in_instance** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.general import General + +# TODO update the JSON string below +json = "{}" +# create an instance of General from a JSON string +general_instance = General.from_json(json) +# print the JSON string representation of the object +print(General.to_json()) + +# convert the object into a dict +general_dict = general_instance.to_dict() +# create an instance of General from a dict +general_from_dict = General.from_dict(general_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Geo.md b/edu_sharing_openapi/docs/Geo.md new file mode 100644 index 00000000..dd0b0a3b --- /dev/null +++ b/edu_sharing_openapi/docs/Geo.md @@ -0,0 +1,31 @@ +# Geo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**longitude** | **float** | | [optional] +**latitude** | **float** | | [optional] +**address_country** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.geo import Geo + +# TODO update the JSON string below +json = "{}" +# create an instance of Geo from a JSON string +geo_instance = Geo.from_json(json) +# print the JSON string representation of the object +print(Geo.to_json()) + +# convert the object into a dict +geo_dict = geo_instance.to_dict() +# create an instance of Geo from a dict +geo_from_dict = Geo.from_dict(geo_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Group.md b/edu_sharing_openapi/docs/Group.md new file mode 100644 index 00000000..e36048c5 --- /dev/null +++ b/edu_sharing_openapi/docs/Group.md @@ -0,0 +1,38 @@ +# Group + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**properties** | **Dict[str, List[str]]** | | [optional] +**editable** | **bool** | | [optional] +**signup_method** | **str** | | [optional] +**ref** | [**NodeRef**](NodeRef.md) | | [optional] +**aspects** | **List[str]** | | [optional] +**organizations** | [**List[Organization]**](Organization.md) | | [optional] +**authority_name** | **str** | | +**authority_type** | **str** | | [optional] +**group_name** | **str** | | [optional] +**profile** | [**GroupProfile**](GroupProfile.md) | | [optional] + +## Example + +```python 
+from edu_sharing_client.models.group import Group + +# TODO update the JSON string below +json = "{}" +# create an instance of Group from a JSON string +group_instance = Group.from_json(json) +# print the JSON string representation of the object +print(Group.to_json()) + +# convert the object into a dict +group_dict = group_instance.to_dict() +# create an instance of Group from a dict +group_from_dict = Group.from_dict(group_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/GroupEntries.md b/edu_sharing_openapi/docs/GroupEntries.md new file mode 100644 index 00000000..d9fca6ad --- /dev/null +++ b/edu_sharing_openapi/docs/GroupEntries.md @@ -0,0 +1,30 @@ +# GroupEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**groups** | [**List[Group]**](Group.md) | | +**pagination** | [**Pagination**](Pagination.md) | | + +## Example + +```python +from edu_sharing_client.models.group_entries import GroupEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of GroupEntries from a JSON string +group_entries_instance = GroupEntries.from_json(json) +# print the JSON string representation of the object +print(GroupEntries.to_json()) + +# convert the object into a dict +group_entries_dict = group_entries_instance.to_dict() +# create an instance of GroupEntries from a dict +group_entries_from_dict = GroupEntries.from_dict(group_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/GroupEntry.md b/edu_sharing_openapi/docs/GroupEntry.md new file mode 100644 index 00000000..b3e07f32 --- /dev/null +++ b/edu_sharing_openapi/docs/GroupEntry.md @@ -0,0 +1,29 @@ +# GroupEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**group** | [**Group**](Group.md) | | + +## Example + +```python +from edu_sharing_client.models.group_entry import GroupEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of GroupEntry from a JSON string +group_entry_instance = GroupEntry.from_json(json) +# print the JSON string representation of the object +print(GroupEntry.to_json()) + +# convert the object into a dict +group_entry_dict = group_entry_instance.to_dict() +# create an instance of GroupEntry from a dict +group_entry_from_dict = GroupEntry.from_dict(group_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/GroupProfile.md b/edu_sharing_openapi/docs/GroupProfile.md new file mode 100644 index 00000000..b2be3e99 --- /dev/null +++ b/edu_sharing_openapi/docs/GroupProfile.md @@ -0,0 +1,32 @@ +# GroupProfile + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**group_email** | **str** | | [optional] +**display_name** | **str** | | [optional] +**group_type** | **str** | | [optional] +**scope_type** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.group_profile import GroupProfile + +# TODO update the JSON string below +json = "{}" +# create an 
instance of GroupProfile from a JSON string +group_profile_instance = GroupProfile.from_json(json) +# print the JSON string representation of the object +print(GroupProfile.to_json()) + +# convert the object into a dict +group_profile_dict = group_profile_instance.to_dict() +# create an instance of GroupProfile from a dict +group_profile_from_dict = GroupProfile.from_dict(group_profile_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/GroupSignupDetails.md b/edu_sharing_openapi/docs/GroupSignupDetails.md new file mode 100644 index 00000000..efe12cf3 --- /dev/null +++ b/edu_sharing_openapi/docs/GroupSignupDetails.md @@ -0,0 +1,30 @@ +# GroupSignupDetails + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**signup_method** | **str** | | [optional] +**signup_password** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.group_signup_details import GroupSignupDetails + +# TODO update the JSON string below +json = "{}" +# create an instance of GroupSignupDetails from a JSON string +group_signup_details_instance = GroupSignupDetails.from_json(json) +# print the JSON string representation of the object +print(GroupSignupDetails.to_json()) + +# convert the object into a dict +group_signup_details_dict = group_signup_details_instance.to_dict() +# create an instance of GroupSignupDetails from a dict +group_signup_details_from_dict = GroupSignupDetails.from_dict(group_signup_details_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Guest.md b/edu_sharing_openapi/docs/Guest.md new file mode 100644 index 00000000..0f809769 --- /dev/null +++ b/edu_sharing_openapi/docs/Guest.md @@ -0,0 +1,29 @@ +# Guest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**enabled** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.guest import Guest + +# TODO update the JSON string below +json = "{}" +# create an instance of Guest from a JSON string +guest_instance = Guest.from_json(json) +# print the JSON string representation of the object +print(Guest.to_json()) + +# convert the object into a dict +guest_dict = guest_instance.to_dict() +# create an instance of Guest from a dict +guest_from_dict = Guest.from_dict(guest_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/HandleParam.md b/edu_sharing_openapi/docs/HandleParam.md new file mode 100644 index 00000000..33b920dc --- /dev/null +++ b/edu_sharing_openapi/docs/HandleParam.md @@ -0,0 +1,30 @@ +# HandleParam + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**handle_service** | **str** | | [optional] +**doi_service** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.handle_param import HandleParam + +# TODO update the JSON string below +json = "{}" +# create an instance of HandleParam from a JSON string +handle_param_instance = HandleParam.from_json(json) +# print the JSON string 
representation of the object +print(HandleParam.to_json()) + +# convert the object into a dict +handle_param_dict = handle_param_instance.to_dict() +# create an instance of HandleParam from a dict +handle_param_from_dict = HandleParam.from_dict(handle_param_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/HelpMenuOptions.md b/edu_sharing_openapi/docs/HelpMenuOptions.md new file mode 100644 index 00000000..d45fa9ec --- /dev/null +++ b/edu_sharing_openapi/docs/HelpMenuOptions.md @@ -0,0 +1,31 @@ +# HelpMenuOptions + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | **str** | | [optional] +**icon** | **str** | | [optional] +**url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.help_menu_options import HelpMenuOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of HelpMenuOptions from a JSON string +help_menu_options_instance = HelpMenuOptions.from_json(json) +# print the JSON string representation of the object +print(HelpMenuOptions.to_json()) + +# convert the object into a dict +help_menu_options_dict = help_menu_options_instance.to_dict() +# create an instance of HelpMenuOptions from a dict +help_menu_options_from_dict = HelpMenuOptions.from_dict(help_menu_options_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/HomeFolderOptions.md b/edu_sharing_openapi/docs/HomeFolderOptions.md new file mode 100644 index 00000000..874ef4c0 --- /dev/null +++ b/edu_sharing_openapi/docs/HomeFolderOptions.md @@ -0,0 +1,32 @@ +# HomeFolderOptions + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**folders** | **str** | | [optional] +**private_files** | **str** | | [optional] +**cc_files** | **str** | | [optional] +**keep_folder_structure** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.home_folder_options import HomeFolderOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of HomeFolderOptions from a JSON string +home_folder_options_instance = HomeFolderOptions.from_json(json) +# print the JSON string representation of the object +print(HomeFolderOptions.to_json()) + +# convert the object into a dict +home_folder_options_dict = home_folder_options_instance.to_dict() +# create an instance of HomeFolderOptions from a dict +home_folder_options_from_dict = HomeFolderOptions.from_dict(home_folder_options_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/IAMV1Api.md b/edu_sharing_openapi/docs/IAMV1Api.md new file mode 100644 index 00000000..935fdb88 --- /dev/null +++ b/edu_sharing_openapi/docs/IAMV1Api.md @@ -0,0 +1,2659 @@ +# edu_sharing_client.IAMV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_membership**](IAMV1Api.md#add_membership) | **PUT** /iam/v1/groups/{repository}/{group}/members/{member} | Add member to 
the group. +[**add_node_list**](IAMV1Api.md#add_node_list) | **PUT** /iam/v1/people/{repository}/{person}/nodeList/{list}/{node} | Add a node to node a list of a user +[**change_group_profile**](IAMV1Api.md#change_group_profile) | **PUT** /iam/v1/groups/{repository}/{group}/profile | Set profile of the group. +[**change_user_avatar**](IAMV1Api.md#change_user_avatar) | **PUT** /iam/v1/people/{repository}/{person}/avatar | Set avatar of the user. +[**change_user_password**](IAMV1Api.md#change_user_password) | **PUT** /iam/v1/people/{repository}/{person}/credential | Change/Set password of the user. +[**change_user_profile**](IAMV1Api.md#change_user_profile) | **PUT** /iam/v1/people/{repository}/{person}/profile | Set profile of the user. +[**confirm_signup**](IAMV1Api.md#confirm_signup) | **PUT** /iam/v1/groups/{repository}/{group}/signup/list/{user} | put the pending user into the group +[**create_group**](IAMV1Api.md#create_group) | **POST** /iam/v1/groups/{repository}/{group} | Create a new group. +[**create_user**](IAMV1Api.md#create_user) | **POST** /iam/v1/people/{repository}/{person} | Create a new user. +[**delete_group**](IAMV1Api.md#delete_group) | **DELETE** /iam/v1/groups/{repository}/{group} | Delete the group. +[**delete_membership**](IAMV1Api.md#delete_membership) | **DELETE** /iam/v1/groups/{repository}/{group}/members/{member} | Delete member from the group. +[**delete_user**](IAMV1Api.md#delete_user) | **DELETE** /iam/v1/people/{repository}/{person} | Delete the user. +[**get_group**](IAMV1Api.md#get_group) | **GET** /iam/v1/groups/{repository}/{group} | Get the group. +[**get_membership**](IAMV1Api.md#get_membership) | **GET** /iam/v1/groups/{repository}/{group}/members | Get all members of the group. +[**get_node_list**](IAMV1Api.md#get_node_list) | **GET** /iam/v1/people/{repository}/{person}/nodeList/{list} | Get a specific node list for a user +[**get_preferences**](IAMV1Api.md#get_preferences) | **GET** /iam/v1/people/{repository}/{person}/preferences | Get preferences stored for user +[**get_profile_settings**](IAMV1Api.md#get_profile_settings) | **GET** /iam/v1/people/{repository}/{person}/profileSettings | Get profileSettings configuration +[**get_recently_invited**](IAMV1Api.md#get_recently_invited) | **GET** /iam/v1/authorities/{repository}/recent | Get recently invited authorities. +[**get_subgroup_by_type**](IAMV1Api.md#get_subgroup_by_type) | **GET** /iam/v1/groups/{repository}/{group}/type/{type} | Get a subgroup by the specified type +[**get_user**](IAMV1Api.md#get_user) | **GET** /iam/v1/people/{repository}/{person} | Get the user. +[**get_user_groups**](IAMV1Api.md#get_user_groups) | **GET** /iam/v1/people/{repository}/{person}/memberships | Get all groups the given user is member of. +[**get_user_stats**](IAMV1Api.md#get_user_stats) | **GET** /iam/v1/people/{repository}/{person}/stats | Get the user stats. +[**reject_signup**](IAMV1Api.md#reject_signup) | **DELETE** /iam/v1/groups/{repository}/{group}/signup/list/{user} | reject the pending user +[**remove_node_list**](IAMV1Api.md#remove_node_list) | **DELETE** /iam/v1/people/{repository}/{person}/nodeList/{list}/{node} | Delete a node of a node list of a user +[**remove_user_avatar**](IAMV1Api.md#remove_user_avatar) | **DELETE** /iam/v1/people/{repository}/{person}/avatar | Remove avatar of the user. +[**search_authorities**](IAMV1Api.md#search_authorities) | **GET** /iam/v1/authorities/{repository} | Search authorities. 
+[**search_groups**](IAMV1Api.md#search_groups) | **GET** /iam/v1/groups/{repository} | Search groups. +[**search_user**](IAMV1Api.md#search_user) | **GET** /iam/v1/people/{repository} | Search users. +[**set_preferences**](IAMV1Api.md#set_preferences) | **PUT** /iam/v1/people/{repository}/{person}/preferences | Set preferences for user +[**set_profile_settings**](IAMV1Api.md#set_profile_settings) | **PUT** /iam/v1/people/{repository}/{person}/profileSettings | Set profileSettings Configuration +[**signup_group**](IAMV1Api.md#signup_group) | **POST** /iam/v1/groups/{repository}/{group}/signup | let the current user signup to the given group +[**signup_group_details**](IAMV1Api.md#signup_group_details) | **POST** /iam/v1/groups/{repository}/{group}/signup/config | requires admin rights +[**signup_group_list**](IAMV1Api.md#signup_group_list) | **GET** /iam/v1/groups/{repository}/{group}/signup/list | list pending users that want to join this group +[**update_user_status**](IAMV1Api.md#update_user_status) | **PUT** /iam/v1/people/{repository}/{person}/status/{status} | update the user status. + + +# **add_membership** +> add_membership(repository, group, member) + +Add member to the group. + +Add member to the group. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | groupname + member = 'member_example' # str | authorityName of member + + try: + # Add member to the group. + api_instance.add_membership(repository, group, member) + except Exception as e: + print("Exception when calling IAMV1Api->add_membership: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| groupname | + **member** | **str**| authorityName of member | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **add_node_list** +> add_node_list(repository, person, list, node) + +Add a node to node a list of a user + +For guest users, the list will be temporary stored in the current session + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + list = 'list_example' # str | list name. If this list does not exist, it will be created + node = 'node_example' # str | ID of node + + try: + # Add a node to node a list of a user + api_instance.add_node_list(repository, person, list, node) + except Exception as e: + print("Exception when calling IAMV1Api->add_node_list: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **list** | **str**| list name. If this list does not exist, it will be created | + **node** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_group_profile** +> change_group_profile(repository, group, group_profile) + +Set profile of the group. + +Set profile of the group. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
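+# Note: the endpoint description above states that admin rights are required, even though the
+# generated spec declares no authorization scheme, so this call will normally only succeed for a
+# client authenticated as an admin (how that authentication is supplied depends on the server setup).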
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | groupname + group_profile = edu_sharing_client.GroupProfile() # GroupProfile | properties + + try: + # Set profile of the group. + api_instance.change_group_profile(repository, group, group_profile) + except Exception as e: + print("Exception when calling IAMV1Api->change_group_profile: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| groupname | + **group_profile** | [**GroupProfile**](GroupProfile.md)| properties | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_user_avatar** +> change_user_avatar(repository, person, avatar) + +Set avatar of the user. + +Set avatar of the user. (To set foreign avatars, admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + avatar = None # object | avatar image + + try: + # Set avatar of the user. 
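+        # 'avatar' is the raw image payload; the request is sent as multipart/form-data
+        # (see the request headers below). Passing e.g. the bytes of an opened image file is an
+        # illustrative assumption, since the spec only types this parameter as 'object'.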
+ api_instance.change_user_avatar(repository, person, avatar) + except Exception as e: + print("Exception when calling IAMV1Api->change_user_avatar: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **avatar** | [**object**](object.md)| avatar image | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_user_password** +> change_user_password(repository, person, user_credential) + +Change/Set password of the user. + +Change/Set password of the user. (To change foreign passwords or set passwords, admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.user_credential import UserCredential +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + user_credential = edu_sharing_client.UserCredential() # UserCredential | credential + + try: + # Change/Set password of the user. + api_instance.change_user_password(repository, person, user_credential) + except Exception as e: + print("Exception when calling IAMV1Api->change_user_password: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **user_credential** | [**UserCredential**](UserCredential.md)| credential | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_user_profile** +> change_user_profile(repository, person, user_profile_edit) + +Set profile of the user. + +Set profile of the user. (To set foreign profiles, admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.user_profile_edit import UserProfileEdit +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + user_profile_edit = edu_sharing_client.UserProfileEdit() # UserProfileEdit | properties + + try: + # Set profile of the user. + api_instance.change_user_profile(repository, person, user_profile_edit) + except Exception as e: + print("Exception when calling IAMV1Api->change_user_profile: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **user_profile_edit** | [**UserProfileEdit**](UserProfileEdit.md)| properties | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **confirm_signup** +> str confirm_signup(repository, group, user) + +put the pending user into the group + +Requires admin rights or org administrator on this group + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
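+# Note: confirm_signup requires admin rights or org-administrator rights on the group
+# and returns a plain string (see the 'Return type' section below).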
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | ID of group + user = 'user_example' # str | ID of user + + try: + # put the pending user into the group + api_response = api_instance.confirm_signup(repository, group, user) + print("The response of IAMV1Api->confirm_signup:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->confirm_signup: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| ID of group | + **user** | **str**| ID of user | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_group** +> Group create_group(repository, group, group_profile, parent=parent) + +Create a new group. + +Create a new group. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.group import Group +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | groupname + group_profile = edu_sharing_client.GroupProfile() # GroupProfile | properties + parent = 'parent_example' # str | parent (will be added to this parent, also for name hashing), may be null (optional) + + try: + # Create a new group. 
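+        # 'parent' is optional and may be None; on success the call returns a Group model
+        # describing the newly created group (see Group.md).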
+ api_response = api_instance.create_group(repository, group, group_profile, parent=parent) + print("The response of IAMV1Api->create_group:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->create_group: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| groupname | + **group_profile** | [**GroupProfile**](GroupProfile.md)| properties | + **parent** | **str**| parent (will be added to this parent, also for name hashing), may be null | [optional] + +### Return type + +[**Group**](Group.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_user** +> User create_user(repository, person, user_profile_edit, password=password) + +Create a new user. + +Create a new user. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.user import User +from edu_sharing_client.models.user_profile_edit import UserProfileEdit +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = 'person_example' # str | username + user_profile_edit = edu_sharing_client.UserProfileEdit() # UserProfileEdit | profile + password = 'password_example' # str | Password, leave empty if you don't want to set any (optional) + + try: + # Create a new user. 
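+        # 'password' is optional; leave it unset if no password should be stored.
+        # On success the call returns a User model (see User.md).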
+ api_response = api_instance.create_user(repository, person, user_profile_edit, password=password) + print("The response of IAMV1Api->create_user:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->create_user: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username | + **user_profile_edit** | [**UserProfileEdit**](UserProfileEdit.md)| profile | + **password** | **str**| Password, leave empty if you don't want to set any | [optional] + +### Return type + +[**User**](User.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_group** +> delete_group(repository, group) + +Delete the group. + +Delete the group. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | groupname + + try: + # Delete the group. + api_instance.delete_group(repository, group) + except Exception as e: + print("Exception when calling IAMV1Api->delete_group: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| groupname | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_membership** +> delete_membership(repository, group, member) + +Delete member from the group. + +Delete member from the group. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | groupname + member = 'member_example' # str | authorityName of member + + try: + # Delete member from the group. + api_instance.delete_membership(repository, group, member) + except Exception as e: + print("Exception when calling IAMV1Api->delete_membership: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| groupname | + **member** | **str**| authorityName of member | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_user** +> delete_user(repository, person, force=force) + +Delete the user. + +Delete the user. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
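+# Note on the 'force' flag used below: with force=False (the default) only users that were
+# previously marked for deletion are actually removed; pass force=True to force the deletion.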
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = 'person_example' # str | username + force = False # bool | force the deletion (if false then only persons which are previously marked for deletion are getting deleted) (optional) (default to False) + + try: + # Delete the user. + api_instance.delete_user(repository, person, force=force) + except Exception as e: + print("Exception when calling IAMV1Api->delete_user: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username | + **force** | **bool**| force the deletion (if false then only persons which are previously marked for deletion are getting deleted) | [optional] [default to False] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_group** +> GroupEntry get_group(repository, group) + +Get the group. + +Get the group. (To get foreign profiles, admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.group_entry import GroupEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | groupname + + try: + # Get the group. 
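+        # The response is a GroupEntry wrapping a single Group object (see GroupEntry.md).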
+ api_response = api_instance.get_group(repository, group) + print("The response of IAMV1Api->get_group:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_group: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| groupname | + +### Return type + +[**GroupEntry**](GroupEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_membership** +> AuthorityEntries get_membership(repository, group, pattern=pattern, authority_type=authority_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Get all members of the group. + +Get all members of the group. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.authority_entries import AuthorityEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | authority name (begins with GROUP_) + pattern = 'pattern_example' # str | pattern (optional) + authority_type = 'authority_type_example' # str | authorityType either GROUP or USER, empty to show all (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Get all members of the group. 
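+        # Results are paginated: max_items defaults to 10 and skip_count to 0. Set authority_type
+        # to 'USER' or 'GROUP' to restrict the member type, or leave it empty to show all members.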
+ api_response = api_instance.get_membership(repository, group, pattern=pattern, authority_type=authority_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of IAMV1Api->get_membership:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_membership: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| authority name (begins with GROUP_) | + **pattern** | **str**| pattern | [optional] + **authority_type** | **str**| authorityType either GROUP or USER, empty to show all | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**AuthorityEntries**](AuthorityEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_node_list** +> NodeEntries get_node_list(repository, person, list, property_filter=property_filter, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Get a specific node list for a user + +For guest users, the list will be temporary stored in the current session + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
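+# Notes on the parameters used below: property_filter accepts '-all-' to return every property
+# of the result nodes, and for guest users the node list is only kept in the current session.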
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + list = 'list_example' # str | list name + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Get a specific node list for a user + api_response = api_instance.get_node_list(repository, person, list, property_filter=property_filter, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of IAMV1Api->get_node_list:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_node_list: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **list** | **str**| list name | + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**NodeEntries**](NodeEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_preferences** +> Preferences get_preferences(repository, person) + +Get preferences stored for user + +Will fail for guest + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.preferences import Preferences +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
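+# Note: this call fails for the guest user; person defaults to '-me-', i.e. the currently
+# authenticated user.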
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + + try: + # Get preferences stored for user + api_response = api_instance.get_preferences(repository, person) + print("The response of IAMV1Api->get_preferences:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_preferences: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + +### Return type + +[**Preferences**](Preferences.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_profile_settings** +> ProfileSettings get_profile_settings(repository, person) + +Get profileSettings configuration + +Will fail for guest + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.profile_settings import ProfileSettings +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + + try: + # Get profileSettings configuration + api_response = api_instance.get_profile_settings(repository, person) + print("The response of IAMV1Api->get_profile_settings:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_profile_settings: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + +### Return type + +[**ProfileSettings**](ProfileSettings.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_recently_invited** +> AuthorityEntries get_recently_invited(repository) + +Get recently invited authorities. + +Get the authorities the current user has recently invited. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.authority_entries import AuthorityEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + + try: + # Get recently invited authorities. 
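+        # Returns an AuthorityEntries object listing the authorities the current user
+        # has recently invited (see AuthorityEntries.md).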
+ api_response = api_instance.get_recently_invited(repository) + print("The response of IAMV1Api->get_recently_invited:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_recently_invited: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + +### Return type + +[**AuthorityEntries**](AuthorityEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_subgroup_by_type** +> AuthorityEntries get_subgroup_by_type(repository, group, type) + +Get a subgroup by the specified type + +Get a subgroup by the specified type + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.authority_entries import AuthorityEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | authority name of the parent/primary group (begins with GROUP_) + type = 'type_example' # str | group type to filter for, e.g. ORG_ADMINISTRATORS + + try: + # Get a subgroup by the specified type + api_response = api_instance.get_subgroup_by_type(repository, group, type) + print("The response of IAMV1Api->get_subgroup_by_type:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_subgroup_by_type: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| authority name of the parent/primary group (begins with GROUP_) | + **type** | **str**| group type to filter for, e.g. ORG_ADMINISTRATORS | + +### Return type + +[**AuthorityEntries**](AuthorityEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Resources are not found. | - | +**500** | Fatal error occurred. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_user** +> UserEntry get_user(repository, person) + +Get the user. + +Get the user. (Not all information is fetched for foreign profiles if the current user is not an admin) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.user_entry import UserEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + + try: + # Get the user. + api_response = api_instance.get_user(repository, person) + print("The response of IAMV1Api->get_user:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_user: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + +### Return type + +[**UserEntry**](UserEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Resources are not found. | - | +**500** | Fatal error occurred. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_user_groups** +> GroupEntries get_user_groups(repository, person, pattern=pattern, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Get all groups the given user is member of. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.group_entries import GroupEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters.
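+# When running against a self-hosted edu-sharing instance with a self-signed
+# certificate, TLS options can typically be set on the configuration object created
+# below (attribute names are assumptions based on standard openapi-generator clients):
+#
+# configuration.verify_ssl = False
+# configuration.ssl_ca_cert = "/path/to/ca.pem"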
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = 'person_example' # str | authority name + pattern = 'pattern_example' # str | pattern (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Get all groups the given user is member of. + api_response = api_instance.get_user_groups(repository, person, pattern=pattern, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of IAMV1Api->get_user_groups:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_user_groups: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| authority name | + **pattern** | **str**| pattern | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**GroupEntries**](GroupEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_user_stats** +> UserStats get_user_stats(repository, person) + +Get the user stats. + +Get the user stats (e.g. publicly created material count) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.user_stats import UserStats +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + + try: + # Get the user stats. + api_response = api_instance.get_user_stats(repository, person) + print("The response of IAMV1Api->get_user_stats:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->get_user_stats: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + +### Return type + +[**UserStats**](UserStats.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **reject_signup** +> str reject_signup(repository, group, user) + +reject the pending user + +Requires admin rights or org administrator on this group + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | ID of group + user = 'user_example' # str | ID of user + + try: + # reject the pending user + api_response = api_instance.reject_signup(repository, group, user) + print("The response of IAMV1Api->reject_signup:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->reject_signup: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| ID of group | + **user** | **str**| ID of user | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_node_list** +> remove_node_list(repository, person, list, node) + +Delete a node of a node list of a user + +For guest users, the list will be temporary stored in the current session + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + list = 'list_example' # str | list name + node = 'node_example' # str | ID of node + + try: + # Delete a node of a node list of a user + api_instance.remove_node_list(repository, person, list, node) + except Exception as e: + print("Exception when calling IAMV1Api->remove_node_list: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **list** | **str**| list name | + **node** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_user_avatar** +> remove_user_avatar(repository, person) + +Remove avatar of the user. + +Remove avatar of the user. (To Remove foreign avatars, admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + + try: + # Remove avatar of the user. 
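+        # remove_user_avatar returns no body; failures are reported via the raised
+        # exception instead. Catching ApiException (imported above) rather than the
+        # bare Exception gives access to the HTTP status and response body -- the
+        # exact attribute names on ApiException are an assumption based on typical
+        # generated clients.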
+ api_instance.remove_user_avatar(repository, person) + except Exception as e: + print("Exception when calling IAMV1Api->remove_user_avatar: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_authorities** +> AuthorityEntries search_authorities(repository, pattern, var_global=var_global, group_type=group_type, signup_method=signup_method, max_items=max_items, skip_count=skip_count) + +Search authorities. + +Search authorities. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.authority_entries import AuthorityEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + pattern = 'pattern_example' # str | pattern + var_global = True # bool | global search context, defaults to true, otherwise just searches for users within the organizations (optional) (default to True) + group_type = 'group_type_example' # str | find a specific groupType (does nothing for persons) (optional) + signup_method = 'signup_method_example' # str | find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons) (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + + try: + # Search authorities. 
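+        # max_items / skip_count allow simple paging. A rough sketch of walking the
+        # full result set (the `authorities` list attribute is an assumption based on
+        # the AuthorityEntries model; the loop itself is illustrative only):
+        #
+        # skip = 0
+        # while True:
+        #     page = api_instance.search_authorities(repository, pattern,
+        #                                            max_items=max_items, skip_count=skip)
+        #     if not page.authorities:
+        #         break
+        #     skip += len(page.authorities)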
+ api_response = api_instance.search_authorities(repository, pattern, var_global=var_global, group_type=group_type, signup_method=signup_method, max_items=max_items, skip_count=skip_count) + print("The response of IAMV1Api->search_authorities:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->search_authorities: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **pattern** | **str**| pattern | + **var_global** | **bool**| global search context, defaults to true, otherwise just searches for users within the organizations | [optional] [default to True] + **group_type** | **str**| find a specific groupType (does nothing for persons) | [optional] + **signup_method** | **str**| find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons) | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + +### Return type + +[**AuthorityEntries**](AuthorityEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_groups** +> GroupEntries search_groups(repository, pattern, group_type=group_type, signup_method=signup_method, var_global=var_global, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Search groups. + +Search groups. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.group_entries import GroupEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
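+# While experimenting it can help to log the raw HTTP traffic; generated clients
+# usually expose a debug switch on the configuration object created below
+# (attribute name is an assumption based on standard openapi-generator clients):
+#
+# configuration.debug = True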
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + pattern = 'pattern_example' # str | pattern + group_type = 'group_type_example' # str | find a specific groupType (optional) + signup_method = 'signup_method_example' # str | find a specific signupMethod for groups (or asterisk for all including one) (optional) + var_global = True # bool | global search context, defaults to true, otherwise just searches for groups within the organizations (optional) (default to True) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Search groups. + api_response = api_instance.search_groups(repository, pattern, group_type=group_type, signup_method=signup_method, var_global=var_global, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of IAMV1Api->search_groups:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->search_groups: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **pattern** | **str**| pattern | + **group_type** | **str**| find a specific groupType | [optional] + **signup_method** | **str**| find a specific signupMethod for groups (or asterisk for all including one) | [optional] + **var_global** | **bool**| global search context, defaults to true, otherwise just searches for groups within the organizations | [optional] [default to True] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**GroupEntries**](GroupEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_user** +> UserEntries search_user(repository, pattern, var_global=var_global, status=status, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Search users. + +Search users. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.user_entries import UserEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + pattern = 'pattern_example' # str | pattern + var_global = True # bool | global search context, defaults to true, otherwise just searches for users within the organizations (optional) (default to True) + status = 'status_example' # str | the user status (e.g. active), if not set, all users are returned (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Search users. + api_response = api_instance.search_user(repository, pattern, var_global=var_global, status=status, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of IAMV1Api->search_user:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->search_user: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **pattern** | **str**| pattern | + **var_global** | **bool**| global search context, defaults to true, otherwise just searches for users within the organizations | [optional] [default to True] + **status** | **str**| the user status (e.g. active), if not set, all users are returned | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**UserEntries**](UserEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_preferences** +> set_preferences(repository, person, body) + +Set preferences for user + +Will fail for guest + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + body = 'body_example' # str | preferences (json string) + + try: + # Set preferences for user + api_instance.set_preferences(repository, person, body) + except Exception as e: + print("Exception when calling IAMV1Api->set_preferences: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **body** | **str**| preferences (json string) | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_profile_settings** +> set_profile_settings(repository, person, profile_settings) + +Set profileSettings Configuration + +Will fail for guest + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.profile_settings import ProfileSettings +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = '-me-' # str | username (or \"-me-\" for current user) (default to '-me-') + profile_settings = edu_sharing_client.ProfileSettings() # ProfileSettings | ProfileSetting Object + + try: + # Set profileSettings Configuration + api_instance.set_profile_settings(repository, person, profile_settings) + except Exception as e: + print("Exception when calling IAMV1Api->set_profile_settings: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username (or \"-me-\" for current user) | [default to '-me-'] + **profile_settings** | [**ProfileSettings**](ProfileSettings.md)| ProfileSetting Object | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **signup_group** +> str signup_group(repository, group, password=password) + +let the current user signup to the given group + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | ID of group + password = 'password_example' # str | Password for signup (only required if signupMethod == password) (optional) + + try: + # let the current user signup to the given group + api_response = api_instance.signup_group(repository, group, password=password) + print("The response of IAMV1Api->signup_group:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->signup_group: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| ID of group | + **password** | **str**| Password for signup (only required if signupMethod == password) | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **signup_group_details** +> signup_group_details(repository, group, group_signup_details) + + requires admin rights + +set group signup options + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.group_signup_details import GroupSignupDetails +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | ID of group + group_signup_details = edu_sharing_client.GroupSignupDetails() # GroupSignupDetails | Details to edit + + try: + # requires admin rights + api_instance.signup_group_details(repository, group, group_signup_details) + except Exception as e: + print("Exception when calling IAMV1Api->signup_group_details: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| ID of group | + **group_signup_details** | [**GroupSignupDetails**](GroupSignupDetails.md)| Details to edit | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **signup_group_list** +> str signup_group_list(repository, group) + +list pending users that want to join this group + +Requires admin rights or org administrator on this group + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + group = 'group_example' # str | ID of group + + try: + # list pending users that want to join this group + api_response = api_instance.signup_group_list(repository, group) + print("The response of IAMV1Api->signup_group_list:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling IAMV1Api->signup_group_list: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **group** | **str**| ID of group | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_user_status** +> update_user_status(repository, person, status, notify) + +update the user status. + +update the user status. (admin rights are required.) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.IAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + person = 'person_example' # str | username + status = 'status_example' # str | the new status to set + notify = True # bool | notify the user via mail (default to True) + + try: + # update the user status. 
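+        # update_user_status returns no body. Note that notify=True asks the server to
+        # mail the affected user, so notify=False is usually preferable for bulk or
+        # test runs (an inference from the parameter description, not additional
+        # documented behaviour).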
+ api_instance.update_user_status(repository, person, status, notify) + except Exception as e: + print("Exception when calling IAMV1Api->update_user_status: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **person** | **str**| username | + **status** | **str**| the new status to set | + **notify** | **bool**| notify the user via mail | [default to True] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/Icon.md b/edu_sharing_openapi/docs/Icon.md new file mode 100644 index 00000000..98beb9a5 --- /dev/null +++ b/edu_sharing_openapi/docs/Icon.md @@ -0,0 +1,29 @@ +# Icon + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.icon import Icon + +# TODO update the JSON string below +json = "{}" +# create an instance of Icon from a JSON string +icon_instance = Icon.from_json(json) +# print the JSON string representation of the object +print(Icon.to_json()) + +# convert the object into a dict +icon_dict = icon_instance.to_dict() +# create an instance of Icon from a dict +icon_from_dict = Icon.from_dict(icon_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Image.md b/edu_sharing_openapi/docs/Image.md new file mode 100644 index 00000000..654ca8be --- /dev/null +++ b/edu_sharing_openapi/docs/Image.md @@ -0,0 +1,30 @@ +# Image + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**src** | **str** | | [optional] +**replace** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.image import Image + +# TODO update the JSON string below +json = "{}" +# create an instance of Image from a JSON string +image_instance = Image.from_json(json) +# print the JSON string representation of the object +print(Image.to_json()) + +# convert the object into a dict +image_dict = image_instance.to_dict() +# create an instance of Image from a dict +image_from_dict = Image.from_dict(image_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Interface.md b/edu_sharing_openapi/docs/Interface.md new file mode 100644 index 00000000..819559e9 --- /dev/null +++ b/edu_sharing_openapi/docs/Interface.md @@ -0,0 
+1,34 @@ +# Interface + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] +**set** | **str** | | [optional] +**metadata_prefix** | **str** | | [optional] +**documentation** | **str** | | [optional] +**format** | **str** | | [optional] +**type** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.interface import Interface + +# TODO update the JSON string below +json = "{}" +# create an instance of Interface from a JSON string +interface_instance = Interface.from_json(json) +# print the JSON string representation of the object +print(Interface.to_json()) + +# convert the object into a dict +interface_dict = interface_instance.to_dict() +# create an instance of Interface from a dict +interface_from_dict = Interface.from_dict(interface_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/InviteEventDTO.md b/edu_sharing_openapi/docs/InviteEventDTO.md new file mode 100644 index 00000000..17bdf912 --- /dev/null +++ b/edu_sharing_openapi/docs/InviteEventDTO.md @@ -0,0 +1,33 @@ +# InviteEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**name** | **str** | | [optional] +**type** | **str** | | [optional] +**user_comment** | **str** | | [optional] +**permissions** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.invite_event_dto import InviteEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of InviteEventDTO from a JSON string +invite_event_dto_instance = InviteEventDTO.from_json(json) +# print the JSON string representation of the object +print(InviteEventDTO.to_json()) + +# convert the object into a dict +invite_event_dto_dict = invite_event_dto_instance.to_dict() +# create an instance of InviteEventDTO from a dict +invite_event_dto_from_dict = InviteEventDTO.from_dict(invite_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JSONObject.md b/edu_sharing_openapi/docs/JSONObject.md new file mode 100644 index 00000000..d6320035 --- /dev/null +++ b/edu_sharing_openapi/docs/JSONObject.md @@ -0,0 +1,29 @@ +# JSONObject + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**empty** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.json_object import JSONObject + +# TODO update the JSON string below +json = "{}" +# create an instance of JSONObject from a JSON string +json_object_instance = JSONObject.from_json(json) +# print the JSON string representation of the object +print(JSONObject.to_json()) + +# convert the object into a dict +json_object_dict = json_object_instance.to_dict() +# create an instance of JSONObject from a dict +json_object_from_dict = JSONObject.from_dict(json_object_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Job.md 
b/edu_sharing_openapi/docs/Job.md new file mode 100644 index 00000000..0c4d8d16 --- /dev/null +++ b/edu_sharing_openapi/docs/Job.md @@ -0,0 +1,30 @@ +# Job + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**status** | **str** | | + +## Example + +```python +from edu_sharing_client.models.job import Job + +# TODO update the JSON string below +json = "{}" +# create an instance of Job from a JSON string +job_instance = Job.from_json(json) +# print the JSON string representation of the object +print(Job.to_json()) + +# convert the object into a dict +job_dict = job_instance.to_dict() +# create an instance of Job from a dict +job_from_dict = Job.from_dict(job_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobBuilder.md b/edu_sharing_openapi/docs/JobBuilder.md new file mode 100644 index 00000000..bc724228 --- /dev/null +++ b/edu_sharing_openapi/docs/JobBuilder.md @@ -0,0 +1,29 @@ +# JobBuilder + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**job_data** | [**JobBuilder**](JobBuilder.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_builder import JobBuilder + +# TODO update the JSON string below +json = "{}" +# create an instance of JobBuilder from a JSON string +job_builder_instance = JobBuilder.from_json(json) +# print the JSON string representation of the object +print(JobBuilder.to_json()) + +# convert the object into a dict +job_builder_dict = job_builder_instance.to_dict() +# create an instance of JobBuilder from a dict +job_builder_from_dict = JobBuilder.from_dict(job_builder_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobDataMap.md b/edu_sharing_openapi/docs/JobDataMap.md new file mode 100644 index 00000000..7165dfe4 --- /dev/null +++ b/edu_sharing_openapi/docs/JobDataMap.md @@ -0,0 +1,33 @@ +# JobDataMap + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**dirty** | **bool** | | [optional] +**allows_transient_data** | **bool** | | [optional] +**keys** | **List[str]** | | [optional] +**wrapped_map** | **Dict[str, object]** | | [optional] +**empty** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_data_map import JobDataMap + +# TODO update the JSON string below +json = "{}" +# create an instance of JobDataMap from a JSON string +job_data_map_instance = JobDataMap.from_json(json) +# print the JSON string representation of the object +print(JobDataMap.to_json()) + +# convert the object into a dict +job_data_map_dict = job_data_map_instance.to_dict() +# create an instance of JobDataMap from a dict +job_data_map_from_dict = JobDataMap.from_dict(job_data_map_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobDescription.md b/edu_sharing_openapi/docs/JobDescription.md new file mode 100644 index 00000000..6f69d3f5 --- /dev/null +++ b/edu_sharing_openapi/docs/JobDescription.md @@ -0,0 +1,32 @@ +# 
JobDescription + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**description** | **str** | | [optional] +**params** | [**List[JobFieldDescription]**](JobFieldDescription.md) | | [optional] +**tags** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_description import JobDescription + +# TODO update the JSON string below +json = "{}" +# create an instance of JobDescription from a JSON string +job_description_instance = JobDescription.from_json(json) +# print the JSON string representation of the object +print(JobDescription.to_json()) + +# convert the object into a dict +job_description_dict = job_description_instance.to_dict() +# create an instance of JobDescription from a dict +job_description_from_dict = JobDescription.from_dict(job_description_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobDetail.md b/edu_sharing_openapi/docs/JobDetail.md new file mode 100644 index 00000000..038e7788 --- /dev/null +++ b/edu_sharing_openapi/docs/JobDetail.md @@ -0,0 +1,35 @@ +# JobDetail + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | [**JobKey**](JobKey.md) | | [optional] +**job_data_map** | [**JobDetailJobDataMap**](JobDetailJobDataMap.md) | | [optional] +**durable** | **bool** | | [optional] +**persist_job_data_after_execution** | **bool** | | [optional] +**concurrent_exection_disallowed** | **bool** | | [optional] +**job_builder** | [**JobBuilder**](JobBuilder.md) | | [optional] +**description** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_detail import JobDetail + +# TODO update the JSON string below +json = "{}" +# create an instance of JobDetail from a JSON string +job_detail_instance = JobDetail.from_json(json) +# print the JSON string representation of the object +print(JobDetail.to_json()) + +# convert the object into a dict +job_detail_dict = job_detail_instance.to_dict() +# create an instance of JobDetail from a dict +job_detail_from_dict = JobDetail.from_dict(job_detail_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobDetailJobDataMap.md b/edu_sharing_openapi/docs/JobDetailJobDataMap.md new file mode 100644 index 00000000..672bd8ce --- /dev/null +++ b/edu_sharing_openapi/docs/JobDetailJobDataMap.md @@ -0,0 +1,33 @@ +# JobDetailJobDataMap + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**dirty** | **bool** | | [optional] +**allows_transient_data** | **bool** | | [optional] +**keys** | **List[str]** | | [optional] +**wrapped_map** | **Dict[str, object]** | | [optional] +**empty** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_detail_job_data_map import JobDetailJobDataMap + +# TODO update the JSON string below +json = "{}" +# create an instance of JobDetailJobDataMap from a JSON string +job_detail_job_data_map_instance = JobDetailJobDataMap.from_json(json) +# print the JSON string representation of the object +print(JobDetailJobDataMap.to_json()) + +# convert the object into a 
dict +job_detail_job_data_map_dict = job_detail_job_data_map_instance.to_dict() +# create an instance of JobDetailJobDataMap from a dict +job_detail_job_data_map_from_dict = JobDetailJobDataMap.from_dict(job_detail_job_data_map_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobEntry.md b/edu_sharing_openapi/docs/JobEntry.md new file mode 100644 index 00000000..5de82c87 --- /dev/null +++ b/edu_sharing_openapi/docs/JobEntry.md @@ -0,0 +1,29 @@ +# JobEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**data** | [**Job**](Job.md) | | + +## Example + +```python +from edu_sharing_client.models.job_entry import JobEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of JobEntry from a JSON string +job_entry_instance = JobEntry.from_json(json) +# print the JSON string representation of the object +print(JobEntry.to_json()) + +# convert the object into a dict +job_entry_dict = job_entry_instance.to_dict() +# create an instance of JobEntry from a dict +job_entry_from_dict = JobEntry.from_dict(job_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobFieldDescription.md b/edu_sharing_openapi/docs/JobFieldDescription.md new file mode 100644 index 00000000..294dbaab --- /dev/null +++ b/edu_sharing_openapi/docs/JobFieldDescription.md @@ -0,0 +1,34 @@ +# JobFieldDescription + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**description** | **str** | | [optional] +**file** | **bool** | | [optional] +**sample_value** | **str** | | [optional] +**is_array** | **bool** | | [optional] +**array** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_field_description import JobFieldDescription + +# TODO update the JSON string below +json = "{}" +# create an instance of JobFieldDescription from a JSON string +job_field_description_instance = JobFieldDescription.from_json(json) +# print the JSON string representation of the object +print(JobFieldDescription.to_json()) + +# convert the object into a dict +job_field_description_dict = job_field_description_instance.to_dict() +# create an instance of JobFieldDescription from a dict +job_field_description_from_dict = JobFieldDescription.from_dict(job_field_description_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobInfo.md b/edu_sharing_openapi/docs/JobInfo.md new file mode 100644 index 00000000..47158527 --- /dev/null +++ b/edu_sharing_openapi/docs/JobInfo.md @@ -0,0 +1,37 @@ +# JobInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**job_data_map** | [**JobDetailJobDataMap**](JobDetailJobDataMap.md) | | [optional] +**job_name** | **str** | | [optional] +**job_group** | **str** | | [optional] +**start_time** | **int** | | [optional] +**finish_time** | **int** | | [optional] +**status** | **str** | | [optional] +**worst_level** | [**Level**](Level.md) | | 
[optional] +**log** | [**List[LogEntry]**](LogEntry.md) | | [optional] +**job_detail** | [**JobDetail**](JobDetail.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_info import JobInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of JobInfo from a JSON string +job_info_instance = JobInfo.from_json(json) +# print the JSON string representation of the object +print(JobInfo.to_json()) + +# convert the object into a dict +job_info_dict = job_info_instance.to_dict() +# create an instance of JobInfo from a dict +job_info_from_dict = JobInfo.from_dict(job_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/JobKey.md b/edu_sharing_openapi/docs/JobKey.md new file mode 100644 index 00000000..8f5d53af --- /dev/null +++ b/edu_sharing_openapi/docs/JobKey.md @@ -0,0 +1,30 @@ +# JobKey + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**group** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.job_key import JobKey + +# TODO update the JSON string below +json = "{}" +# create an instance of JobKey from a JSON string +job_key_instance = JobKey.from_json(json) +# print the JSON string representation of the object +print(JobKey.to_json()) + +# convert the object into a dict +job_key_dict = job_key_instance.to_dict() +# create an instance of JobKey from a dict +job_key_from_dict = JobKey.from_dict(job_key_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/KNOWLEDGEV1Api.md b/edu_sharing_openapi/docs/KNOWLEDGEV1Api.md new file mode 100644 index 00000000..9d3410d9 --- /dev/null +++ b/edu_sharing_openapi/docs/KNOWLEDGEV1Api.md @@ -0,0 +1,154 @@ +# edu_sharing_client.KNOWLEDGEV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_analyzing_job_status**](KNOWLEDGEV1Api.md#get_analyzing_job_status) | **GET** /knowledge/v1/analyze/jobs/{job} | Get analyzing job status. +[**run_analyzing_job**](KNOWLEDGEV1Api.md#run_analyzing_job) | **POST** /knowledge/v1/analyze/jobs | Run analyzing job. + + +# **get_analyzing_job_status** +> JobEntry get_analyzing_job_status(job) + +Get analyzing job status. + +Get analyzing job status. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.job_entry import JobEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.KNOWLEDGEV1Api(api_client) + job = 'job_example' # str | ID of job ticket + + try: + # Get analyzing job status. 
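+        # Note: 'job' is the analyzing-job ticket id. In a real run it would typically be taken from the
+        # JobEntry returned by run_analyzing_job (documented below), e.g. entry.data.id (assumed attribute
+        # access based on the JobEntry/Job models documented above).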
+ api_response = api_instance.get_analyzing_job_status(job) + print("The response of KNOWLEDGEV1Api->get_analyzing_job_status:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling KNOWLEDGEV1Api->get_analyzing_job_status: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **job** | **str**| ID of job ticket | + +### Return type + +[**JobEntry**](JobEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**401** | Authorization failed. | - | +**403** | The current user has insufficient rights to access the ticket. | - | +**404** | Job not found. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **run_analyzing_job** +> JobEntry run_analyzing_job(repository, node) + +Run analyzing job. + +Run analyzing job for a node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.job_entry import JobEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.KNOWLEDGEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Run analyzing job. + api_response = api_instance.run_analyzing_job(repository, node) + print("The response of KNOWLEDGEV1Api->run_analyzing_job:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling KNOWLEDGEV1Api->run_analyzing_job: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**JobEntry**](JobEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**202** | Accepted. | - | +**401** | Authorization failed. | - | +**403** | The current user has insufficient rights to read the node or to perform an analyzing job. | - | +**404** | Repository or node not found. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/KeyValuePair.md b/edu_sharing_openapi/docs/KeyValuePair.md new file mode 100644 index 00000000..6e995619 --- /dev/null +++ b/edu_sharing_openapi/docs/KeyValuePair.md @@ -0,0 +1,30 @@ +# KeyValuePair + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | **str** | | [optional] +**value** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.key_value_pair import KeyValuePair + +# TODO update the JSON string below +json = "{}" +# create an instance of KeyValuePair from a JSON string +key_value_pair_instance = KeyValuePair.from_json(json) +# print the JSON string representation of the object +print(KeyValuePair.to_json()) + +# convert the object into a dict +key_value_pair_dict = key_value_pair_instance.to_dict() +# create an instance of KeyValuePair from a dict +key_value_pair_from_dict = KeyValuePair.from_dict(key_value_pair_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LTIPlatformConfiguration.md b/edu_sharing_openapi/docs/LTIPlatformConfiguration.md new file mode 100644 index 00000000..b322635d --- /dev/null +++ b/edu_sharing_openapi/docs/LTIPlatformConfiguration.md @@ -0,0 +1,32 @@ +# LTIPlatformConfiguration + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**product_family_code** | **str** | | [optional] +**version** | **str** | | [optional] +**messages_supported** | [**List[Message]**](Message.md) | | [optional] +**variables** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.lti_platform_configuration import LTIPlatformConfiguration + +# TODO update the JSON string below +json = "{}" +# create an instance of LTIPlatformConfiguration from a JSON string +lti_platform_configuration_instance = LTIPlatformConfiguration.from_json(json) +# print the JSON string representation of the object +print(LTIPlatformConfiguration.to_json()) + +# convert the object into a dict +lti_platform_configuration_dict = lti_platform_configuration_instance.to_dict() +# create an instance of LTIPlatformConfiguration from a dict +lti_platform_configuration_from_dict = LTIPlatformConfiguration.from_dict(lti_platform_configuration_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LTIPlatformV13Api.md b/edu_sharing_openapi/docs/LTIPlatformV13Api.md new file mode 100644 index 00000000..d943f1b3 --- /dev/null +++ b/edu_sharing_openapi/docs/LTIPlatformV13Api.md @@ -0,0 +1,1121 @@ +# edu_sharing_client.LTIPlatformV13Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**auth**](LTIPlatformV13Api.md#auth) | **GET** /ltiplatform/v13/auth | LTI Platform oidc endpoint. 
responds to a login authentication request +[**auth_token_endpoint**](LTIPlatformV13Api.md#auth_token_endpoint) | **GET** /ltiplatform/v13/token | LTIPlatform auth token endpoint +[**change_content**](LTIPlatformV13Api.md#change_content) | **POST** /ltiplatform/v13/content | Custom edu-sharing endpoint to change content of node. +[**convert_to_resourcelink**](LTIPlatformV13Api.md#convert_to_resourcelink) | **POST** /ltiplatform/v13/convert2resourcelink | manual convertion of an io to an resource link without deeplinking +[**deep_linking_response**](LTIPlatformV13Api.md#deep_linking_response) | **POST** /ltiplatform/v13/deeplinking-response | receiving deeplink response messages. +[**generate_login_initiation_form**](LTIPlatformV13Api.md#generate_login_initiation_form) | **GET** /ltiplatform/v13/generateLoginInitiationForm | generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti deeplink flow. +[**generate_login_initiation_form_resource_link**](LTIPlatformV13Api.md#generate_login_initiation_form_resource_link) | **GET** /ltiplatform/v13/generateLoginInitiationFormResourceLink | generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. +[**get_content**](LTIPlatformV13Api.md#get_content) | **GET** /ltiplatform/v13/content | Custom edu-sharing endpoint to get content of node. +[**manual_registration**](LTIPlatformV13Api.md#manual_registration) | **POST** /ltiplatform/v13/manual-registration | manual registration endpoint for registration of tools. +[**open_id_registration**](LTIPlatformV13Api.md#open_id_registration) | **POST** /ltiplatform/v13/openid-registration | registration endpoint the tool uses to register at platform. +[**openid_configuration**](LTIPlatformV13Api.md#openid_configuration) | **GET** /ltiplatform/v13/openid-configuration | LTIPlatform openid configuration +[**start_dynamic_registration**](LTIPlatformV13Api.md#start_dynamic_registration) | **POST** /ltiplatform/v13/start-dynamic-registration | starts lti dynamic registration. +[**start_dynamic_registration_get**](LTIPlatformV13Api.md#start_dynamic_registration_get) | **GET** /ltiplatform/v13/start-dynamic-registration | starts lti dynamic registration. +[**test_token**](LTIPlatformV13Api.md#test_token) | **PUT** /ltiplatform/v13/testToken | test creates a token signed with homeapp. +[**tools**](LTIPlatformV13Api.md#tools) | **GET** /ltiplatform/v13/tools | List of tools registered + + +# **auth** +> str auth(scope, response_type, login_hint, state, response_mode, nonce, prompt, redirect_uri, client_id=client_id, lti_message_hint=lti_message_hint) + +LTI Platform oidc endpoint. responds to a login authentication request + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
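+# Note: the parameter values used further down are placeholders; in an actual OIDC login flow the scope,
+# response_type, login_hint, state, nonce, etc. come from the tool's login/authentication request.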
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + scope = 'scope_example' # str | scope + response_type = 'response_type_example' # str | response_type + login_hint = 'login_hint_example' # str | login_hint + state = 'state_example' # str | state + response_mode = 'response_mode_example' # str | response_mode + nonce = 'nonce_example' # str | nonce + prompt = 'prompt_example' # str | prompt + redirect_uri = 'redirect_uri_example' # str | redirect_uri + client_id = 'client_id_example' # str | optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request (optional) + lti_message_hint = 'lti_message_hint_example' # str | Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered (optional) + + try: + # LTI Platform oidc endpoint. responds to a login authentication request + api_response = api_instance.auth(scope, response_type, login_hint, state, response_mode, nonce, prompt, redirect_uri, client_id=client_id, lti_message_hint=lti_message_hint) + print("The response of LTIPlatformV13Api->auth:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->auth: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **scope** | **str**| scope | + **response_type** | **str**| response_type | + **login_hint** | **str**| login_hint | + **state** | **str**| state | + **response_mode** | **str**| response_mode | + **nonce** | **str**| nonce | + **prompt** | **str**| prompt | + **redirect_uri** | **str**| redirect_uri | + **client_id** | **str**| optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request | [optional] + **lti_message_hint** | **str**| Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **auth_token_endpoint** +> auth_token_endpoint() + +LTIPlatform auth token endpoint + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + + try: + # LTIPlatform auth token endpoint + api_instance.auth_token_endpoint() + except Exception as e: + print("Exception when calling LTIPlatformV13Api->auth_token_endpoint: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_content** +> NodeEntry change_content(jwt, mimetype, version_comment=version_comment, file=file) + +Custom edu-sharing endpoint to change content of node. + +Change content of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + jwt = 'jwt_example' # str | jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool + mimetype = 'mimetype_example' # str | MIME-Type + version_comment = 'version_comment_example' # str | comment, leave empty = no new version, otherwise new version is generated (optional) + file = None # bytearray | file upload (optional) + + try: + # Custom edu-sharing endpoint to change content of node. 
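+        # Per the version_comment note above: an empty comment updates the content without creating a new
+        # version, while a non-empty comment makes a new version of the node be generated.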
+ api_response = api_instance.change_content(jwt, mimetype, version_comment=version_comment, file=file) + print("The response of LTIPlatformV13Api->change_content:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->change_content: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **jwt** | **str**| jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool | + **mimetype** | **str**| MIME-Type | + **version_comment** | **str**| comment, leave empty = no new version, otherwise new version is generated | [optional] + **file** | **bytearray**| file upload | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **convert_to_resourcelink** +> convert_to_resourcelink(node_id, app_id) + +manual convertion of an io to an resource link without deeplinking + +io conversion to resourcelink + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + node_id = 'node_id_example' # str | nodeId + app_id = 'app_id_example' # str | appId of a lti tool + + try: + # manual convertion of an io to an resource link without deeplinking + api_instance.convert_to_resourcelink(node_id, app_id) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->convert_to_resourcelink: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_id** | **str**| nodeId | + **app_id** | **str**| appId of a lti tool | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. 
| - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **deep_linking_response** +> str deep_linking_response(jwt) + +receiving deeplink response messages. + +deeplink response + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + jwt = 'jwt_example' # str | JWT + + try: + # receiving deeplink response messages. + api_response = api_instance.deep_linking_response(jwt) + print("The response of LTIPlatformV13Api->deep_linking_response:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->deep_linking_response: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **jwt** | **str**| JWT | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/x-www-form-urlencoded + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **generate_login_initiation_form** +> str generate_login_initiation_form(app_id, parent_id, node_id=node_id) + +generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti deeplink flow. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + app_id = 'app_id_example' # str | appId of the tool + parent_id = 'parent_id_example' # str | the folder id the lti node will be created in. is required for lti deeplink. 
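+    # For the deeplink flow the resulting LTI node is created under parent_id; node_id (next line) is only
+    # needed for tools that offer the custom content option.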
+ node_id = 'node_id_example' # str | the nodeId when tool has custom content option. (optional) + + try: + # generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti deeplink flow. + api_response = api_instance.generate_login_initiation_form(app_id, parent_id, node_id=node_id) + print("The response of LTIPlatformV13Api->generate_login_initiation_form:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->generate_login_initiation_form: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **app_id** | **str**| appId of the tool | + **parent_id** | **str**| the folder id the lti node will be created in. is required for lti deeplink. | + **node_id** | **str**| the nodeId when tool has custom content option. | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **generate_login_initiation_form_resource_link** +> str generate_login_initiation_form_resource_link(node_id, edit_mode=edit_mode, version=version, launch_presentation=launch_presentation, jwt=jwt) + +generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + node_id = 'node_id_example' # str | the nodeid of a node that contains a lti resourcelink. is required for lti resourcelink + edit_mode = True # bool | for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded (optional) (default to True) + version = 'version_example' # str | the version. for tools with contentoption. (optional) + launch_presentation = 'launch_presentation_example' # str | launchPresentation. how the resourcelink will be embedded. valid values: window,iframe (optional) + jwt = 'jwt_example' # str | jwt for checking access in lms context (optional) + + try: + # generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. 
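+        # launch_presentation accepts 'window' or 'iframe'; edit_mode=True sends the changeContentUrl for
+        # tools with a content option (see the parameter notes).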
+ api_response = api_instance.generate_login_initiation_form_resource_link(node_id, edit_mode=edit_mode, version=version, launch_presentation=launch_presentation, jwt=jwt) + print("The response of LTIPlatformV13Api->generate_login_initiation_form_resource_link:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->generate_login_initiation_form_resource_link: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_id** | **str**| the nodeid of a node that contains a lti resourcelink. is required for lti resourcelink | + **edit_mode** | **bool**| for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded | [optional] [default to True] + **version** | **str**| the version. for tools with contentoption. | [optional] + **launch_presentation** | **str**| launchPresentation. how the resourcelink will be embedded. valid values: window,iframe | [optional] + **jwt** | **str**| jwt for checking access in lms context | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_content** +> str get_content(jwt) + +Custom edu-sharing endpoint to get content of node. + +Get content of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + jwt = 'jwt_example' # str | jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool + + try: + # Custom edu-sharing endpoint to get content of node. + api_response = api_instance.get_content(jwt) + print("The response of LTIPlatformV13Api->get_content:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->get_content: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **jwt** | **str**| jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. 
Must be signed by tool | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: */*, text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **manual_registration** +> manual_registration(manual_registration_data) + +manual registration endpoint for registration of tools. + +tool registration + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.manual_registration_data import ManualRegistrationData +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + manual_registration_data = edu_sharing_client.ManualRegistrationData() # ManualRegistrationData | registrationData + + try: + # manual registration endpoint for registration of tools. + api_instance.manual_registration(manual_registration_data) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->manual_registration: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **manual_registration_data** | [**ManualRegistrationData**](ManualRegistrationData.md)| registrationData | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **open_id_registration** +> OpenIdRegistrationResult open_id_registration(body) + +registration endpoint the tool uses to register at platform. 
+ +tool registration + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.open_id_registration_result import OpenIdRegistrationResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + body = 'body_example' # str | registrationpayload + + try: + # registration endpoint the tool uses to register at platform. + api_response = api_instance.open_id_registration(body) + print("The response of LTIPlatformV13Api->open_id_registration:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->open_id_registration: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **body** | **str**| registrationpayload | + +### Return type + +[**OpenIdRegistrationResult**](OpenIdRegistrationResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **openid_configuration** +> OpenIdConfiguration openid_configuration() + +LTIPlatform openid configuration + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.open_id_configuration import OpenIdConfiguration +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + + try: + # LTIPlatform openid configuration + api_response = api_instance.openid_configuration() + print("The response of LTIPlatformV13Api->openid_configuration:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->openid_configuration: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +[**OpenIdConfiguration**](OpenIdConfiguration.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_dynamic_registration** +> str start_dynamic_registration(url) + +starts lti dynamic registration. + +start dynmic registration + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + url = 'url_example' # str | url + + try: + # starts lti dynamic registration. + api_response = api_instance.start_dynamic_registration(url) + print("The response of LTIPlatformV13Api->start_dynamic_registration:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->start_dynamic_registration: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **url** | **str**| url | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/x-www-form-urlencoded + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **start_dynamic_registration_get** +> str start_dynamic_registration_get(url) + +starts lti dynamic registration. + +start dynmic registration + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
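+# Assumption: 'url' points to the tool's dynamic-registration (initiation) endpoint; the generated
+# documentation only describes the parameter as "url".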
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + url = 'url_example' # str | url + + try: + # starts lti dynamic registration. + api_response = api_instance.start_dynamic_registration_get(url) + print("The response of LTIPlatformV13Api->start_dynamic_registration_get:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->start_dynamic_registration_get: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **url** | **str**| url | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **test_token** +> str test_token(request_body) + +test creates a token signed with homeapp. + +test token. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + request_body = {'key': 'request_body_example'} # Dict[str, str] | properties + + try: + # test creates a token signed with homeapp. + api_response = api_instance.test_token(request_body) + print("The response of LTIPlatformV13Api->test_token:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->test_token: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **request_body** | [**Dict[str, str]**](str.md)| properties | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. 
| - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **tools** +> Tools tools() + +List of tools registered + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.tools import Tools +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIPlatformV13Api(api_client) + + try: + # List of tools registered + api_response = api_instance.tools() + print("The response of LTIPlatformV13Api->tools:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIPlatformV13Api->tools: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**Tools**](Tools.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/LTISession.md b/edu_sharing_openapi/docs/LTISession.md new file mode 100644 index 00000000..d673884c --- /dev/null +++ b/edu_sharing_openapi/docs/LTISession.md @@ -0,0 +1,36 @@ +# LTISession + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**accept_multiple** | **bool** | | [optional] +**deeplink_return_url** | **str** | | [optional] +**accept_types** | **List[str]** | | [optional] +**accept_presentation_document_targets** | **List[str]** | | [optional] +**can_confirm** | **bool** | | [optional] +**title** | **str** | | [optional] +**text** | **str** | | [optional] +**custom_content_node** | [**Node**](Node.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.lti_session import LTISession + +# TODO update the JSON string below +json = "{}" +# create an instance of LTISession from a JSON string +lti_session_instance = LTISession.from_json(json) +# print the JSON string representation of the object +print(LTISession.to_json()) + +# convert the object into a dict +lti_session_dict = lti_session_instance.to_dict() +# create an instance of LTISession from a dict +lti_session_from_dict = LTISession.from_dict(lti_session_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LTIToolConfiguration.md b/edu_sharing_openapi/docs/LTIToolConfiguration.md new file mode 100644 index 00000000..aeac85bc --- /dev/null +++ b/edu_sharing_openapi/docs/LTIToolConfiguration.md @@ -0,0 +1,34 @@ +# LTIToolConfiguration + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**version** | **str** | | [optional] +**deployment_id** | **str** | | [optional] +**target_link_uri** | **str** | | [optional] +**domain** | **str** | | [optional] +**description** | **str** | | [optional] +**claims** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.lti_tool_configuration import LTIToolConfiguration + +# TODO update the JSON string below +json = "{}" +# create an instance of LTIToolConfiguration from a JSON string +lti_tool_configuration_instance = LTIToolConfiguration.from_json(json) +# print the JSON string representation of the object +print(LTIToolConfiguration.to_json()) + +# convert the object into a dict +lti_tool_configuration_dict = lti_tool_configuration_instance.to_dict() +# create an instance of LTIToolConfiguration from a dict +lti_tool_configuration_from_dict = LTIToolConfiguration.from_dict(lti_tool_configuration_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LTIV13Api.md b/edu_sharing_openapi/docs/LTIV13Api.md new file mode 100644 index 00000000..54e3ec0d --- /dev/null +++ b/edu_sharing_openapi/docs/LTIV13Api.md @@ -0,0 +1,923 @@ +# edu_sharing_client.LTIV13Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- 
+[**generate_deep_linking_response**](LTIV13Api.md#generate_deep_linking_response) | **GET** /lti/v13/generateDeepLinkingResponse | generate DeepLinkingResponse +[**get_details_snippet**](LTIV13Api.md#get_details_snippet) | **GET** /lti/v13/details/{repository}/{node} | get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow +[**jwks_uri**](LTIV13Api.md#jwks_uri) | **GET** /lti/v13/jwks | LTI - returns repository JSON Web Key Sets +[**login_initiations**](LTIV13Api.md#login_initiations) | **POST** /lti/v13/oidc/login_initiations | lti authentication process preparation. +[**login_initiations_get**](LTIV13Api.md#login_initiations_get) | **GET** /lti/v13/oidc/login_initiations | lti authentication process preparation. +[**lti**](LTIV13Api.md#lti) | **POST** /lti/v13/lti13 | lti tool redirect. +[**lti_registration_dynamic**](LTIV13Api.md#lti_registration_dynamic) | **GET** /lti/v13/registration/dynamic/{token} | LTI Dynamic Registration - Initiate registration +[**lti_registration_url**](LTIV13Api.md#lti_registration_url) | **GET** /lti/v13/registration/url | LTI Dynamic Registration - generates url for platform +[**lti_target**](LTIV13Api.md#lti_target) | **POST** /lti/v13/lti13/{nodeId} | lti tool resource link target. +[**register_by_type**](LTIV13Api.md#register_by_type) | **POST** /lti/v13/registration/{type} | register LTI platform +[**register_test**](LTIV13Api.md#register_test) | **POST** /lti/v13/registration/static | register LTI platform +[**remove_lti_registration_url**](LTIV13Api.md#remove_lti_registration_url) | **DELETE** /lti/v13/registration/url/{token} | LTI Dynamic Regitration - delete url + + +# **generate_deep_linking_response** +> NodeLTIDeepLink generate_deep_linking_response(node_ids) + +generate DeepLinkingResponse + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + node_ids = ['node_ids_example'] # List[str] | selected node id's + + try: + # generate DeepLinkingResponse + api_response = api_instance.generate_deep_linking_response(node_ids) + print("The response of LTIV13Api->generate_deep_linking_response:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->generate_deep_linking_response: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_ids** | [**List[str]**](str.md)| selected node id's | + +### Return type + +[**NodeLTIDeepLink**](NodeLTIDeepLink.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_details_snippet** +> RenderingDetailsEntry get_details_snippet(repository, node, jwt, version=version, display_mode=display_mode) + +get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow + +get rendered html snippet for a node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + jwt = 'jwt_example' # str | jwt containing the claims aud (clientId of platform), deploymentId and a token. must be signed by platform + version = 'version_example' # str | version of node (optional) + display_mode = 'display_mode_example' # str | Rendering displayMode (optional) + + try: + # get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow + api_response = api_instance.get_details_snippet(repository, node, jwt, version=version, display_mode=display_mode) + print("The response of LTIV13Api->get_details_snippet:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->get_details_snippet: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **jwt** | **str**| jwt containing the claims aud (clientId of platform), deploymentId and a token. must be signed by platform | + **version** | **str**| version of node | [optional] + **display_mode** | **str**| Rendering displayMode | [optional] + +### Return type + +[**RenderingDetailsEntry**](RenderingDetailsEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. 
| - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **jwks_uri** +> RegistrationUrl jwks_uri() + +LTI - returns repository JSON Web Key Sets + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.registration_url import RegistrationUrl +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + + try: + # LTI - returns repository JSON Web Key Sets + api_response = api_instance.jwks_uri() + print("The response of LTIV13Api->jwks_uri:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->jwks_uri: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**RegistrationUrl**](RegistrationUrl.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **login_initiations** +> str login_initiations(iss, target_link_uri, client_id=client_id, login_hint=login_hint, lti_message_hint=lti_message_hint, lti_deployment_id=lti_deployment_id) + +lti authentication process preparation. + +preflight phase. prepares lti authentication process. checks it issuer is valid + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
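+# Note: in a regular LTI 1.3 flow this OIDC login initiation endpoint is invoked
+# by the learning platform via a browser redirect; calling it directly from a
+# script, as below, is mainly useful for testing.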
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + iss = 'iss_example' # str | Issuer of the request, will be validated + target_link_uri = 'target_link_uri_example' # str | target url of platform at the end of the flow + client_id = 'client_id_example' # str | Id of the issuer (optional) + login_hint = 'login_hint_example' # str | context information of the platform (optional) + lti_message_hint = 'lti_message_hint_example' # str | additional context information of the platform (optional) + lti_deployment_id = 'lti_deployment_id_example' # str | A can have multiple deployments in a platform (optional) + + try: + # lti authentication process preparation. + api_response = api_instance.login_initiations(iss, target_link_uri, client_id=client_id, login_hint=login_hint, lti_message_hint=lti_message_hint, lti_deployment_id=lti_deployment_id) + print("The response of LTIV13Api->login_initiations:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->login_initiations: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **iss** | **str**| Issuer of the request, will be validated | + **target_link_uri** | **str**| target url of platform at the end of the flow | + **client_id** | **str**| Id of the issuer | [optional] + **login_hint** | **str**| context information of the platform | [optional] + **lti_message_hint** | **str**| additional context information of the platform | [optional] + **lti_deployment_id** | **str**| A can have multiple deployments in a platform | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/x-www-form-urlencoded + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **login_initiations_get** +> str login_initiations_get(iss, target_link_uri, client_id=client_id, login_hint=login_hint, lti_message_hint=lti_message_hint, lti_deployment_id=lti_deployment_id) + +lti authentication process preparation. + +preflight phase. prepares lti authentication process. checks it issuer is valid + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + iss = 'iss_example' # str | Issuer of the request, will be validated + target_link_uri = 'target_link_uri_example' # str | target url of platform at the end of the flow + client_id = 'client_id_example' # str | Id of the issuer (optional) + login_hint = 'login_hint_example' # str | context information of the platform (optional) + lti_message_hint = 'lti_message_hint_example' # str | additional context information of the platform (optional) + lti_deployment_id = 'lti_deployment_id_example' # str | A can have multiple deployments in a platform (optional) + + try: + # lti authentication process preparation. + api_response = api_instance.login_initiations_get(iss, target_link_uri, client_id=client_id, login_hint=login_hint, lti_message_hint=lti_message_hint, lti_deployment_id=lti_deployment_id) + print("The response of LTIV13Api->login_initiations_get:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->login_initiations_get: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **iss** | **str**| Issuer of the request, will be validated | + **target_link_uri** | **str**| target url of platform at the end of the flow | + **client_id** | **str**| Id of the issuer | [optional] + **login_hint** | **str**| context information of the platform | [optional] + **lti_message_hint** | **str**| additional context information of the platform | [optional] + **lti_deployment_id** | **str**| A can have multiple deployments in a platform | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **lti** +> str lti(id_token, state) + +lti tool redirect. + +lti tool redirect + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
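+# Note: id_token and state are normally POSTed by the platform's browser redirect
+# after authentication: id_token is the JWT signed by the platform and state
+# echoes the value issued during login initiation.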
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + id_token = 'id_token_example' # str | Issuer of the request, will be validated + state = 'state_example' # str | Issuer of the request, will be validated + + try: + # lti tool redirect. + api_response = api_instance.lti(id_token, state) + print("The response of LTIV13Api->lti:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->lti: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id_token** | **str**| Issuer of the request, will be validated | + **state** | **str**| Issuer of the request, will be validated | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/x-www-form-urlencoded + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **lti_registration_dynamic** +> str lti_registration_dynamic(openid_configuration, token, registration_token=registration_token) + +LTI Dynamic Registration - Initiate registration + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + openid_configuration = 'openid_configuration_example' # str | the endpoint to the open id configuration to be used for this registration + token = 'token_example' # str | one time usage token which is autogenerated with the url in edu-sharing admin gui. + registration_token = 'registration_token_example' # str | the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration. 
(optional) + + try: + # LTI Dynamic Registration - Initiate registration + api_response = api_instance.lti_registration_dynamic(openid_configuration, token, registration_token=registration_token) + print("The response of LTIV13Api->lti_registration_dynamic:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->lti_registration_dynamic: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **openid_configuration** | **str**| the endpoint to the open id configuration to be used for this registration | + **token** | **str**| one time usage token which is autogenerated with the url in edu-sharing admin gui. | + **registration_token** | **str**| the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration. | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **lti_registration_url** +> DynamicRegistrationTokens lti_registration_url(generate) + +LTI Dynamic Registration - generates url for platform + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.dynamic_registration_tokens import DynamicRegistrationTokens +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
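+# Note: each returned registration URL carries a one-time token; a platform
+# consumes it via /lti/v13/registration/dynamic/{token}, and
+# remove_lti_registration_url deletes a token again.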
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.LTIV13Api(api_client)
+    generate = False # bool | whether to add a new url to the list (default to False)
+
+    try:
+        # LTI Dynamic Registration - generates url for platform
+        api_response = api_instance.lti_registration_url(generate)
+        print("The response of LTIV13Api->lti_registration_url:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling LTIV13Api->lti_registration_url: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **generate** | **bool**| whether to add a new url to the list | [default to False]
+
+### Return type
+
+[**DynamicRegistrationTokens**](DynamicRegistrationTokens.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | OK. | - |
+**400** | Preconditions are not present. | - |
+**401** | Authorization failed. | - |
+**403** | Session user has insufficient rights to perform this operation. | - |
+**404** | Ressources are not found. | - |
+**500** | Fatal error occured. | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **lti_target**
+> str lti_target(node_id, id_token, state)
+
+lti tool resource link target.
+
+used by some platforms for direct (without oidc login_init) launch requests
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.LTIV13Api(api_client)
+    node_id = 'node_id_example' # str | edu-sharing node id
+    id_token = 'id_token_example' # str | Issuer of the request, will be validated
+    state = 'state_example' # str | Issuer of the request, will be validated
+
+    try:
+        # lti tool resource link target. 
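+        # Direct resource-link launch of the given edu-sharing node, used by
+        # platforms that skip the oidc login_init step (see the description above).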
+ api_response = api_instance.lti_target(node_id, id_token, state) + print("The response of LTIV13Api->lti_target:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling LTIV13Api->lti_target: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_id** | **str**| edu-sharing node id | + **id_token** | **str**| Issuer of the request, will be validated | + **state** | **str**| Issuer of the request, will be validated | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/x-www-form-urlencoded + - **Accept**: text/html + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **register_by_type** +> register_by_type(type, base_url, client_id=client_id, deployment_id=deployment_id) + +register LTI platform + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + type = 'type_example' # str | lti platform typ i.e. moodle + base_url = 'base_url_example' # str | base url i.e. http://localhost/moodle used as platformId + client_id = 'client_id_example' # str | client id (optional) + deployment_id = 'deployment_id_example' # str | deployment id (optional) + + try: + # register LTI platform + api_instance.register_by_type(type, base_url, client_id=client_id, deployment_id=deployment_id) + except Exception as e: + print("Exception when calling LTIV13Api->register_by_type: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **type** | **str**| lti platform typ i.e. moodle | + **base_url** | **str**| base url i.e. http://localhost/moodle used as platformId | + **client_id** | **str**| client id | [optional] + **deployment_id** | **str**| deployment id | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. 
| - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **register_test** +> register_test(platform_id, client_id, deployment_id, authentication_request_url, keyset_url, auth_token_url, key_id=key_id) + +register LTI platform + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.LTIV13Api(api_client) + platform_id = 'platform_id_example' # str | the issuer + client_id = 'client_id_example' # str | client id + deployment_id = 'deployment_id_example' # str | deployment id + authentication_request_url = 'authentication_request_url_example' # str | oidc endpoint, authentication request url + keyset_url = 'keyset_url_example' # str | jwks endpoint, keyset url + auth_token_url = 'auth_token_url_example' # str | auth token url + key_id = 'key_id_example' # str | jwks key id (optional) + + try: + # register LTI platform + api_instance.register_test(platform_id, client_id, deployment_id, authentication_request_url, keyset_url, auth_token_url, key_id=key_id) + except Exception as e: + print("Exception when calling LTIV13Api->register_test: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **platform_id** | **str**| the issuer | + **client_id** | **str**| client id | + **deployment_id** | **str**| deployment id | + **authentication_request_url** | **str**| oidc endpoint, authentication request url | + **keyset_url** | **str**| jwks endpoint, keyset url | + **auth_token_url** | **str**| auth token url | + **key_id** | **str**| jwks key id | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **remove_lti_registration_url**
+> DynamicRegistrationTokens remove_lti_registration_url(token)
+
+LTI Dynamic Registration - delete url
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.dynamic_registration_tokens import DynamicRegistrationTokens
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.LTIV13Api(api_client)
+    token = 'token_example' # str | the token of the link you have to remove
+
+    try:
+        # LTI Dynamic Registration - delete url
+        api_response = api_instance.remove_lti_registration_url(token)
+        print("The response of LTIV13Api->remove_lti_registration_url:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling LTIV13Api->remove_lti_registration_url: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **token** | **str**| the token of the link you have to remove | 
+
+### Return type
+
+[**DynamicRegistrationTokens**](DynamicRegistrationTokens.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | OK. | - |
+**400** | Preconditions are not present. | - |
+**401** | Authorization failed. | - |
+**403** | Session user has insufficient rights to perform this operation. | - |
+**404** | Ressources are not found. | - |
+**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/Language.md b/edu_sharing_openapi/docs/Language.md new file mode 100644 index 00000000..49da1e44 --- /dev/null +++ b/edu_sharing_openapi/docs/Language.md @@ -0,0 +1,31 @@ +# Language + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**var_global** | **Dict[str, str]** | | [optional] +**current** | **Dict[str, str]** | | [optional] +**current_language** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.language import Language + +# TODO update the JSON string below +json = "{}" +# create an instance of Language from a JSON string +language_instance = Language.from_json(json) +# print the JSON string representation of the object +print(Language.to_json()) + +# convert the object into a dict +language_dict = language_instance.to_dict() +# create an instance of Language from a dict +language_from_dict = Language.from_dict(language_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Level.md b/edu_sharing_openapi/docs/Level.md new file mode 100644 index 00000000..c0a5c663 --- /dev/null +++ b/edu_sharing_openapi/docs/Level.md @@ -0,0 +1,30 @@ +# Level + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**syslog_equivalent** | **int** | | [optional] +**version2_level** | [**Level**](Level.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.level import Level + +# TODO update the JSON string below +json = "{}" +# create an instance of Level from a JSON string +level_instance = Level.from_json(json) +# print the JSON string representation of the object +print(Level.to_json()) + +# convert the object into a dict +level_dict = level_instance.to_dict() +# create an instance of Level from a dict +level_from_dict = Level.from_dict(level_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/License.md b/edu_sharing_openapi/docs/License.md new file mode 100644 index 00000000..357b1c76 --- /dev/null +++ b/edu_sharing_openapi/docs/License.md @@ -0,0 +1,30 @@ +# License + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**icon** | **str** | | [optional] +**url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.license import License + +# TODO update the JSON string below +json = "{}" +# create an instance of License from a JSON string +license_instance = License.from_json(json) +# print the JSON string representation of the object +print(License.to_json()) + +# convert the object into a dict +license_dict = license_instance.to_dict() +# create an instance of License from a dict +license_from_dict = License.from_dict(license_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LicenseAgreement.md 
b/edu_sharing_openapi/docs/LicenseAgreement.md new file mode 100644 index 00000000..cbcf9420 --- /dev/null +++ b/edu_sharing_openapi/docs/LicenseAgreement.md @@ -0,0 +1,29 @@ +# LicenseAgreement + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node_id** | [**List[LicenseAgreementNode]**](LicenseAgreementNode.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.license_agreement import LicenseAgreement + +# TODO update the JSON string below +json = "{}" +# create an instance of LicenseAgreement from a JSON string +license_agreement_instance = LicenseAgreement.from_json(json) +# print the JSON string representation of the object +print(LicenseAgreement.to_json()) + +# convert the object into a dict +license_agreement_dict = license_agreement_instance.to_dict() +# create an instance of LicenseAgreement from a dict +license_agreement_from_dict = LicenseAgreement.from_dict(license_agreement_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LicenseAgreementNode.md b/edu_sharing_openapi/docs/LicenseAgreementNode.md new file mode 100644 index 00000000..3c2d3e12 --- /dev/null +++ b/edu_sharing_openapi/docs/LicenseAgreementNode.md @@ -0,0 +1,30 @@ +# LicenseAgreementNode + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**language** | **str** | | [optional] +**value** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.license_agreement_node import LicenseAgreementNode + +# TODO update the JSON string below +json = "{}" +# create an instance of LicenseAgreementNode from a JSON string +license_agreement_node_instance = LicenseAgreementNode.from_json(json) +# print the JSON string representation of the object +print(LicenseAgreementNode.to_json()) + +# convert the object into a dict +license_agreement_node_dict = license_agreement_node_instance.to_dict() +# create an instance of LicenseAgreementNode from a dict +license_agreement_node_from_dict = LicenseAgreementNode.from_dict(license_agreement_node_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Licenses.md b/edu_sharing_openapi/docs/Licenses.md new file mode 100644 index 00000000..358413e9 --- /dev/null +++ b/edu_sharing_openapi/docs/Licenses.md @@ -0,0 +1,30 @@ +# Licenses + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repository** | **Dict[str, str]** | | [optional] +**services** | **Dict[str, Dict[str, str]]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.licenses import Licenses + +# TODO update the JSON string below +json = "{}" +# create an instance of Licenses from a JSON string +licenses_instance = Licenses.from_json(json) +# print the JSON string representation of the object +print(Licenses.to_json()) + +# convert the object into a dict +licenses_dict = licenses_instance.to_dict() +# create an instance of Licenses from a dict +licenses_from_dict = Licenses.from_dict(licenses_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) 
[[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Location.md b/edu_sharing_openapi/docs/Location.md new file mode 100644 index 00000000..157acb58 --- /dev/null +++ b/edu_sharing_openapi/docs/Location.md @@ -0,0 +1,29 @@ +# Location + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**geo** | [**Geo**](Geo.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.location import Location + +# TODO update the JSON string below +json = "{}" +# create an instance of Location from a JSON string +location_instance = Location.from_json(json) +# print the JSON string representation of the object +print(Location.to_json()) + +# convert the object into a dict +location_dict = location_instance.to_dict() +# create an instance of Location from a dict +location_from_dict = Location.from_dict(location_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LogEntry.md b/edu_sharing_openapi/docs/LogEntry.md new file mode 100644 index 00000000..4626cfd4 --- /dev/null +++ b/edu_sharing_openapi/docs/LogEntry.md @@ -0,0 +1,32 @@ +# LogEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**class_name** | **str** | | [optional] +**level** | [**Level**](Level.md) | | [optional] +**var_date** | **int** | | [optional] +**message** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.log_entry import LogEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of LogEntry from a JSON string +log_entry_instance = LogEntry.from_json(json) +# print the JSON string representation of the object +print(LogEntry.to_json()) + +# convert the object into a dict +log_entry_dict = log_entry_instance.to_dict() +# create an instance of LogEntry from a dict +log_entry_from_dict = LogEntry.from_dict(log_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LoggerConfigResult.md b/edu_sharing_openapi/docs/LoggerConfigResult.md new file mode 100644 index 00000000..9fd1a13d --- /dev/null +++ b/edu_sharing_openapi/docs/LoggerConfigResult.md @@ -0,0 +1,32 @@ +# LoggerConfigResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**level** | **str** | | [optional] +**appender** | **List[str]** | | [optional] +**config** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.logger_config_result import LoggerConfigResult + +# TODO update the JSON string below +json = "{}" +# create an instance of LoggerConfigResult from a JSON string +logger_config_result_instance = LoggerConfigResult.from_json(json) +# print the JSON string representation of the object +print(LoggerConfigResult.to_json()) + +# convert the object into a dict +logger_config_result_dict = logger_config_result_instance.to_dict() +# create an instance of LoggerConfigResult from a dict +logger_config_result_from_dict = LoggerConfigResult.from_dict(logger_config_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Login.md b/edu_sharing_openapi/docs/Login.md new file mode 100644 index 00000000..3a3d222d --- /dev/null +++ b/edu_sharing_openapi/docs/Login.md @@ -0,0 +1,39 @@ +# Login + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**remote_authentications** | [**Dict[str, RemoteAuthDescription]**](RemoteAuthDescription.md) | | [optional] +**is_valid_login** | **bool** | | +**is_admin** | **bool** | | +**lti_session** | [**LTISession**](LTISession.md) | | [optional] +**current_scope** | **str** | | +**user_home** | **str** | | [optional] +**session_timeout** | **int** | | +**tool_permissions** | **List[str]** | | [optional] +**status_code** | **str** | | [optional] +**authority_name** | **str** | | [optional] +**is_guest** | **bool** | | + +## Example + +```python +from edu_sharing_client.models.login import Login + +# TODO update the JSON string below +json = "{}" +# create an instance of Login from a JSON string +login_instance = Login.from_json(json) +# print the JSON string representation of the object +print(Login.to_json()) + +# convert the object into a dict +login_dict = login_instance.to_dict() +# create an instance of Login from a dict +login_from_dict = Login.from_dict(login_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LoginCredentials.md b/edu_sharing_openapi/docs/LoginCredentials.md new file mode 100644 index 00000000..7b522bee --- /dev/null +++ b/edu_sharing_openapi/docs/LoginCredentials.md @@ -0,0 +1,31 @@ +# LoginCredentials + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**user_name** | **str** | | +**password** | **str** | | +**scope** | **str** | | + +## Example + +```python +from edu_sharing_client.models.login_credentials import LoginCredentials + +# TODO update the JSON string below +json = "{}" +# create an instance of LoginCredentials from a JSON string +login_credentials_instance = LoginCredentials.from_json(json) +# print the JSON string representation of the object +print(LoginCredentials.to_json()) + +# convert the object into a dict +login_credentials_dict = login_credentials_instance.to_dict() +# create an instance of LoginCredentials from a dict +login_credentials_from_dict = LoginCredentials.from_dict(login_credentials_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/LogoutInfo.md b/edu_sharing_openapi/docs/LogoutInfo.md new file mode 100644 index 00000000..e3300c1d --- /dev/null +++ b/edu_sharing_openapi/docs/LogoutInfo.md @@ -0,0 +1,32 @@ +# LogoutInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] +**destroy_session** | **bool** | | [optional] +**ajax** | **bool** | | [optional] +**next** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.logout_info import LogoutInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of LogoutInfo from a JSON string +logout_info_instance = LogoutInfo.from_json(json) +# print the 
JSON string representation of the object +print(LogoutInfo.to_json()) + +# convert the object into a dict +logout_info_dict = logout_info_instance.to_dict() +# create an instance of LogoutInfo from a dict +logout_info_from_dict = LogoutInfo.from_dict(logout_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MDSV1Api.md b/edu_sharing_openapi/docs/MDSV1Api.md new file mode 100644 index 00000000..7d57fa89 --- /dev/null +++ b/edu_sharing_openapi/docs/MDSV1Api.md @@ -0,0 +1,403 @@ +# edu_sharing_client.MDSV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_metadata_set**](MDSV1Api.md#get_metadata_set) | **GET** /mds/v1/metadatasets/{repository}/{metadataset} | Get metadata set new. +[**get_metadata_sets**](MDSV1Api.md#get_metadata_sets) | **GET** /mds/v1/metadatasets/{repository} | Get metadata sets V2 of repository. +[**get_values**](MDSV1Api.md#get_values) | **POST** /mds/v1/metadatasets/{repository}/{metadataset}/values | Get values. +[**get_values4_keys**](MDSV1Api.md#get_values4_keys) | **POST** /mds/v1/metadatasets/{repository}/{metadataset}/values_for_keys | Get values for keys. +[**suggest_value**](MDSV1Api.md#suggest_value) | **POST** /mds/v1/metadatasets/{repository}/{metadataset}/values/{widget}/suggest | Suggest a value. + + +# **get_metadata_set** +> Mds get_metadata_set(repository, metadataset) + +Get metadata set new. + +Get metadata set new. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mds import Mds +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MDSV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + + try: + # Get metadata set new. 
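+        # Pass a concrete metadata set id instead of '-default-' to inspect a
+        # specific metadata set of the repository.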
+ api_response = api_instance.get_metadata_set(repository, metadataset) + print("The response of MDSV1Api->get_metadata_set:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MDSV1Api->get_metadata_set: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + +### Return type + +[**Mds**](Mds.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_metadata_sets** +> MdsEntries get_metadata_sets(repository) + +Get metadata sets V2 of repository. + +Get metadata sets V2 of repository. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mds_entries import MdsEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MDSV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + + try: + # Get metadata sets V2 of repository. + api_response = api_instance.get_metadata_sets(repository) + print("The response of MDSV1Api->get_metadata_sets:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MDSV1Api->get_metadata_sets: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + +### Return type + +[**MdsEntries**](MdsEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_values** +> Suggestions get_values(repository, metadataset, suggestion_param=suggestion_param) + +Get values. + +Get values. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.suggestion_param import SuggestionParam +from edu_sharing_client.models.suggestions import Suggestions +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MDSV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + suggestion_param = edu_sharing_client.SuggestionParam() # SuggestionParam | suggestionParam (optional) + + try: + # Get values. + api_response = api_instance.get_values(repository, metadataset, suggestion_param=suggestion_param) + print("The response of MDSV1Api->get_values:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MDSV1Api->get_values: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + **suggestion_param** | [**SuggestionParam**](SuggestionParam.md)| suggestionParam | [optional] + +### Return type + +[**Suggestions**](Suggestions.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_values4_keys** +> Suggestions get_values4_keys(repository, metadataset, query=query, var_property=var_property, request_body=request_body) + +Get values for keys. + +Get values for keys. 
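+
+In practice this endpoint is typically used to resolve the display captions for value keys that are already stored on a node. A minimal sketch, assuming the property `ccm:taxonid` and the example keys below exist in the target metadata set:
+
+```python
+import edu_sharing_client
+
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    mds_api = edu_sharing_client.MDSV1Api(api_client)
+    # Resolve captions for two value keys of the property ccm:taxonid
+    # (the property name and keys are illustrative assumptions).
+    suggestions = mds_api.get_values4_keys(
+        repository="-home-",
+        metadataset="-default-",
+        var_property="ccm:taxonid",
+        request_body=["120", "380"],
+    )
+    print(suggestions)
+```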
+ +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.suggestions import Suggestions +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MDSV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + query = 'query_example' # str | query (optional) + var_property = 'var_property_example' # str | property (optional) + request_body = ['request_body_example'] # List[str] | keys (optional) + + try: + # Get values for keys. + api_response = api_instance.get_values4_keys(repository, metadataset, query=query, var_property=var_property, request_body=request_body) + print("The response of MDSV1Api->get_values4_keys:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MDSV1Api->get_values4_keys: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + **query** | **str**| query | [optional] + **var_property** | **str**| property | [optional] + **request_body** | [**List[str]**](str.md)| keys | [optional] + +### Return type + +[**Suggestions**](Suggestions.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **suggest_value** +> MdsValue suggest_value(repository, metadataset, widget, caption, parent=parent, node_id=node_id) + +Suggest a value. + +Suggest a new value for a given metadataset and widget. The suggestion will be forwarded to the corresponding person in the metadataset file + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mds_value import MdsValue +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MDSV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + widget = 'widget_example' # str | widget id, e.g. cm:name + caption = 'caption_example' # str | caption of the new entry (id will be auto-generated) + parent = 'parent_example' # str | parent id of the new entry (might be null) (optional) + node_id = ['node_id_example'] # List[str] | One or more nodes this suggestion relates to (optional, only for extended mail data) (optional) + + try: + # Suggest a value. + api_response = api_instance.suggest_value(repository, metadataset, widget, caption, parent=parent, node_id=node_id) + print("The response of MDSV1Api->suggest_value:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MDSV1Api->suggest_value: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + **widget** | **str**| widget id, e.g. cm:name | + **caption** | **str**| caption of the new entry (id will be auto-generated) | + **parent** | **str**| parent id of the new entry (might be null) | [optional] + **node_id** | [**List[str]**](str.md)| One or more nodes this suggestion relates to (optional, only for extended mail data) | [optional] + +### Return type + +[**MdsValue**](MdsValue.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/MEDIACENTERV1Api.md b/edu_sharing_openapi/docs/MEDIACENTERV1Api.md new file mode 100644 index 00000000..83b40660 --- /dev/null +++ b/edu_sharing_openapi/docs/MEDIACENTERV1Api.md @@ -0,0 +1,942 @@ +# edu_sharing_client.MEDIACENTERV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_mediacenter_group**](MEDIACENTERV1Api.md#add_mediacenter_group) | **PUT** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group} | add a group that is managed by the given mediacenter +[**create_mediacenter**](MEDIACENTERV1Api.md#create_mediacenter) | **POST** /mediacenter/v1/mediacenter/{repository}/{mediacenter} | create new mediacenter in repository. +[**delete_mediacenter**](MEDIACENTERV1Api.md#delete_mediacenter) | **DELETE** /mediacenter/v1/mediacenter/{repository}/{mediacenter} | delete a mediacenter group and it's admin group and proxy group +[**edit_mediacenter**](MEDIACENTERV1Api.md#edit_mediacenter) | **PUT** /mediacenter/v1/mediacenter/{repository}/{mediacenter} | edit a mediacenter in repository. +[**export_mediacenter_licensed_nodes**](MEDIACENTERV1Api.md#export_mediacenter_licensed_nodes) | **POST** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/licenses/export | get nodes that are licensed by the given mediacenter +[**get_mediacenter_groups**](MEDIACENTERV1Api.md#get_mediacenter_groups) | **GET** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages | get groups that are managed by the given mediacenter +[**get_mediacenter_licensed_nodes**](MEDIACENTERV1Api.md#get_mediacenter_licensed_nodes) | **POST** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/licenses | get nodes that are licensed by the given mediacenter +[**get_mediacenters**](MEDIACENTERV1Api.md#get_mediacenters) | **GET** /mediacenter/v1/mediacenter/{repository} | get mediacenters in the repository. +[**import_mc_org_connections**](MEDIACENTERV1Api.md#import_mc_org_connections) | **POST** /mediacenter/v1/import/mc_org | Import Mediacenter Organisation Connection +[**import_mediacenters**](MEDIACENTERV1Api.md#import_mediacenters) | **POST** /mediacenter/v1/import/mediacenters | Import mediacenters +[**import_organisations**](MEDIACENTERV1Api.md#import_organisations) | **POST** /mediacenter/v1/import/organisations | Import Organisations +[**remove_mediacenter_group**](MEDIACENTERV1Api.md#remove_mediacenter_group) | **DELETE** /mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group} | delete a group that is managed by the given mediacenter + + +# **add_mediacenter_group** +> str add_mediacenter_group(repository, mediacenter, group) + +add a group that is managed by the given mediacenter + +although not restricted, it is recommended that the group is an edu-sharing organization (admin rights are required) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
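+# Note: this call states that admin rights are required. If the target server
+# enforces authentication, credentials can also be supplied via the Configuration
+# (see configuration.py for the exact parameters). A minimal sketch, assuming
+# HTTP Basic auth and placeholder credentials:
+#
+# configuration = edu_sharing_client.Configuration(
+#     host = "https://stable.demo.edu-sharing.net/edu-sharing/rest",
+#     username = "admin_example",    # placeholder account name
+#     password = "password_example"  # placeholder password
+# )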
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | authorityName of the mediacenter that should manage the group + group = 'group_example' # str | authorityName of the group that should be managed by that mediacenter + + try: + # add a group that is managed by the given mediacenter + api_response = api_instance.add_mediacenter_group(repository, mediacenter, group) + print("The response of MEDIACENTERV1Api->add_mediacenter_group:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->add_mediacenter_group: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| authorityName of the mediacenter that should manage the group | + **group** | **str**| authorityName of the group that should be managed by that mediacenter | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_mediacenter** +> Mediacenter create_mediacenter(repository, mediacenter, profile=profile) + +create new mediacenter in repository. + +admin rights are required. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mediacenter import Mediacenter +from edu_sharing_client.models.profile import Profile +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | mediacenter name + profile = edu_sharing_client.Profile() # Profile | (optional) + + try: + # create new mediacenter in repository. 
+ api_response = api_instance.create_mediacenter(repository, mediacenter, profile=profile) + print("The response of MEDIACENTERV1Api->create_mediacenter:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->create_mediacenter: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| mediacenter name | + **profile** | [**Profile**](Profile.md)| | [optional] + +### Return type + +[**Mediacenter**](Mediacenter.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_mediacenter** +> delete_mediacenter(repository, mediacenter) + +delete a mediacenter group and it's admin group and proxy group + +admin rights are required. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | authorityName of the mediacenter that should manage the group + + try: + # delete a mediacenter group and it's admin group and proxy group + api_instance.delete_mediacenter(repository, mediacenter) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->delete_mediacenter: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| authorityName of the mediacenter that should manage the group | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. 
| - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **edit_mediacenter** +> Mediacenter edit_mediacenter(repository, mediacenter, profile=profile) + +edit a mediacenter in repository. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mediacenter import Mediacenter +from edu_sharing_client.models.profile import Profile +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | mediacenter name + profile = edu_sharing_client.Profile() # Profile | (optional) + + try: + # edit a mediacenter in repository. + api_response = api_instance.edit_mediacenter(repository, mediacenter, profile=profile) + print("The response of MEDIACENTERV1Api->edit_mediacenter:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->edit_mediacenter: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| mediacenter name | + **profile** | [**Profile**](Profile.md)| | [optional] + +### Return type + +[**Mediacenter**](Mediacenter.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **export_mediacenter_licensed_nodes** +> str export_mediacenter_licensed_nodes(repository, mediacenter, search_parameters, sort_properties=sort_properties, sort_ascending=sort_ascending, properties=properties) + +get nodes that are licensed by the given mediacenter + +e.g. 
cm:name + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_parameters import SearchParameters +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | authorityName of the mediacenter that licenses nodes + search_parameters = edu_sharing_client.SearchParameters() # SearchParameters | search parameters + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + properties = ['properties_example'] # List[str] | properties to fetch, use parent:: to include parent property values (optional) + + try: + # get nodes that are licensed by the given mediacenter + api_response = api_instance.export_mediacenter_licensed_nodes(repository, mediacenter, search_parameters, sort_properties=sort_properties, sort_ascending=sort_ascending, properties=properties) + print("The response of MEDIACENTERV1Api->export_mediacenter_licensed_nodes:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->export_mediacenter_licensed_nodes: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| authorityName of the mediacenter that licenses nodes | + **search_parameters** | [**SearchParameters**](SearchParameters.md)| search parameters | + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **properties** | [**List[str]**](str.md)| properties to fetch, use parent::<property> to include parent property values | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_mediacenter_groups** +> str get_mediacenter_groups(repository, mediacenter) + +get groups that are managed by the given mediacenter + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | authorityName of the mediacenter that should manage the group + + try: + # get groups that are managed by the given mediacenter + api_response = api_instance.get_mediacenter_groups(repository, mediacenter) + print("The response of MEDIACENTERV1Api->get_mediacenter_groups:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->get_mediacenter_groups: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| authorityName of the mediacenter that should manage the group | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_mediacenter_licensed_nodes** +> str get_mediacenter_licensed_nodes(repository, mediacenter, searchword, search_parameters, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +get nodes that are licensed by the given mediacenter + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_parameters import SearchParameters +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | authorityName of the mediacenter that licenses nodes + searchword = 'searchword_example' # str | searchword of licensed nodes + search_parameters = edu_sharing_client.SearchParameters() # SearchParameters | search parameters + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # get nodes that are licensed by the given mediacenter + api_response = api_instance.get_mediacenter_licensed_nodes(repository, mediacenter, searchword, search_parameters, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of MEDIACENTERV1Api->get_mediacenter_licensed_nodes:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->get_mediacenter_licensed_nodes: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| authorityName of the mediacenter that licenses nodes | + **searchword** | **str**| searchword of licensed nodes | + **search_parameters** | [**SearchParameters**](SearchParameters.md)| search parameters | + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_mediacenters** +> str get_mediacenters(repository) + +get mediacenters in the repository. + +Only shows the one available/managing the current user (only admin can access all) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + + try: + # get mediacenters in the repository. + api_response = api_instance.get_mediacenters(repository) + print("The response of MEDIACENTERV1Api->get_mediacenters:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->get_mediacenters: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **import_mc_org_connections** +> McOrgConnectResult import_mc_org_connections(mc_orgs, remove_schools_from_mc=remove_schools_from_mc) + +Import Mediacenter Organisation Connection + +Import Mediacenter Organisation Connection. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + mc_orgs = None # object | Mediacenter Organisation Connection csv to import + remove_schools_from_mc = False # bool | removeSchoolsFromMC (optional) (default to False) + + try: + # Import Mediacenter Organisation Connection + api_response = api_instance.import_mc_org_connections(mc_orgs, remove_schools_from_mc=remove_schools_from_mc) + print("The response of MEDIACENTERV1Api->import_mc_org_connections:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->import_mc_org_connections: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **mc_orgs** | [**object**](object.md)| Mediacenter Organisation Connection csv to import | + **remove_schools_from_mc** | **bool**| removeSchoolsFromMC | [optional] [default to False] + +### Return type + +[**McOrgConnectResult**](McOrgConnectResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **import_mediacenters** +> MediacentersImportResult import_mediacenters(mediacenters) + +Import mediacenters + +Import mediacenters. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
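+# Note: 'mediacenters' below is typed as a plain object and the request is sent
+# as multipart/form-data. A minimal sketch, assuming the generated client accepts
+# the raw CSV bytes for this part (verify against the generated API module before
+# relying on it); "mediacenters.csv" is a placeholder path:
+#
+# with open("mediacenters.csv", "rb") as f:
+#     mediacenters = f.read()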
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + mediacenters = None # object | Mediacenters csv to import + + try: + # Import mediacenters + api_response = api_instance.import_mediacenters(mediacenters) + print("The response of MEDIACENTERV1Api->import_mediacenters:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->import_mediacenters: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **mediacenters** | [**object**](object.md)| Mediacenters csv to import | + +### Return type + +[**MediacentersImportResult**](MediacentersImportResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **import_organisations** +> OrganisationsImportResult import_organisations(organisations) + +Import Organisations + +Import Organisations. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.organisations_import_result import OrganisationsImportResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + organisations = None # object | Organisations csv to import + + try: + # Import Organisations + api_response = api_instance.import_organisations(organisations) + print("The response of MEDIACENTERV1Api->import_organisations:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->import_organisations: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **organisations** | [**object**](object.md)| Organisations csv to import | + +### Return type + +[**OrganisationsImportResult**](OrganisationsImportResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_mediacenter_group** +> str remove_mediacenter_group(repository, mediacenter, group) + +delete a group that is managed by the given mediacenter + +admin rights are required. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.MEDIACENTERV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + mediacenter = 'mediacenter_example' # str | authorityName of the mediacenter that should manage the group + group = 'group_example' # str | authorityName of the group that should not longer be managed by that mediacenter + + try: + # delete a group that is managed by the given mediacenter + api_response = api_instance.remove_mediacenter_group(repository, mediacenter, group) + print("The response of MEDIACENTERV1Api->remove_mediacenter_group:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling MEDIACENTERV1Api->remove_mediacenter_group: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **mediacenter** | **str**| authorityName of the mediacenter that should manage the group | + **group** | **str**| authorityName of the group that should not longer be managed by that mediacenter | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/Mainnav.md b/edu_sharing_openapi/docs/Mainnav.md new file mode 100644 index 00000000..933f9437 --- /dev/null +++ b/edu_sharing_openapi/docs/Mainnav.md @@ -0,0 +1,30 @@ +# Mainnav + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**icon** | [**Icon**](Icon.md) | | [optional] +**main_menu_style** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mainnav import Mainnav + +# TODO update the JSON string below +json = "{}" +# create an instance of Mainnav from a JSON string +mainnav_instance = Mainnav.from_json(json) +# print the JSON string representation of the object +print(Mainnav.to_json()) + +# convert the object into a dict +mainnav_dict = mainnav_instance.to_dict() +# create an instance of Mainnav from a dict +mainnav_from_dict = Mainnav.from_dict(mainnav_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ManualRegistrationData.md b/edu_sharing_openapi/docs/ManualRegistrationData.md new file mode 100644 index 00000000..f992426b --- /dev/null +++ b/edu_sharing_openapi/docs/ManualRegistrationData.md @@ -0,0 +1,39 @@ +# ManualRegistrationData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**tool_name** | **str** | | [optional] +**tool_url** | **str** | | [optional] +**tool_description** | **str** | | [optional] +**keyset_url** | **str** | | [optional] +**login_initiation_url** | **str** | | [optional] +**redirection_urls** | **List[str]** | | [optional] +**custom_parameters** | **List[str]** | JSON Object where each value is a string. Custom parameters to be included in each launch to this tool. If a custom parameter is also defined at the message level, the message level value takes precedence. The value of the custom parameters may be substitution parameters as described in the LTI Core [LTI-13] specification. | [optional] +**logo_url** | **str** | | [optional] +**target_link_uri** | **str** | The default target link uri to use unless defined otherwise in the message or link definition | +**target_link_uri_deep_link** | **str** | The target link uri to use for DeepLing Message | [optional] +**client_name** | **str** | Name of the Tool to be presented to the End-User. Localized representations may be included as described in Section 2.1 of the [OIDC-Reg] specification. 
| + +## Example + +```python +from edu_sharing_client.models.manual_registration_data import ManualRegistrationData + +# TODO update the JSON string below +json = "{}" +# create an instance of ManualRegistrationData from a JSON string +manual_registration_data_instance = ManualRegistrationData.from_json(json) +# print the JSON string representation of the object +print(ManualRegistrationData.to_json()) + +# convert the object into a dict +manual_registration_data_dict = manual_registration_data_instance.to_dict() +# create an instance of ManualRegistrationData from a dict +manual_registration_data_from_dict = ManualRegistrationData.from_dict(manual_registration_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/McOrgConnectResult.md b/edu_sharing_openapi/docs/McOrgConnectResult.md new file mode 100644 index 00000000..fd8d6824 --- /dev/null +++ b/edu_sharing_openapi/docs/McOrgConnectResult.md @@ -0,0 +1,29 @@ +# McOrgConnectResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**rows** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult + +# TODO update the JSON string below +json = "{}" +# create an instance of McOrgConnectResult from a JSON string +mc_org_connect_result_instance = McOrgConnectResult.from_json(json) +# print the JSON string representation of the object +print(McOrgConnectResult.to_json()) + +# convert the object into a dict +mc_org_connect_result_dict = mc_org_connect_result_instance.to_dict() +# create an instance of McOrgConnectResult from a dict +mc_org_connect_result_from_dict = McOrgConnectResult.from_dict(mc_org_connect_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Mds.md b/edu_sharing_openapi/docs/Mds.md new file mode 100644 index 00000000..9cd5ef64 --- /dev/null +++ b/edu_sharing_openapi/docs/Mds.md @@ -0,0 +1,35 @@ +# Mds + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**create** | [**Create**](Create.md) | | [optional] +**widgets** | [**List[MdsWidget]**](MdsWidget.md) | | +**views** | [**List[MdsView]**](MdsView.md) | | +**groups** | [**List[MdsGroup]**](MdsGroup.md) | | +**lists** | [**List[MdsList]**](MdsList.md) | | +**sorts** | [**List[MdsSort]**](MdsSort.md) | | + +## Example + +```python +from edu_sharing_client.models.mds import Mds + +# TODO update the JSON string below +json = "{}" +# create an instance of Mds from a JSON string +mds_instance = Mds.from_json(json) +# print the JSON string representation of the object +print(Mds.to_json()) + +# convert the object into a dict +mds_dict = mds_instance.to_dict() +# create an instance of Mds from a dict +mds_from_dict = Mds.from_dict(mds_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsColumn.md b/edu_sharing_openapi/docs/MdsColumn.md new file mode 100644 index 00000000..188a7e31 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsColumn.md @@ -0,0 +1,31 @@ +# 
MdsColumn + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**format** | **str** | | [optional] +**show_default** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_column import MdsColumn + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsColumn from a JSON string +mds_column_instance = MdsColumn.from_json(json) +# print the JSON string representation of the object +print(MdsColumn.to_json()) + +# convert the object into a dict +mds_column_dict = mds_column_instance.to_dict() +# create an instance of MdsColumn from a dict +mds_column_from_dict = MdsColumn.from_dict(mds_column_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsEntries.md b/edu_sharing_openapi/docs/MdsEntries.md new file mode 100644 index 00000000..4c460776 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsEntries.md @@ -0,0 +1,29 @@ +# MdsEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**metadatasets** | [**List[MetadataSetInfo]**](MetadataSetInfo.md) | | + +## Example + +```python +from edu_sharing_client.models.mds_entries import MdsEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsEntries from a JSON string +mds_entries_instance = MdsEntries.from_json(json) +# print the JSON string representation of the object +print(MdsEntries.to_json()) + +# convert the object into a dict +mds_entries_dict = mds_entries_instance.to_dict() +# create an instance of MdsEntries from a dict +mds_entries_from_dict = MdsEntries.from_dict(mds_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsGroup.md b/edu_sharing_openapi/docs/MdsGroup.md new file mode 100644 index 00000000..efb1c1c9 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsGroup.md @@ -0,0 +1,31 @@ +# MdsGroup + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**rendering** | **str** | | [optional] +**id** | **str** | | [optional] +**views** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_group import MdsGroup + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsGroup from a JSON string +mds_group_instance = MdsGroup.from_json(json) +# print the JSON string representation of the object +print(MdsGroup.to_json()) + +# convert the object into a dict +mds_group_dict = mds_group_instance.to_dict() +# create an instance of MdsGroup from a dict +mds_group_from_dict = MdsGroup.from_dict(mds_group_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsList.md b/edu_sharing_openapi/docs/MdsList.md new file mode 100644 index 00000000..9e28ecb6 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsList.md @@ -0,0 +1,30 @@ +# MdsList + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] 
+**columns** | [**List[MdsColumn]**](MdsColumn.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_list import MdsList + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsList from a JSON string +mds_list_instance = MdsList.from_json(json) +# print the JSON string representation of the object +print(MdsList.to_json()) + +# convert the object into a dict +mds_list_dict = mds_list_instance.to_dict() +# create an instance of MdsList from a dict +mds_list_from_dict = MdsList.from_dict(mds_list_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsQueryCriteria.md b/edu_sharing_openapi/docs/MdsQueryCriteria.md new file mode 100644 index 00000000..9276388d --- /dev/null +++ b/edu_sharing_openapi/docs/MdsQueryCriteria.md @@ -0,0 +1,30 @@ +# MdsQueryCriteria + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**var_property** | **str** | | +**values** | **List[str]** | | + +## Example + +```python +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsQueryCriteria from a JSON string +mds_query_criteria_instance = MdsQueryCriteria.from_json(json) +# print the JSON string representation of the object +print(MdsQueryCriteria.to_json()) + +# convert the object into a dict +mds_query_criteria_dict = mds_query_criteria_instance.to_dict() +# create an instance of MdsQueryCriteria from a dict +mds_query_criteria_from_dict = MdsQueryCriteria.from_dict(mds_query_criteria_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsSort.md b/edu_sharing_openapi/docs/MdsSort.md new file mode 100644 index 00000000..7ff2cfb2 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsSort.md @@ -0,0 +1,31 @@ +# MdsSort + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**columns** | [**List[MdsSortColumn]**](MdsSortColumn.md) | | [optional] +**default** | [**MdsSortDefault**](MdsSortDefault.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_sort import MdsSort + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsSort from a JSON string +mds_sort_instance = MdsSort.from_json(json) +# print the JSON string representation of the object +print(MdsSort.to_json()) + +# convert the object into a dict +mds_sort_dict = mds_sort_instance.to_dict() +# create an instance of MdsSort from a dict +mds_sort_from_dict = MdsSort.from_dict(mds_sort_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsSortColumn.md b/edu_sharing_openapi/docs/MdsSortColumn.md new file mode 100644 index 00000000..61721211 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsSortColumn.md @@ -0,0 +1,30 @@ +# MdsSortColumn + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**mode** | **str** | | [optional] + +## Example + +```python 
+from edu_sharing_client.models.mds_sort_column import MdsSortColumn + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsSortColumn from a JSON string +mds_sort_column_instance = MdsSortColumn.from_json(json) +# print the JSON string representation of the object +print(MdsSortColumn.to_json()) + +# convert the object into a dict +mds_sort_column_dict = mds_sort_column_instance.to_dict() +# create an instance of MdsSortColumn from a dict +mds_sort_column_from_dict = MdsSortColumn.from_dict(mds_sort_column_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsSortDefault.md b/edu_sharing_openapi/docs/MdsSortDefault.md new file mode 100644 index 00000000..18d42b9d --- /dev/null +++ b/edu_sharing_openapi/docs/MdsSortDefault.md @@ -0,0 +1,30 @@ +# MdsSortDefault + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**sort_by** | **str** | | +**sort_ascending** | **bool** | | + +## Example + +```python +from edu_sharing_client.models.mds_sort_default import MdsSortDefault + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsSortDefault from a JSON string +mds_sort_default_instance = MdsSortDefault.from_json(json) +# print the JSON string representation of the object +print(MdsSortDefault.to_json()) + +# convert the object into a dict +mds_sort_default_dict = mds_sort_default_instance.to_dict() +# create an instance of MdsSortDefault from a dict +mds_sort_default_from_dict = MdsSortDefault.from_dict(mds_sort_default_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsSubwidget.md b/edu_sharing_openapi/docs/MdsSubwidget.md new file mode 100644 index 00000000..19fc3fc2 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsSubwidget.md @@ -0,0 +1,29 @@ +# MdsSubwidget + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_subwidget import MdsSubwidget + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsSubwidget from a JSON string +mds_subwidget_instance = MdsSubwidget.from_json(json) +# print the JSON string representation of the object +print(MdsSubwidget.to_json()) + +# convert the object into a dict +mds_subwidget_dict = mds_subwidget_instance.to_dict() +# create an instance of MdsSubwidget from a dict +mds_subwidget_from_dict = MdsSubwidget.from_dict(mds_subwidget_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsValue.md b/edu_sharing_openapi/docs/MdsValue.md new file mode 100644 index 00000000..a166c0b1 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsValue.md @@ -0,0 +1,34 @@ +# MdsValue + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**caption** | **str** | | [optional] +**description** | **str** | | [optional] +**parent** | **str** | | [optional] +**url** | **str** | | [optional] +**alternative_ids** | 
**List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_value import MdsValue + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsValue from a JSON string +mds_value_instance = MdsValue.from_json(json) +# print the JSON string representation of the object +print(MdsValue.to_json()) + +# convert the object into a dict +mds_value_dict = mds_value_instance.to_dict() +# create an instance of MdsValue from a dict +mds_value_from_dict = MdsValue.from_dict(mds_value_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsView.md b/edu_sharing_openapi/docs/MdsView.md new file mode 100644 index 00000000..8b1d723f --- /dev/null +++ b/edu_sharing_openapi/docs/MdsView.md @@ -0,0 +1,35 @@ +# MdsView + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**caption** | **str** | | [optional] +**icon** | **str** | | [optional] +**html** | **str** | | [optional] +**rel** | **str** | | [optional] +**hide_if_empty** | **bool** | | [optional] +**is_extended** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_view import MdsView + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsView from a JSON string +mds_view_instance = MdsView.from_json(json) +# print the JSON string representation of the object +print(MdsView.to_json()) + +# convert the object into a dict +mds_view_dict = mds_view_instance.to_dict() +# create an instance of MdsView from a dict +mds_view_from_dict = MdsView.from_dict(mds_view_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsWidget.md b/edu_sharing_openapi/docs/MdsWidget.md new file mode 100644 index 00000000..bcb91366 --- /dev/null +++ b/edu_sharing_openapi/docs/MdsWidget.md @@ -0,0 +1,62 @@ +# MdsWidget + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ids** | **Dict[str, str]** | | [optional] +**link** | **str** | | [optional] +**configuration** | **str** | | [optional] +**format** | **str** | | [optional] +**allow_valuespace_suggestions** | **bool** | | [optional] +**count_defaultvalue_as_filter** | **bool** | When true, a set defaultvalue will still trigger the search to show an active filter. 
When false (default), the defaultvalue will be shown as if no filter is active | [optional] +**condition** | [**MdsWidgetCondition**](MdsWidgetCondition.md) | | [optional] +**maxlength** | **int** | | [optional] +**interaction_type** | **str** | | [optional] +**filter_mode** | **str** | | [optional] +**expandable** | **str** | | [optional] +**subwidgets** | [**List[MdsSubwidget]**](MdsSubwidget.md) | | [optional] +**required** | **str** | | [optional] +**id** | **str** | | [optional] +**caption** | **str** | | [optional] +**bottom_caption** | **str** | | [optional] +**icon** | **str** | | [optional] +**type** | **str** | | [optional] +**template** | **str** | | [optional] +**has_values** | **bool** | | [optional] +**values** | [**List[MdsValue]**](MdsValue.md) | | [optional] +**placeholder** | **str** | | [optional] +**unit** | **str** | | [optional] +**min** | **int** | | [optional] +**max** | **int** | | [optional] +**default_min** | **int** | | [optional] +**default_max** | **int** | | [optional] +**step** | **int** | | [optional] +**is_required** | **str** | | [optional] +**allowempty** | **bool** | | [optional] +**defaultvalue** | **str** | | [optional] +**is_extended** | **bool** | | [optional] +**is_searchable** | **bool** | | [optional] +**hide_if_empty** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_widget import MdsWidget + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsWidget from a JSON string +mds_widget_instance = MdsWidget.from_json(json) +# print the JSON string representation of the object +print(MdsWidget.to_json()) + +# convert the object into a dict +mds_widget_dict = mds_widget_instance.to_dict() +# create an instance of MdsWidget from a dict +mds_widget_from_dict = MdsWidget.from_dict(mds_widget_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MdsWidgetCondition.md b/edu_sharing_openapi/docs/MdsWidgetCondition.md new file mode 100644 index 00000000..004c3c2f --- /dev/null +++ b/edu_sharing_openapi/docs/MdsWidgetCondition.md @@ -0,0 +1,33 @@ +# MdsWidgetCondition + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type** | **str** | | +**value** | **str** | | +**negate** | **bool** | | +**dynamic** | **bool** | | +**pattern** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mds_widget_condition import MdsWidgetCondition + +# TODO update the JSON string below +json = "{}" +# create an instance of MdsWidgetCondition from a JSON string +mds_widget_condition_instance = MdsWidgetCondition.from_json(json) +# print the JSON string representation of the object +print(MdsWidgetCondition.to_json()) + +# convert the object into a dict +mds_widget_condition_dict = mds_widget_condition_instance.to_dict() +# create an instance of MdsWidgetCondition from a dict +mds_widget_condition_from_dict = MdsWidgetCondition.from_dict(mds_widget_condition_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Mediacenter.md b/edu_sharing_openapi/docs/Mediacenter.md new file mode 100644 index 00000000..d941fc48 --- /dev/null +++ b/edu_sharing_openapi/docs/Mediacenter.md @@ -0,0 +1,39 @@ +# 
Mediacenter + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**properties** | **Dict[str, List[str]]** | | [optional] +**editable** | **bool** | | [optional] +**signup_method** | **str** | | [optional] +**ref** | [**NodeRef**](NodeRef.md) | | [optional] +**aspects** | **List[str]** | | [optional] +**organizations** | [**List[Organization]**](Organization.md) | | [optional] +**authority_name** | **str** | | +**authority_type** | **str** | | [optional] +**group_name** | **str** | | [optional] +**profile** | [**GroupProfile**](GroupProfile.md) | | [optional] +**administration_access** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mediacenter import Mediacenter + +# TODO update the JSON string below +json = "{}" +# create an instance of Mediacenter from a JSON string +mediacenter_instance = Mediacenter.from_json(json) +# print the JSON string representation of the object +print(Mediacenter.to_json()) + +# convert the object into a dict +mediacenter_dict = mediacenter_instance.to_dict() +# create an instance of Mediacenter from a dict +mediacenter_from_dict = Mediacenter.from_dict(mediacenter_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MediacenterProfileExtension.md b/edu_sharing_openapi/docs/MediacenterProfileExtension.md new file mode 100644 index 00000000..0c66bb92 --- /dev/null +++ b/edu_sharing_openapi/docs/MediacenterProfileExtension.md @@ -0,0 +1,34 @@ +# MediacenterProfileExtension + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**location** | **str** | | [optional] +**district_abbreviation** | **str** | | [optional] +**main_url** | **str** | | [optional] +**catalogs** | [**List[Catalog]**](Catalog.md) | | [optional] +**content_status** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mediacenter_profile_extension import MediacenterProfileExtension + +# TODO update the JSON string below +json = "{}" +# create an instance of MediacenterProfileExtension from a JSON string +mediacenter_profile_extension_instance = MediacenterProfileExtension.from_json(json) +# print the JSON string representation of the object +print(MediacenterProfileExtension.to_json()) + +# convert the object into a dict +mediacenter_profile_extension_dict = mediacenter_profile_extension_instance.to_dict() +# create an instance of MediacenterProfileExtension from a dict +mediacenter_profile_extension_from_dict = MediacenterProfileExtension.from_dict(mediacenter_profile_extension_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MediacentersImportResult.md b/edu_sharing_openapi/docs/MediacentersImportResult.md new file mode 100644 index 00000000..d1e359b2 --- /dev/null +++ b/edu_sharing_openapi/docs/MediacentersImportResult.md @@ -0,0 +1,29 @@ +# MediacentersImportResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**rows** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult + +# TODO 
update the JSON string below +json = "{}" +# create an instance of MediacentersImportResult from a JSON string +mediacenters_import_result_instance = MediacentersImportResult.from_json(json) +# print the JSON string representation of the object +print(MediacentersImportResult.to_json()) + +# convert the object into a dict +mediacenters_import_result_dict = mediacenters_import_result_instance.to_dict() +# create an instance of MediacentersImportResult from a dict +mediacenters_import_result_from_dict = MediacentersImportResult.from_dict(mediacenters_import_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MenuEntry.md b/edu_sharing_openapi/docs/MenuEntry.md new file mode 100644 index 00000000..ede12972 --- /dev/null +++ b/edu_sharing_openapi/docs/MenuEntry.md @@ -0,0 +1,40 @@ +# MenuEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**position** | **int** | | [optional] +**icon** | **str** | | [optional] +**name** | **str** | | [optional] +**url** | **str** | | [optional] +**is_disabled** | **bool** | | [optional] +**open_in_new** | **bool** | | [optional] +**is_separate** | **bool** | | [optional] +**is_separate_bottom** | **bool** | | [optional] +**only_desktop** | **bool** | | [optional] +**only_web** | **bool** | | [optional] +**path** | **str** | | [optional] +**scope** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.menu_entry import MenuEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of MenuEntry from a JSON string +menu_entry_instance = MenuEntry.from_json(json) +# print the JSON string representation of the object +print(MenuEntry.to_json()) + +# convert the object into a dict +menu_entry_dict = menu_entry_instance.to_dict() +# create an instance of MenuEntry from a dict +menu_entry_from_dict = MenuEntry.from_dict(menu_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Message.md b/edu_sharing_openapi/docs/Message.md new file mode 100644 index 00000000..1c768db7 --- /dev/null +++ b/edu_sharing_openapi/docs/Message.md @@ -0,0 +1,30 @@ +# Message + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type** | **str** | | [optional] +**placements** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.message import Message + +# TODO update the JSON string below +json = "{}" +# create an instance of Message from a JSON string +message_instance = Message.from_json(json) +# print the JSON string representation of the object +print(Message.to_json()) + +# convert the object into a dict +message_dict = message_instance.to_dict() +# create an instance of Message from a dict +message_from_dict = Message.from_dict(message_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MetadataSetInfo.md b/edu_sharing_openapi/docs/MetadataSetInfo.md new file mode 100644 index 00000000..0e55f8b4 --- /dev/null +++ b/edu_sharing_openapi/docs/MetadataSetInfo.md @@ -0,0 
+1,30 @@ +# MetadataSetInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | +**name** | **str** | | + +## Example + +```python +from edu_sharing_client.models.metadata_set_info import MetadataSetInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of MetadataSetInfo from a JSON string +metadata_set_info_instance = MetadataSetInfo.from_json(json) +# print the JSON string representation of the object +print(MetadataSetInfo.to_json()) + +# convert the object into a dict +metadata_set_info_dict = metadata_set_info_instance.to_dict() +# create an instance of MetadataSetInfo from a dict +metadata_set_info_from_dict = MetadataSetInfo.from_dict(metadata_set_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/MetadataSuggestionEventDTO.md b/edu_sharing_openapi/docs/MetadataSuggestionEventDTO.md new file mode 100644 index 00000000..61044447 --- /dev/null +++ b/edu_sharing_openapi/docs/MetadataSuggestionEventDTO.md @@ -0,0 +1,34 @@ +# MetadataSuggestionEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**caption_id** | **str** | | [optional] +**caption** | **str** | | [optional] +**parent_id** | **str** | | [optional] +**parent_caption** | **str** | | [optional] +**widget** | [**WidgetDataDTO**](WidgetDataDTO.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.metadata_suggestion_event_dto import MetadataSuggestionEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of MetadataSuggestionEventDTO from a JSON string +metadata_suggestion_event_dto_instance = MetadataSuggestionEventDTO.from_json(json) +# print the JSON string representation of the object +print(MetadataSuggestionEventDTO.to_json()) + +# convert the object into a dict +metadata_suggestion_event_dto_dict = metadata_suggestion_event_dto_instance.to_dict() +# create an instance of MetadataSuggestionEventDTO from a dict +metadata_suggestion_event_dto_from_dict = MetadataSuggestionEventDTO.from_dict(metadata_suggestion_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NETWORKV1Api.md b/edu_sharing_openapi/docs/NETWORKV1Api.md new file mode 100644 index 00000000..583dbaa3 --- /dev/null +++ b/edu_sharing_openapi/docs/NETWORKV1Api.md @@ -0,0 +1,373 @@ +# edu_sharing_client.NETWORKV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_service**](NETWORKV1Api.md#add_service) | **POST** /network/v1/services | Register service. +[**get_repositories**](NETWORKV1Api.md#get_repositories) | **GET** /network/v1/repositories | Get repositories. +[**get_service**](NETWORKV1Api.md#get_service) | **GET** /network/v1/service | Get own service. +[**get_services**](NETWORKV1Api.md#get_services) | **GET** /network/v1/services | Get services. +[**update_service**](NETWORKV1Api.md#update_service) | **PUT** /network/v1/services/{id} | Update a service. 
+ + +# **add_service** +> StoredService add_service(service=service) + +Register service. + +Register a new service. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.service import Service +from edu_sharing_client.models.stored_service import StoredService +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NETWORKV1Api(api_client) + service = edu_sharing_client.Service() # Service | Service data object (optional) + + try: + # Register service. + api_response = api_instance.add_service(service=service) + print("The response of NETWORKV1Api->add_service:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NETWORKV1Api->add_service: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **service** | [**Service**](Service.md)| Service data object | [optional] + +### Return type + +[**StoredService**](StoredService.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_repositories** +> RepoEntries get_repositories() + +Get repositories. + +Get repositories. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.repo_entries import RepoEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NETWORKV1Api(api_client) + + try: + # Get repositories. + api_response = api_instance.get_repositories() + print("The response of NETWORKV1Api->get_repositories:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NETWORKV1Api->get_repositories: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. 
+ +### Return type + +[**RepoEntries**](RepoEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_service** +> StoredService get_service() + +Get own service. + +Get the servic entry from the current repository. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.stored_service import StoredService +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NETWORKV1Api(api_client) + + try: + # Get own service. + api_response = api_instance.get_service() + print("The response of NETWORKV1Api->get_service:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NETWORKV1Api->get_service: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**StoredService**](StoredService.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_services** +> str get_services(query=query) + +Get services. + +Get registerted services. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NETWORKV1Api(api_client) + query = 'query_example' # str | search or filter for services (optional) + + try: + # Get services. + api_response = api_instance.get_services(query=query) + print("The response of NETWORKV1Api->get_services:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NETWORKV1Api->get_services: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **query** | **str**| search or filter for services | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_service** +> StoredService update_service(id, service=service) + +Update a service. + +Update an existing service. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.service import Service +from edu_sharing_client.models.stored_service import StoredService +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NETWORKV1Api(api_client) + id = 'id_example' # str | Service id + service = edu_sharing_client.Service() # Service | Service data object (optional) + + try: + # Update a service. + api_response = api_instance.update_service(id, service=service) + print("The response of NETWORKV1Api->update_service:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NETWORKV1Api->update_service: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| Service id | + **service** | [**Service**](Service.md)| Service data object | [optional] + +### Return type + +[**StoredService**](StoredService.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/NODEV1Api.md b/edu_sharing_openapi/docs/NODEV1Api.md new file mode 100644 index 00000000..e696bbaf --- /dev/null +++ b/edu_sharing_openapi/docs/NODEV1Api.md @@ -0,0 +1,3724 @@ +# edu_sharing_client.NODEV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_aspects**](NODEV1Api.md#add_aspects) | **PUT** /node/v1/nodes/{repository}/{node}/aspects | Add aspect to node. +[**add_workflow_history**](NODEV1Api.md#add_workflow_history) | **PUT** /node/v1/nodes/{repository}/{node}/workflow | Add workflow. +[**change_content1**](NODEV1Api.md#change_content1) | **POST** /node/v1/nodes/{repository}/{node}/content | Change content of node. +[**change_content_as_text**](NODEV1Api.md#change_content_as_text) | **POST** /node/v1/nodes/{repository}/{node}/textContent | Change content of node as text. +[**change_metadata**](NODEV1Api.md#change_metadata) | **PUT** /node/v1/nodes/{repository}/{node}/metadata | Change metadata of node. +[**change_metadata_with_versioning**](NODEV1Api.md#change_metadata_with_versioning) | **POST** /node/v1/nodes/{repository}/{node}/metadata | Change metadata of node (new version). +[**change_preview**](NODEV1Api.md#change_preview) | **POST** /node/v1/nodes/{repository}/{node}/preview | Change preview of node. +[**change_template_metadata**](NODEV1Api.md#change_template_metadata) | **PUT** /node/v1/nodes/{repository}/{node}/metadata/template | Set the metadata template for this folder. +[**copy_metadata**](NODEV1Api.md#copy_metadata) | **PUT** /node/v1/nodes/{repository}/{node}/metadata/copy/{from} | Copy metadata from another node. +[**create_child**](NODEV1Api.md#create_child) | **POST** /node/v1/nodes/{repository}/{node}/children | Create a new child. +[**create_child_by_copying**](NODEV1Api.md#create_child_by_copying) | **POST** /node/v1/nodes/{repository}/{node}/children/_copy | Create a new child by copying. +[**create_child_by_moving**](NODEV1Api.md#create_child_by_moving) | **POST** /node/v1/nodes/{repository}/{node}/children/_move | Create a new child by moving. +[**create_fork_of_node**](NODEV1Api.md#create_fork_of_node) | **POST** /node/v1/nodes/{repository}/{node}/children/_fork | Create a copy of a node by creating a forked version (variant). +[**create_share**](NODEV1Api.md#create_share) | **PUT** /node/v1/nodes/{repository}/{node}/shares | Create a share for a node. +[**delete**](NODEV1Api.md#delete) | **DELETE** /node/v1/nodes/{repository}/{node} | Delete node. +[**delete_preview**](NODEV1Api.md#delete_preview) | **DELETE** /node/v1/nodes/{repository}/{node}/preview | Delete preview of node. +[**get_assocs**](NODEV1Api.md#get_assocs) | **GET** /node/v1/nodes/{repository}/{node}/assocs | Get related nodes. +[**get_children**](NODEV1Api.md#get_children) | **GET** /node/v1/nodes/{repository}/{node}/children | Get children of node. +[**get_lrmi_data**](NODEV1Api.md#get_lrmi_data) | **GET** /node/v1/nodes/{repository}/{node}/lrmi | Get lrmi data. 
+[**get_metadata**](NODEV1Api.md#get_metadata) | **GET** /node/v1/nodes/{repository}/{node}/metadata | Get metadata of node.
+[**get_nodes**](NODEV1Api.md#get_nodes) | **POST** /node/v1/nodes/{repository} | Searching nodes.
+[**get_notify_list**](NODEV1Api.md#get_notify_list) | **GET** /node/v1/nodes/{repository}/{node}/notifys | Get notifys (sharing history) of the node.
+[**get_parents**](NODEV1Api.md#get_parents) | **GET** /node/v1/nodes/{repository}/{node}/parents | Get parents of node.
+[**get_permission**](NODEV1Api.md#get_permission) | **GET** /node/v1/nodes/{repository}/{node}/permissions | Get all permission of node.
+[**get_published_copies**](NODEV1Api.md#get_published_copies) | **GET** /node/v1/nodes/{repository}/{node}/publish | Publish
+[**get_shares**](NODEV1Api.md#get_shares) | **GET** /node/v1/nodes/{repository}/{node}/shares | Get shares of node.
+[**get_stats**](NODEV1Api.md#get_stats) | **GET** /node/v1/nodes/{repository}/{node}/stats | Get statistics of node.
+[**get_template_metadata**](NODEV1Api.md#get_template_metadata) | **GET** /node/v1/nodes/{repository}/{node}/metadata/template | Get the metadata template + status for this folder.
+[**get_text_content**](NODEV1Api.md#get_text_content) | **GET** /node/v1/nodes/{repository}/{node}/textContent | Get the text content of a document.
+[**get_version_metadata**](NODEV1Api.md#get_version_metadata) | **GET** /node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/metadata | Get metadata of node version.
+[**get_versions**](NODEV1Api.md#get_versions) | **GET** /node/v1/nodes/{repository}/{node}/versions | Get all versions of node.
+[**get_versions1**](NODEV1Api.md#get_versions1) | **GET** /node/v1/nodes/{repository}/{node}/versions/metadata | Get all versions of node, including its metadata.
+[**get_workflow_history**](NODEV1Api.md#get_workflow_history) | **GET** /node/v1/nodes/{repository}/{node}/workflow | Get workflow history.
+[**has_permission**](NODEV1Api.md#has_permission) | **GET** /node/v1/nodes/{repository}/{node}/permissions/{user} | Which permissions has user/group for node.
+[**import_node**](NODEV1Api.md#import_node) | **POST** /node/v1/nodes/{repository}/{node}/import | Import node
+[**islocked**](NODEV1Api.md#islocked) | **GET** /node/v1/nodes/{repository}/{node}/lock/status | locked status of a node.
+[**prepare_usage**](NODEV1Api.md#prepare_usage) | **POST** /node/v1/nodes/{repository}/{node}/prepareUsage | create remote object and get properties.
+[**publish_copy**](NODEV1Api.md#publish_copy) | **POST** /node/v1/nodes/{repository}/{node}/publish | Publish
+[**remove_share**](NODEV1Api.md#remove_share) | **DELETE** /node/v1/nodes/{repository}/{node}/shares/{shareId} | Remove share of a node.
+[**report_node**](NODEV1Api.md#report_node) | **POST** /node/v1/nodes/{repository}/{node}/report | Report the node.
+[**revert_version**](NODEV1Api.md#revert_version) | **PUT** /node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/_revert | Revert to node version.
+[**set_owner**](NODEV1Api.md#set_owner) | **POST** /node/v1/nodes/{repository}/{node}/owner | Set owner of node.
+[**set_permission**](NODEV1Api.md#set_permission) | **POST** /node/v1/nodes/{repository}/{node}/permissions | Set local permissions of node.
+[**set_property**](NODEV1Api.md#set_property) | **POST** /node/v1/nodes/{repository}/{node}/property | Set single property of node.
+[**store_x_api_data**](NODEV1Api.md#store_x_api_data) | **POST** /node/v1/nodes/{repository}/{node}/xapi | Store xApi-Conform data for a given node +[**unlock**](NODEV1Api.md#unlock) | **GET** /node/v1/nodes/{repository}/{node}/lock/unlock | unlock node. +[**update_share**](NODEV1Api.md#update_share) | **POST** /node/v1/nodes/{repository}/{node}/shares/{shareId} | update share of a node. + + +# **add_aspects** +> NodeEntry add_aspects(repository, node, request_body) + +Add aspect to node. + +Add aspect to node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + request_body = ['request_body_example'] # List[str] | aspect name, e.g. ccm:lomreplication + + try: + # Add aspect to node. + api_response = api_instance.add_aspects(repository, node, request_body) + print("The response of NODEV1Api->add_aspects:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->add_aspects: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **request_body** | [**List[str]**](str.md)| aspect name, e.g. ccm:lomreplication | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **add_workflow_history** +> add_workflow_history(repository, node, workflow_history) + +Add workflow. + +Add workflow entry to node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.workflow_history import WorkflowHistory +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + workflow_history = edu_sharing_client.WorkflowHistory() # WorkflowHistory | The history entry to put (editor and time can be null and will be filled automatically) + + try: + # Add workflow. + api_instance.add_workflow_history(repository, node, workflow_history) + except Exception as e: + print("Exception when calling NODEV1Api->add_workflow_history: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **workflow_history** | [**WorkflowHistory**](WorkflowHistory.md)| The history entry to put (editor and time can be null and will be filled automatically) | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_content1** +> NodeEntry change_content1(repository, node, mimetype, version_comment=version_comment, file=file) + +Change content of node. + +Change content of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + mimetype = 'mimetype_example' # str | MIME-Type + version_comment = 'version_comment_example' # str | comment, leave empty = no new version, otherwise new version is generated (optional) + file = None # bytearray | file upload (optional) + + try: + # Change content of node. 
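+        # Editor's note (hedged sketch, not part of the generated example): `file` is a
+        # raw bytes payload (see the `bytearray` parameter in the table below), so it
+        # could be read from disk first, e.g. with a hypothetical local file:
+        #     with open("example.pdf", "rb") as fh:
+        #         file = fh.read()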
+ api_response = api_instance.change_content1(repository, node, mimetype, version_comment=version_comment, file=file) + print("The response of NODEV1Api->change_content1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->change_content1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **mimetype** | **str**| MIME-Type | + **version_comment** | **str**| comment, leave empty = no new version, otherwise new version is generated | [optional] + **file** | **bytearray**| file upload | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_content_as_text** +> NodeEntry change_content_as_text(repository, node, mimetype, version_comment=version_comment) + +Change content of node as text. + +Change content of node as text. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + mimetype = 'mimetype_example' # str | MIME-Type + version_comment = 'version_comment_example' # str | comment, leave empty = no new version, otherwise new version is generated (optional) + + try: + # Change content of node as text. 
+ api_response = api_instance.change_content_as_text(repository, node, mimetype, version_comment=version_comment) + print("The response of NODEV1Api->change_content_as_text:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->change_content_as_text: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **mimetype** | **str**| MIME-Type | + **version_comment** | **str**| comment, leave empty = no new version, otherwise new version is generated | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_metadata** +> NodeEntry change_metadata(repository, node, request_body) + +Change metadata of node. + +Change metadata of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + request_body = None # Dict[str, List[str]] | properties + + try: + # Change metadata of node. 
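+        # Editor's note (hedged sketch, not part of the generated example): `request_body`
+        # is a Dict[str, List[str]] of properties; mirroring the property name shown in the
+        # create_child example further below, it could look like:
+        #     request_body = {"{http://www.alfresco.org/model/content/1.0}name": ["test"]}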
+ api_response = api_instance.change_metadata(repository, node, request_body) + print("The response of NODEV1Api->change_metadata:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->change_metadata: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **request_body** | [**Dict[str, List[str]]**](List.md)| properties | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_metadata_with_versioning** +> NodeEntry change_metadata_with_versioning(repository, node, version_comment, request_body) + +Change metadata of node (new version). + +Change metadata of node (new version). + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + version_comment = 'version_comment_example' # str | comment + request_body = None # Dict[str, List[str]] | properties + + try: + # Change metadata of node (new version). 
+ api_response = api_instance.change_metadata_with_versioning(repository, node, version_comment, request_body) + print("The response of NODEV1Api->change_metadata_with_versioning:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->change_metadata_with_versioning: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **version_comment** | **str**| comment | + **request_body** | [**Dict[str, List[str]]**](List.md)| properties | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_preview** +> NodeEntry change_preview(repository, node, mimetype, create_version=create_version, image=image) + +Change preview of node. + +Change preview of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + mimetype = 'mimetype_example' # str | MIME-Type + create_version = True # bool | create a node version (optional) (default to True) + image = None # object | (optional) + + try: + # Change preview of node. 
+ api_response = api_instance.change_preview(repository, node, mimetype, create_version=create_version, image=image) + print("The response of NODEV1Api->change_preview:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->change_preview: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **mimetype** | **str**| MIME-Type | + **create_version** | **bool**| create a node version | [optional] [default to True] + **image** | [**object**](object.md)| | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: multipart/form-data + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **change_template_metadata** +> NodeEntry change_template_metadata(repository, node, enable, request_body) + +Set the metadata template for this folder. + +All the given metadata will be inherited to child nodes. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + enable = True # bool | Is the inherition currently enabled + request_body = None # Dict[str, List[str]] | properties + + try: + # Set the metadata template for this folder. 
+ api_response = api_instance.change_template_metadata(repository, node, enable, request_body) + print("The response of NODEV1Api->change_template_metadata:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->change_template_metadata: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **enable** | **bool**| Is the inherition currently enabled | + **request_body** | [**Dict[str, List[str]]**](List.md)| properties | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **copy_metadata** +> NodeEntry copy_metadata(repository, node, var_from) + +Copy metadata from another node. + +Copies all common metadata from one note to another. Current user needs write access to the target node and read access to the source node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + var_from = 'var_from_example' # str | The node where to copy the metadata from + + try: + # Copy metadata from another node. 
+ api_response = api_instance.copy_metadata(repository, node, var_from) + print("The response of NODEV1Api->copy_metadata:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->copy_metadata: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **var_from** | **str**| The node where to copy the metadata from | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_child** +> NodeEntry create_child(repository, node, type, request_body, aspects=aspects, rename_if_exists=rename_if_exists, version_comment=version_comment, assoc_type=assoc_type) + +Create a new child. + +Create a new child. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of parent node use -userhome- for userhome or -inbox- for inbox node + type = 'type_example' # str | type of node + request_body = None # Dict[str, List[str]] | properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} + aspects = ['aspects_example'] # List[str] | aspects of node (optional) + rename_if_exists = False # bool | rename if the same node name exists (optional) (default to False) + version_comment = 'version_comment_example' # str | comment, leave empty = no inital version (optional) + assoc_type = 'assoc_type_example' # str | Association type, can be empty (optional) + + try: + # Create a new child. 
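+        # Editor's note (hedged sketch, not part of the generated example): the inline
+        # properties example from the comment above, written out as an actual dict:
+        #     request_body = {"{http://www.alfresco.org/model/content/1.0}name": ["test"]}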
+ api_response = api_instance.create_child(repository, node, type, request_body, aspects=aspects, rename_if_exists=rename_if_exists, version_comment=version_comment, assoc_type=assoc_type) + print("The response of NODEV1Api->create_child:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->create_child: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of parent node use -userhome- for userhome or -inbox- for inbox node | + **type** | **str**| type of node | + **request_body** | [**Dict[str, List[str]]**](List.md)| properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} | + **aspects** | [**List[str]**](str.md)| aspects of node | [optional] + **rename_if_exists** | **bool**| rename if the same node name exists | [optional] [default to False] + **version_comment** | **str**| comment, leave empty = no inital version | [optional] + **assoc_type** | **str**| Association type, can be empty | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_child_by_copying** +> NodeEntry create_child_by_copying(repository, node, source, with_children) + +Create a new child by copying. + +Create a new child by copying. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of parent node + source = 'source_example' # str | ID of source node + with_children = True # bool | flag for children + + try: + # Create a new child by copying. 
+ api_response = api_instance.create_child_by_copying(repository, node, source, with_children) + print("The response of NODEV1Api->create_child_by_copying:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->create_child_by_copying: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of parent node | + **source** | **str**| ID of source node | + **with_children** | **bool**| flag for children | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_child_by_moving** +> NodeEntry create_child_by_moving(repository, node, source) + +Create a new child by moving. + +Create a new child by moving. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of parent node + source = 'source_example' # str | ID of source node + + try: + # Create a new child by moving. 
+ api_response = api_instance.create_child_by_moving(repository, node, source) + print("The response of NODEV1Api->create_child_by_moving:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->create_child_by_moving: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of parent node | + **source** | **str**| ID of source node | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_fork_of_node** +> NodeEntry create_fork_of_node(repository, node, source, with_children) + +Create a copy of a node by creating a forked version (variant). + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of parent node + source = 'source_example' # str | ID of source node + with_children = True # bool | flag for children + + try: + # Create a copy of a node by creating a forked version (variant). 
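+        # Illustrative note (assumption): the fork is created as a variant under the
+        # given parent and may be linked back to the source via an association such
+        # as ccm:forkio (cf. the assoc_name example in get_assocs further below).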
+ api_response = api_instance.create_fork_of_node(repository, node, source, with_children) + print("The response of NODEV1Api->create_fork_of_node:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->create_fork_of_node: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of parent node | + **source** | **str**| ID of source node | + **with_children** | **bool**| flag for children | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_share** +> NodeShare create_share(repository, node, expiry_date=expiry_date, password=password) + +Create a share for a node. + +Create a new share for a node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_share import NodeShare +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + expiry_date = -1 # int | expiry date for this share, leave empty or -1 for unlimited (optional) (default to -1) + password = 'password_example' # str | password for this share, use none to not use a password (optional) + + try: + # Create a share for a node. 
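+        # Illustrative note: per the parameter descriptions, expiry_date=-1 keeps
+        # the share unlimited, and passing no password creates a share without
+        # password protection.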
+ api_response = api_instance.create_share(repository, node, expiry_date=expiry_date, password=password) + print("The response of NODEV1Api->create_share:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->create_share: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **expiry_date** | **int**| expiry date for this share, leave empty or -1 for unlimited | [optional] [default to -1] + **password** | **str**| password for this share, use none to not use a password | [optional] + +### Return type + +[**NodeShare**](NodeShare.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete** +> delete(repository, node, recycle=recycle, protocol=protocol, store=store) + +Delete node. + +Delete node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + recycle = True # bool | move the node to recycle (optional) (default to True) + protocol = 'protocol_example' # str | protocol (optional) + store = 'store_example' # str | store (optional) + + try: + # Delete node. 
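+        # Illustrative note: recycle=True (the default) moves the node to the
+        # recycle bin instead of deleting it permanently; protocol and store are
+        # optional and can normally be omitted.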
+ api_instance.delete(repository, node, recycle=recycle, protocol=protocol, store=store) + except Exception as e: + print("Exception when calling NODEV1Api->delete: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **recycle** | **bool**| move the node to recycle | [optional] [default to True] + **protocol** | **str**| protocol | [optional] + **store** | **str**| store | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_preview** +> NodeEntry delete_preview(repository, node) + +Delete preview of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Delete preview of node. + api_response = api_instance.delete_preview(repository, node) + print("The response of NODEV1Api->delete_preview:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->delete_preview: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_assocs** +> NodeEntries get_assocs(repository, node, direction, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, assoc_name=assoc_name, property_filter=property_filter) + +Get related nodes. + +Get nodes related based on an assoc. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + direction = 'direction_example' # str | Either where the given node should be the \"SOURCE\" or the \"TARGET\" + max_items = 500 # int | maximum items per page (optional) (default to 500) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + assoc_name = 'assoc_name_example' # str | Association name (e.g. ccm:forkio). (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Get related nodes. + api_response = api_instance.get_assocs(repository, node, direction, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, assoc_name=assoc_name, property_filter=property_filter) + print("The response of NODEV1Api->get_assocs:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_assocs: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **direction** | **str**| Either where the given node should be the \"SOURCE\" or the \"TARGET\" | + **max_items** | **int**| maximum items per page | [optional] [default to 500] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **assoc_name** | **str**| Association name (e.g. ccm:forkio). 
| [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**NodeEntries**](NodeEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_children** +> NodeEntries get_children(repository, node, max_items=max_items, skip_count=skip_count, filter=filter, sort_properties=sort_properties, sort_ascending=sort_ascending, assoc_name=assoc_name, property_filter=property_filter) + +Get children of node. + +Get children of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user) + max_items = 500 # int | maximum items per page (optional) (default to 500) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + filter = ['filter_example'] # List[str] | filter by type files,folders (optional) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + assoc_name = 'assoc_name_example' # str | Filter for a specific association. May be empty (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Get children of node. 
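+        # Illustrative paging sketch: keep max_items fixed and raise skip_count by
+        # max_items per request (e.g. 0, 500, 1000, ...) until fewer than max_items
+        # entries are returned (assumption based on the paging parameters).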
+ api_response = api_instance.get_children(repository, node, max_items=max_items, skip_count=skip_count, filter=filter, sort_properties=sort_properties, sort_ascending=sort_ascending, assoc_name=assoc_name, property_filter=property_filter) + print("The response of NODEV1Api->get_children:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_children: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user) | + **max_items** | **int**| maximum items per page | [optional] [default to 500] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **filter** | [**List[str]**](str.md)| filter by type files,folders | [optional] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **assoc_name** | **str**| Filter for a specific association. May be empty | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**NodeEntries**](NodeEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_lrmi_data** +> JSONObject get_lrmi_data(repository, node, version=version) + +Get lrmi data. + +Get lrmi data of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.json_object import JSONObject +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + version = 'version_example' # str | Version of the node (optional) + + try: + # Get lrmi data. + api_response = api_instance.get_lrmi_data(repository, node, version=version) + print("The response of NODEV1Api->get_lrmi_data:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_lrmi_data: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **version** | **str**| Version of the node | [optional] + +### Return type + +[**JSONObject**](JSONObject.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_metadata** +> NodeEntry get_metadata(repository, node, property_filter=property_filter) + +Get metadata of node. + +Get metadata of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Get metadata of node. 
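+        # Illustrative note: property_filter=['-all-'] returns every property of
+        # the node (per the parameter description).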
+ api_response = api_instance.get_metadata(repository, node, property_filter=property_filter) + print("The response of NODEV1Api->get_metadata:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_metadata: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_nodes** +> SearchResult get_nodes(repository, query, facets=facets, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Searching nodes. + +Searching nodes. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + query = 'query_example' # str | lucene query + facets = ['facets_example'] # List[str] | facets (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Searching nodes. 
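+        # Illustrative note: query is a Lucene query string; something like
+        # query = 'TYPE:"ccm:io"' might match edu-sharing IO objects
+        # (assumption - adjust to your repository's metadata model).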
+ api_response = api_instance.get_nodes(repository, query, facets=facets, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of NODEV1Api->get_nodes:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_nodes: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **query** | **str**| lucene query | + **facets** | [**List[str]**](str.md)| facets | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**SearchResult**](SearchResult.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_notify_list** +> str get_notify_list(repository, node) + +Get notifys (sharing history) of the node. + +Ordered by the time of each notify + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Get notifys (sharing history) of the node. 
+ api_response = api_instance.get_notify_list(repository, node) + print("The response of NODEV1Api->get_notify_list:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_notify_list: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_parents** +> ParentEntries get_parents(repository, node, property_filter=property_filter, full_path=full_path) + +Get parents of node. + +Get all parents metadata + own metadata of node. Index 0 is always the current node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.parent_entries import ParentEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + full_path = True # bool | activate to return the full alfresco path, otherwise the path for the user home is resolved (optional) + + try: + # Get parents of node. 
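+        # Illustrative note: the first entry of the result (index 0) is the node
+        # itself; with full_path=True the full Alfresco path is returned instead of
+        # the path relative to the user home (per the endpoint description).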
+ api_response = api_instance.get_parents(repository, node, property_filter=property_filter, full_path=full_path) + print("The response of NODEV1Api->get_parents:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_parents: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + **full_path** | **bool**| activate to return the full alfresco path, otherwise the path for the user home is resolved | [optional] + +### Return type + +[**ParentEntries**](ParentEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_permission** +> NodePermissionEntry get_permission(repository, node) + +Get all permission of node. + +Get all permission of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_permission_entry import NodePermissionEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Get all permission of node. + api_response = api_instance.get_permission(repository, node) + print("The response of NODEV1Api->get_permission:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_permission: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodePermissionEntry**](NodePermissionEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_published_copies** +> NodeEntries get_published_copies(repository, node) + +Publish + +Get all published copies of the current node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Publish + api_response = api_instance.get_published_copies(repository, node) + print("The response of NODEV1Api->get_published_copies:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_published_copies: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeEntries**](NodeEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_shares** +> str get_shares(repository, node, email=email) + +Get shares of node. + +Get list of shares (via mail/token) for a node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + email = 'email_example' # str | Filter for a specific email or use LINK for link shares (Optional) (optional) + + try: + # Get shares of node. + api_response = api_instance.get_shares(repository, node, email=email) + print("The response of NODEV1Api->get_shares:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_shares: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **email** | **str**| Filter for a specific email or use LINK for link shares (Optional) | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_stats** +> NodeStats get_stats(repository, node) + +Get statistics of node. + +Get statistics (views, downloads) of node. Requires ChangePermissions permission on node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_stats import NodeStats +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Get statistics of node. 
+ api_response = api_instance.get_stats(repository, node) + print("The response of NODEV1Api->get_stats:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_stats: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeStats**](NodeStats.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_template_metadata** +> NodeEntry get_template_metadata(repository, node) + +Get the metadata template + status for this folder. + +All the given metadata will be inherited to child nodes. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Get the metadata template + status for this folder. + api_response = api_instance.get_template_metadata(repository, node) + print("The response of NODEV1Api->get_template_metadata:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_template_metadata: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. 
| - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_text_content** +> NodeText get_text_content(repository, node) + +Get the text content of a document. + +May fails with 500 if the node can not be read. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_text import NodeText +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Get the text content of a document. + api_response = api_instance.get_text_content(repository, node) + print("The response of NODEV1Api->get_text_content:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_text_content: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeText**](NodeText.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_version_metadata** +> NodeVersionEntry get_version_metadata(repository, node, major, minor, property_filter=property_filter) + +Get metadata of node version. + +Get metadata of node version. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_version_entry import NodeVersionEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + major = 56 # int | major version + minor = 56 # int | minor version + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Get metadata of node version. + api_response = api_instance.get_version_metadata(repository, node, major, minor, property_filter=property_filter) + print("The response of NODEV1Api->get_version_metadata:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_version_metadata: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **major** | **int**| major version | + **minor** | **int**| minor version | + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**NodeVersionEntry**](NodeVersionEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_versions** +> NodeVersionRefEntries get_versions(repository, node) + +Get all versions of node. + +Get all versions of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Get all versions of node. 
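+        # Illustrative note (assumption): each returned version ref should carry the
+        # major/minor numbers accepted by get_version_metadata (documented above).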
+ api_response = api_instance.get_versions(repository, node) + print("The response of NODEV1Api->get_versions:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_versions: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeVersionRefEntries**](NodeVersionRefEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_versions1** +> NodeVersionEntries get_versions1(repository, node, property_filter=property_filter) + +Get all versions of node, including it's metadata. + +Get all versions of node, including it's metadata. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_version_entries import NodeVersionEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Get all versions of node, including it's metadata. 
+ api_response = api_instance.get_versions1(repository, node, property_filter=property_filter) + print("The response of NODEV1Api->get_versions1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_versions1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**NodeVersionEntries**](NodeVersionEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_workflow_history** +> str get_workflow_history(repository, node) + +Get workflow history. + +Get workflow history of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # Get workflow history. + api_response = api_instance.get_workflow_history(repository, node) + print("The response of NODEV1Api->get_workflow_history:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->get_workflow_history: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **has_permission** +> str has_permission(repository, node, user) + +Which permissions has user/group for node. + +Check for actual permissions (also when user is in groups) for a specific node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + user = 'user_example' # str | Authority (user/group) to check (use \"-me-\" for current user + + try: + # Which permissions has user/group for node. + api_response = api_instance.has_permission(repository, node, user) + print("The response of NODEV1Api->has_permission:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->has_permission: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **user** | **str**| Authority (user/group) to check (use \"-me-\" for current user | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **import_node** +> NodeEntry import_node(repository, node, parent) + +Import node + +Import a node from a foreign repository to the local repository. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
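+# Note: unlike most NODE v1 calls, 'repository' here is the id of the *foreign*
+# repository to import from; 'parent' may also be '-userhome-' or '-inbox-'
+# (see the parameter table below).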
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = 'repository_example' # str | The id of the foreign repository + node = 'node_example' # str | ID of node + parent = 'parent_example' # str | Parent node where to store it locally, may also use -userhome- or -inbox- + + try: + # Import node + api_response = api_instance.import_node(repository, node, parent) + print("The response of NODEV1Api->import_node:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->import_node: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| The id of the foreign repository | + **node** | **str**| ID of node | + **parent** | **str**| Parent node where to store it locally, may also use -userhome- or -inbox- | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **islocked** +> NodeLocked islocked(repository, node) + +locked status of a node. + +locked status of a node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_locked import NodeLocked +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # locked status of a node. 
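+        # Returns a NodeLocked object (see NodeLocked.md) describing whether the
+        # node is currently locked.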
+ api_response = api_instance.islocked(repository, node) + print("The response of NODEV1Api->islocked:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->islocked: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeLocked**](NodeLocked.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **prepare_usage** +> NodeRemote prepare_usage(repository, node) + +create remote object and get properties. + +create remote object and get properties. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_remote import NodeRemote +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # create remote object and get properties. + api_response = api_instance.prepare_usage(repository, node) + print("The response of NODEV1Api->prepare_usage:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->prepare_usage: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeRemote**](NodeRemote.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **publish_copy** +> NodeEntry publish_copy(repository, node, handle_mode=handle_mode, handle_param=handle_param) + +Publish + +Create a published copy of the current node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.handle_param import HandleParam +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + handle_mode = 'handle_mode_example' # str | handle mode, if a handle should be created. Skip this parameter if you don't want an handle (optional) + handle_param = edu_sharing_client.HandleParam() # HandleParam | handle parameter, if a handle and/or doi should be created. Skip this parameter if you don't want a handle or doi, (optional) + + try: + # Publish + api_response = api_instance.publish_copy(repository, node, handle_mode=handle_mode, handle_param=handle_param) + print("The response of NODEV1Api->publish_copy:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->publish_copy: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **handle_mode** | **str**| handle mode, if a handle should be created. Skip this parameter if you don't want an handle | [optional] + **handle_param** | [**HandleParam**](HandleParam.md)| handle parameter, if a handle and/or doi should be created. Skip this parameter if you don't want a handle or doi, | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_share** +> remove_share(repository, node, share_id) + +Remove share of a node. 
+ +Remove the specified share id + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + share_id = 'share_id_example' # str | share id + + try: + # Remove share of a node. + api_instance.remove_share(repository, node, share_id) + except Exception as e: + print("Exception when calling NODEV1Api->remove_share: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **share_id** | **str**| share id | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **report_node** +> report_node(repository, node, reason, user_email, user_comment=user_comment) + +Report the node. + +Report a node to notify the admin about an issue) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + reason = 'reason_example' # str | the reason for the report + user_email = 'user_email_example' # str | mail of reporting user + user_comment = 'user_comment_example' # str | additional user comment (optional) + + try: + # Report the node. 
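+        # report_node returns no response body; an HTTP 200 means the report was
+        # accepted and the admin will be notified about the issue.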
+ api_instance.report_node(repository, node, reason, user_email, user_comment=user_comment) + except Exception as e: + print("Exception when calling NODEV1Api->report_node: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **reason** | **str**| the reason for the report | + **user_email** | **str**| mail of reporting user | + **user_comment** | **str**| additional user comment | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **revert_version** +> NodeEntry revert_version(repository, node, major, minor) + +Revert to node version. + +Revert to node version. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + major = 56 # int | major version + minor = 56 # int | minor version + + try: + # Revert to node version. + api_response = api_instance.revert_version(repository, node, major, minor) + print("The response of NODEV1Api->revert_version:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->revert_version: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **major** | **int**| major version | + **minor** | **int**| minor version | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. 
| - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_owner** +> set_owner(repository, node, username=username) + +Set owner of node. + +Set owner of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + username = 'username_example' # str | username (optional) + + try: + # Set owner of node. + api_instance.set_owner(repository, node, username=username) + except Exception as e: + print("Exception when calling NODEV1Api->set_owner: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **username** | **str**| username | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_permission** +> set_permission(repository, node, send_mail, send_copy, acl, mailtext=mailtext) + +Set local permissions of node. + +Set local permissions of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.acl import ACL +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
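+# Note: 'acl' carries the local permissions to set and is sent as the JSON request
+# body (Content-Type: application/json); send_mail/send_copy are assumed to control
+# whether mails about the permission change are sent (name-based assumption, not
+# stated in the endpoint description).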
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + send_mail = True # bool | sendMail + send_copy = True # bool | sendCopy + acl = edu_sharing_client.ACL() # ACL | permissions + mailtext = 'mailtext_example' # str | mailtext (optional) + + try: + # Set local permissions of node. + api_instance.set_permission(repository, node, send_mail, send_copy, acl, mailtext=mailtext) + except Exception as e: + print("Exception when calling NODEV1Api->set_permission: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **send_mail** | **bool**| sendMail | + **send_copy** | **bool**| sendCopy | + **acl** | [**ACL**](ACL.md)| permissions | + **mailtext** | **str**| mailtext | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_property** +> set_property(repository, node, var_property, keep_modified_date=keep_modified_date, value=value) + +Set single property of node. + +When the property is unset (null), it will be removed + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + var_property = 'var_property_example' # str | property + keep_modified_date = False # bool | keepModifiedDate (optional) (default to False) + value = ['value_example'] # List[str] | value (optional) + + try: + # Set single property of node. 
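+        # As described above, omitting 'value' (i.e. sending the property unset /
+        # null) removes the property from the node.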
+ api_instance.set_property(repository, node, var_property, keep_modified_date=keep_modified_date, value=value) + except Exception as e: + print("Exception when calling NODEV1Api->set_property: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **var_property** | **str**| property | + **keep_modified_date** | **bool**| keepModifiedDate | [optional] [default to False] + **value** | [**List[str]**](str.md)| value | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **store_x_api_data** +> object store_x_api_data(repository, node, body) + +Store xApi-Conform data for a given node + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + body = 'body_example' # str | xApi conform json data + + try: + # Store xApi-Conform data for a given node + api_response = api_instance.store_x_api_data(repository, node, body) + print("The response of NODEV1Api->store_x_api_data:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->store_x_api_data: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **body** | **str**| xApi conform json data | + +### Return type + +**object** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. 
| - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **unlock** +> unlock(repository, node) + +unlock node. + +unlock node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # unlock node. + api_instance.unlock(repository, node) + except Exception as e: + print("Exception when calling NODEV1Api->unlock: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_share** +> NodeShare update_share(repository, node, share_id, expiry_date=expiry_date, password=password) + +update share of a node. + +update the specified share id + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_share import NodeShare +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
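+# Note: expiry_date=-1 keeps the share unlimited, and leaving 'password' unset
+# keeps the current share password (see the parameter descriptions below).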
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NODEV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + share_id = 'share_id_example' # str | share id + expiry_date = -1 # int | expiry date for this share, leave empty or -1 for unlimited (optional) (default to -1) + password = 'password_example' # str | new password for share, leave empty if you don't want to change it (optional) + + try: + # update share of a node. + api_response = api_instance.update_share(repository, node, share_id, expiry_date=expiry_date, password=password) + print("The response of NODEV1Api->update_share:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NODEV1Api->update_share: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **share_id** | **str**| share id | + **expiry_date** | **int**| expiry date for this share, leave empty or -1 for unlimited | [optional] [default to -1] + **password** | **str**| new password for share, leave empty if you don't want to change it | [optional] + +### Return type + +[**NodeShare**](NodeShare.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/NOTIFICATIONV1Api.md b/edu_sharing_openapi/docs/NOTIFICATIONV1Api.md new file mode 100644 index 00000000..bebaee91 --- /dev/null +++ b/edu_sharing_openapi/docs/NOTIFICATIONV1Api.md @@ -0,0 +1,412 @@ +# edu_sharing_client.NOTIFICATIONV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**delete_notification**](NOTIFICATIONV1Api.md#delete_notification) | **DELETE** /notification/v1/notifications | Endpoint to delete notification by id +[**get_config2**](NOTIFICATIONV1Api.md#get_config2) | **GET** /notification/v1/config | get the config for notifications of the current user +[**get_notifications**](NOTIFICATIONV1Api.md#get_notifications) | **GET** /notification/v1/notifications | Retrieve stored notification, filtered by receiver and status +[**set_config1**](NOTIFICATIONV1Api.md#set_config1) | **PUT** /notification/v1/config | Update the config for notifications of the current user +[**update_notification_status**](NOTIFICATIONV1Api.md#update_notification_status) | **PUT** /notification/v1/notifications/status | Endpoint to update the notification status +[**update_notification_status_by_receiver_id**](NOTIFICATIONV1Api.md#update_notification_status_by_receiver_id) | **PUT** /notification/v1/notifications/receiver/status | Endpoint to update the notification status + + +# **delete_notification** +> delete_notification(id=id) + +Endpoint to delete notification by id + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
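+# Note: delete_notification returns no body; an HTTP 200 confirms that the
+# notification with the given id was deleted.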
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NOTIFICATIONV1Api(api_client) + id = 'id_example' # str | (optional) + + try: + # Endpoint to delete notification by id + api_instance.delete_notification(id=id) + except Exception as e: + print("Exception when calling NOTIFICATIONV1Api->delete_notification: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | deleted notification | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_config2** +> NotificationConfig get_config2() + +get the config for notifications of the current user + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.notification_config import NotificationConfig +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NOTIFICATIONV1Api(api_client) + + try: + # get the config for notifications of the current user + api_response = api_instance.get_config2() + print("The response of NOTIFICATIONV1Api->get_config2:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NOTIFICATIONV1Api->get_config2: %s\n" % e) +``` + + + +### Parameters + +This endpoint does not need any parameter. + +### Return type + +[**NotificationConfig**](NotificationConfig.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_notifications** +> NotificationResponsePage get_notifications(receiver_id=receiver_id, status=status, page=page, size=size, sort=sort) + +Retrieve stored notification, filtered by receiver and status + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.notification_response_page import NotificationResponsePage +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NOTIFICATIONV1Api(api_client) + receiver_id = '-me-' # str | (optional) (default to '-me-') + status = ['status_example'] # List[str] | status (or conjunction) (optional) + page = 0 # int | page number (optional) (default to 0) + size = 25 # int | page size (optional) (default to 25) + sort = ['sort_example'] # List[str] | Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. Multiple sort criteria are supported. (optional) + + try: + # Retrieve stored notification, filtered by receiver and status + api_response = api_instance.get_notifications(receiver_id=receiver_id, status=status, page=page, size=size, sort=sort) + print("The response of NOTIFICATIONV1Api->get_notifications:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling NOTIFICATIONV1Api->get_notifications: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **receiver_id** | **str**| | [optional] [default to '-me-'] + **status** | [**List[str]**](str.md)| status (or conjunction) | [optional] + **page** | **int**| page number | [optional] [default to 0] + **size** | **int**| page size | [optional] [default to 25] + **sort** | [**List[str]**](str.md)| Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. Multiple sort criteria are supported. 
| [optional] + +### Return type + +[**NotificationResponsePage**](NotificationResponsePage.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | get the received notifications | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_config1** +> set_config1(notification_config=notification_config) + +Update the config for notifications of the current user + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.notification_config import NotificationConfig +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NOTIFICATIONV1Api(api_client) + notification_config = edu_sharing_client.NotificationConfig() # NotificationConfig | (optional) + + try: + # Update the config for notifications of the current user + api_instance.set_config1(notification_config=notification_config) + except Exception as e: + print("Exception when calling NOTIFICATIONV1Api->set_config1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **notification_config** | [**NotificationConfig**](NotificationConfig.md)| | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_notification_status** +> NotificationEventDTO update_notification_status(id=id, status=status) + +Endpoint to update the notification status + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
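+# Note: 'status' is the new notification status (defaults to READ); the call
+# returns the updated notification as a NotificationEventDTO.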
+configuration = edu_sharing_client.Configuration(
+    host = "https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+
+# Enter a context with an instance of the API client
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    # Create an instance of the API class
+    api_instance = edu_sharing_client.NOTIFICATIONV1Api(api_client)
+    id = 'id_example' # str | (optional)
+    status = 'READ' # str | (optional) (default to READ)
+
+    try:
+        # Endpoint to update the notification status
+        api_response = api_instance.update_notification_status(id=id, status=status)
+        print("The response of NOTIFICATIONV1Api->update_notification_status:\n")
+        pprint(api_response)
+    except Exception as e:
+        print("Exception when calling NOTIFICATIONV1Api->update_notification_status: %s\n" % e)
+```
+
+
+
+### Parameters
+
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+ **id** | **str**| | [optional]
+ **status** | **str**| | [optional] [default to READ]
+
+### Return type
+
+[**NotificationEventDTO**](NotificationEventDTO.md)
+
+### Authorization
+
+No authorization required
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | set notification status | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **update_notification_status_by_receiver_id**
+> update_notification_status_by_receiver_id(receiver_id=receiver_id, old_status=old_status, new_status=new_status)
+
+Endpoint to update the notification status
+
+### Example
+
+
+```python
+import edu_sharing_client
+from edu_sharing_client.rest import ApiException
+from pprint import pprint
+
+# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest
+# See configuration.py for a list of all supported configuration parameters.
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.NOTIFICATIONV1Api(api_client) + receiver_id = 'receiver_id_example' # str | (optional) + old_status = ['old_status_example'] # List[str] | The old status (or conjunction) (optional) + new_status = READ # str | (optional) (default to READ) + + try: + # Endpoint to update the notification status + api_instance.update_notification_status_by_receiver_id(receiver_id=receiver_id, old_status=old_status, new_status=new_status) + except Exception as e: + print("Exception when calling NOTIFICATIONV1Api->update_notification_status_by_receiver_id: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **receiver_id** | **str**| | [optional] + **old_status** | [**List[str]**](str.md)| The old status (or conjunction) | [optional] + **new_status** | **str**| | [optional] [default to READ] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: Not defined + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | set notification status | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/Node.md b/edu_sharing_openapi/docs/Node.md new file mode 100644 index 00000000..164df151 --- /dev/null +++ b/edu_sharing_openapi/docs/Node.md @@ -0,0 +1,61 @@ +# Node + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node_lti_deep_link** | [**NodeLTIDeepLink**](NodeLTIDeepLink.md) | | [optional] +**remote** | [**Remote**](Remote.md) | | [optional] +**content** | [**Content**](Content.md) | | [optional] +**license** | [**License**](License.md) | | [optional] +**is_directory** | **bool** | | [optional] +**comment_count** | **int** | | [optional] +**rating** | [**RatingDetails**](RatingDetails.md) | | [optional] +**used_in_collections** | [**List[Node]**](Node.md) | | [optional] +**relations** | [**Dict[str, Node]**](Node.md) | | [optional] +**contributors** | [**List[Contributor]**](Contributor.md) | | [optional] +**ref** | [**NodeRef**](NodeRef.md) | | +**parent** | [**NodeRef**](NodeRef.md) | | [optional] +**type** | **str** | | [optional] +**aspects** | **List[str]** | | [optional] +**name** | **str** | | +**title** | **str** | | [optional] +**metadataset** | **str** | | [optional] +**repository_type** | **str** | | [optional] +**created_at** | **datetime** | | +**created_by** | [**Person**](Person.md) | | +**modified_at** | **datetime** | | [optional] +**modified_by** | [**Person**](Person.md) | | [optional] +**access** | **List[str]** | | +**download_url** | **str** | | +**properties** | **Dict[str, List[str]]** | | [optional] +**mimetype** | **str** | | [optional] +**mediatype** | **str** | | [optional] +**size** | **str** | | [optional] +**preview** | [**Preview**](Preview.md) | | [optional] +**icon_url** | **str** | | [optional] +**collection** | 
[**Collection**](Collection.md) | | +**owner** | [**Person**](Person.md) | | +**is_public** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node import Node + +# TODO update the JSON string below +json = "{}" +# create an instance of Node from a JSON string +node_instance = Node.from_json(json) +# print the JSON string representation of the object +print(Node.to_json()) + +# convert the object into a dict +node_dict = node_instance.to_dict() +# create an instance of Node from a dict +node_from_dict = Node.from_dict(node_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeCollectionProposalCount.md b/edu_sharing_openapi/docs/NodeCollectionProposalCount.md new file mode 100644 index 00000000..d4acc8b9 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeCollectionProposalCount.md @@ -0,0 +1,63 @@ +# NodeCollectionProposalCount + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node_lti_deep_link** | [**NodeLTIDeepLink**](NodeLTIDeepLink.md) | | [optional] +**remote** | [**Remote**](Remote.md) | | [optional] +**content** | [**Content**](Content.md) | | [optional] +**license** | [**License**](License.md) | | [optional] +**is_directory** | **bool** | | [optional] +**comment_count** | **int** | | [optional] +**rating** | [**RatingDetails**](RatingDetails.md) | | [optional] +**used_in_collections** | [**List[Node]**](Node.md) | | [optional] +**relations** | [**Dict[str, Node]**](Node.md) | | [optional] +**contributors** | [**List[Contributor]**](Contributor.md) | | [optional] +**proposal_counts** | **Dict[str, int]** | | [optional] +**proposal_count** | **Dict[str, int]** | | [optional] +**ref** | [**NodeRef**](NodeRef.md) | | +**parent** | [**NodeRef**](NodeRef.md) | | [optional] +**type** | **str** | | [optional] +**aspects** | **List[str]** | | [optional] +**name** | **str** | | +**title** | **str** | | [optional] +**metadataset** | **str** | | [optional] +**repository_type** | **str** | | [optional] +**created_at** | **datetime** | | +**created_by** | [**Person**](Person.md) | | +**modified_at** | **datetime** | | [optional] +**modified_by** | [**Person**](Person.md) | | [optional] +**access** | **List[str]** | | +**download_url** | **str** | | +**properties** | **Dict[str, List[str]]** | | [optional] +**mimetype** | **str** | | [optional] +**mediatype** | **str** | | [optional] +**size** | **str** | | [optional] +**preview** | [**Preview**](Preview.md) | | [optional] +**icon_url** | **str** | | [optional] +**collection** | [**Collection**](Collection.md) | | +**owner** | [**Person**](Person.md) | | +**is_public** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_collection_proposal_count import NodeCollectionProposalCount + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeCollectionProposalCount from a JSON string +node_collection_proposal_count_instance = NodeCollectionProposalCount.from_json(json) +# print the JSON string representation of the object +print(NodeCollectionProposalCount.to_json()) + +# convert the object into a dict +node_collection_proposal_count_dict = node_collection_proposal_count_instance.to_dict() +# create an instance of NodeCollectionProposalCount from a dict +node_collection_proposal_count_from_dict = 
NodeCollectionProposalCount.from_dict(node_collection_proposal_count_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeData.md b/edu_sharing_openapi/docs/NodeData.md new file mode 100644 index 00000000..e91edba2 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeData.md @@ -0,0 +1,30 @@ +# NodeData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**timestamp** | **str** | | [optional] +**counts** | **Dict[str, int]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_data import NodeData + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeData from a JSON string +node_data_instance = NodeData.from_json(json) +# print the JSON string representation of the object +print(NodeData.to_json()) + +# convert the object into a dict +node_data_dict = node_data_instance.to_dict() +# create an instance of NodeData from a dict +node_data_from_dict = NodeData.from_dict(node_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeDataDTO.md b/edu_sharing_openapi/docs/NodeDataDTO.md new file mode 100644 index 00000000..19b2615f --- /dev/null +++ b/edu_sharing_openapi/docs/NodeDataDTO.md @@ -0,0 +1,31 @@ +# NodeDataDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type** | **str** | | [optional] +**aspects** | **List[str]** | | [optional] +**properties** | **Dict[str, object]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_data_dto import NodeDataDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeDataDTO from a JSON string +node_data_dto_instance = NodeDataDTO.from_json(json) +# print the JSON string representation of the object +print(NodeDataDTO.to_json()) + +# convert the object into a dict +node_data_dto_dict = node_data_dto_instance.to_dict() +# create an instance of NodeDataDTO from a dict +node_data_dto_from_dict = NodeDataDTO.from_dict(node_data_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeEntries.md b/edu_sharing_openapi/docs/NodeEntries.md new file mode 100644 index 00000000..a496a31d --- /dev/null +++ b/edu_sharing_openapi/docs/NodeEntries.md @@ -0,0 +1,30 @@ +# NodeEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**nodes** | [**List[Node]**](Node.md) | | +**pagination** | [**Pagination**](Pagination.md) | | + +## Example + +```python +from edu_sharing_client.models.node_entries import NodeEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeEntries from a JSON string +node_entries_instance = NodeEntries.from_json(json) +# print the JSON string representation of the object +print(NodeEntries.to_json()) + +# convert the object into a dict +node_entries_dict = node_entries_instance.to_dict() +# create an instance of NodeEntries from a dict +node_entries_from_dict = NodeEntries.from_dict(node_entries_dict) +``` 
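+
+## Paging sketch
+
+A minimal sketch of walking paged results, not part of the generated client: `fetch_page` is a hypothetical callable standing in for whichever list endpoint is used (one that accepts `skip_count`/`max_items` and returns a `NodeEntries`); the attribute names (`nodes`, `pagination`, and `Pagination`'s `total`, `count`, `var_from`) follow the property tables in these docs.
+
+```python
+from typing import Callable, List
+
+from edu_sharing_client.models.node import Node
+from edu_sharing_client.models.node_entries import NodeEntries
+
+
+def collect_all_nodes(fetch_page: Callable[[int], NodeEntries]) -> List[Node]:
+    """Gather Node objects across all pages.
+
+    fetch_page(skip_count) is a hypothetical wrapper around a list endpoint
+    that returns the NodeEntries for the given offset.
+    """
+    nodes: List[Node] = []
+    skip = 0
+    while True:
+        entries = fetch_page(skip)
+        nodes.extend(entries.nodes)
+        # Pagination exposes total, count and var_from (the spec's "from").
+        skip += entries.pagination.count
+        if entries.pagination.count == 0 or skip >= entries.pagination.total:
+            break
+    return nodes
+```
+
+The loop stops when a page comes back empty or the running offset reaches `pagination.total`.
+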
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeEntry.md b/edu_sharing_openapi/docs/NodeEntry.md new file mode 100644 index 00000000..a4ee26ab --- /dev/null +++ b/edu_sharing_openapi/docs/NodeEntry.md @@ -0,0 +1,29 @@ +# NodeEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**Node**](Node.md) | | + +## Example + +```python +from edu_sharing_client.models.node_entry import NodeEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeEntry from a JSON string +node_entry_instance = NodeEntry.from_json(json) +# print the JSON string representation of the object +print(NodeEntry.to_json()) + +# convert the object into a dict +node_entry_dict = node_entry_instance.to_dict() +# create an instance of NodeEntry from a dict +node_entry_from_dict = NodeEntry.from_dict(node_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeIssueEventDTO.md b/edu_sharing_openapi/docs/NodeIssueEventDTO.md new file mode 100644 index 00000000..eca197dc --- /dev/null +++ b/edu_sharing_openapi/docs/NodeIssueEventDTO.md @@ -0,0 +1,31 @@ +# NodeIssueEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**reason** | **str** | | [optional] +**user_comment** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_issue_event_dto import NodeIssueEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeIssueEventDTO from a JSON string +node_issue_event_dto_instance = NodeIssueEventDTO.from_json(json) +# print the JSON string representation of the object +print(NodeIssueEventDTO.to_json()) + +# convert the object into a dict +node_issue_event_dto_dict = node_issue_event_dto_instance.to_dict() +# create an instance of NodeIssueEventDTO from a dict +node_issue_event_dto_from_dict = NodeIssueEventDTO.from_dict(node_issue_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeLTIDeepLink.md b/edu_sharing_openapi/docs/NodeLTIDeepLink.md new file mode 100644 index 00000000..194b0358 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeLTIDeepLink.md @@ -0,0 +1,30 @@ +# NodeLTIDeepLink + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**lti_deep_link_return_url** | **str** | | [optional] +**jwt_deep_link_response** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeLTIDeepLink from a JSON string +node_lti_deep_link_instance = NodeLTIDeepLink.from_json(json) +# print the JSON string representation of the object +print(NodeLTIDeepLink.to_json()) + +# convert the object into a dict +node_lti_deep_link_dict = node_lti_deep_link_instance.to_dict() +# create an instance of NodeLTIDeepLink from a dict 
+node_lti_deep_link_from_dict = NodeLTIDeepLink.from_dict(node_lti_deep_link_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeLocked.md b/edu_sharing_openapi/docs/NodeLocked.md new file mode 100644 index 00000000..7550f56e --- /dev/null +++ b/edu_sharing_openapi/docs/NodeLocked.md @@ -0,0 +1,29 @@ +# NodeLocked + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**is_locked** | **bool** | | + +## Example + +```python +from edu_sharing_client.models.node_locked import NodeLocked + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeLocked from a JSON string +node_locked_instance = NodeLocked.from_json(json) +# print the JSON string representation of the object +print(NodeLocked.to_json()) + +# convert the object into a dict +node_locked_dict = node_locked_instance.to_dict() +# create an instance of NodeLocked from a dict +node_locked_from_dict = NodeLocked.from_dict(node_locked_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodePermissionEntry.md b/edu_sharing_openapi/docs/NodePermissionEntry.md new file mode 100644 index 00000000..ee680ec7 --- /dev/null +++ b/edu_sharing_openapi/docs/NodePermissionEntry.md @@ -0,0 +1,29 @@ +# NodePermissionEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**permissions** | [**NodePermissions**](NodePermissions.md) | | + +## Example + +```python +from edu_sharing_client.models.node_permission_entry import NodePermissionEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of NodePermissionEntry from a JSON string +node_permission_entry_instance = NodePermissionEntry.from_json(json) +# print the JSON string representation of the object +print(NodePermissionEntry.to_json()) + +# convert the object into a dict +node_permission_entry_dict = node_permission_entry_instance.to_dict() +# create an instance of NodePermissionEntry from a dict +node_permission_entry_from_dict = NodePermissionEntry.from_dict(node_permission_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodePermissions.md b/edu_sharing_openapi/docs/NodePermissions.md new file mode 100644 index 00000000..1b8f62ba --- /dev/null +++ b/edu_sharing_openapi/docs/NodePermissions.md @@ -0,0 +1,30 @@ +# NodePermissions + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**local_permissions** | [**ACL**](ACL.md) | | +**inherited_permissions** | [**List[ACE]**](ACE.md) | | + +## Example + +```python +from edu_sharing_client.models.node_permissions import NodePermissions + +# TODO update the JSON string below +json = "{}" +# create an instance of NodePermissions from a JSON string +node_permissions_instance = NodePermissions.from_json(json) +# print the JSON string representation of the object +print(NodePermissions.to_json()) + +# convert the object into a dict +node_permissions_dict = node_permissions_instance.to_dict() +# create an instance of 
NodePermissions from a dict +node_permissions_from_dict = NodePermissions.from_dict(node_permissions_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeRef.md b/edu_sharing_openapi/docs/NodeRef.md new file mode 100644 index 00000000..07528541 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeRef.md @@ -0,0 +1,32 @@ +# NodeRef + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repo** | **str** | | +**id** | **str** | | +**archived** | **bool** | | +**is_home_repo** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_ref import NodeRef + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeRef from a JSON string +node_ref_instance = NodeRef.from_json(json) +# print the JSON string representation of the object +print(NodeRef.to_json()) + +# convert the object into a dict +node_ref_dict = node_ref_instance.to_dict() +# create an instance of NodeRef from a dict +node_ref_from_dict = NodeRef.from_dict(node_ref_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeRelation.md b/edu_sharing_openapi/docs/NodeRelation.md new file mode 100644 index 00000000..6201896a --- /dev/null +++ b/edu_sharing_openapi/docs/NodeRelation.md @@ -0,0 +1,30 @@ +# NodeRelation + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**Node**](Node.md) | | [optional] +**relations** | [**List[RelationData]**](RelationData.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_relation import NodeRelation + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeRelation from a JSON string +node_relation_instance = NodeRelation.from_json(json) +# print the JSON string representation of the object +print(NodeRelation.to_json()) + +# convert the object into a dict +node_relation_dict = node_relation_instance.to_dict() +# create an instance of NodeRelation from a dict +node_relation_from_dict = NodeRelation.from_dict(node_relation_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeRemote.md b/edu_sharing_openapi/docs/NodeRemote.md new file mode 100644 index 00000000..98379caf --- /dev/null +++ b/edu_sharing_openapi/docs/NodeRemote.md @@ -0,0 +1,30 @@ +# NodeRemote + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**Node**](Node.md) | | +**remote** | [**Node**](Node.md) | | + +## Example + +```python +from edu_sharing_client.models.node_remote import NodeRemote + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeRemote from a JSON string +node_remote_instance = NodeRemote.from_json(json) +# print the JSON string representation of the object +print(NodeRemote.to_json()) + +# convert the object into a dict +node_remote_dict = node_remote_instance.to_dict() +# create an instance of NodeRemote from a dict +node_remote_from_dict = NodeRemote.from_dict(node_remote_dict) +``` 
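+
+## Usage sketch
+
+A minimal illustration of how the two sides of a `NodeRemote` might be compared, assuming the attribute names from the `Node`, `NodeRef` and `NodeRemote` property tables above; the recency check itself is only an example, not behaviour guaranteed by the API.
+
+```python
+from edu_sharing_client.models.node_remote import NodeRemote
+
+
+def remote_is_newer(entry: NodeRemote) -> bool:
+    """Report whether the remote copy was modified more recently than the local node.
+
+    modified_at is optional on Node, so the required created_at is used as a fallback.
+    """
+    local = entry.node.modified_at or entry.node.created_at
+    remote = entry.remote.modified_at or entry.remote.created_at
+    return remote > local
+```
+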
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeShare.md b/edu_sharing_openapi/docs/NodeShare.md new file mode 100644 index 00000000..ba7f9312 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeShare.md @@ -0,0 +1,36 @@ +# NodeShare + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**password** | **bool** | | [optional] +**token** | **str** | | [optional] +**email** | **str** | | [optional] +**expiry_date** | **int** | | [optional] +**invited_at** | **int** | | [optional] +**download_count** | **int** | | [optional] +**url** | **str** | | [optional] +**share_id** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_share import NodeShare + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeShare from a JSON string +node_share_instance = NodeShare.from_json(json) +# print the JSON string representation of the object +print(NodeShare.to_json()) + +# convert the object into a dict +node_share_dict = node_share_instance.to_dict() +# create an instance of NodeShare from a dict +node_share_from_dict = NodeShare.from_dict(node_share_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeStats.md b/edu_sharing_openapi/docs/NodeStats.md new file mode 100644 index 00000000..104b7d2f --- /dev/null +++ b/edu_sharing_openapi/docs/NodeStats.md @@ -0,0 +1,29 @@ +# NodeStats + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**total** | **Dict[str, int]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_stats import NodeStats + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeStats from a JSON string +node_stats_instance = NodeStats.from_json(json) +# print the JSON string representation of the object +print(NodeStats.to_json()) + +# convert the object into a dict +node_stats_dict = node_stats_instance.to_dict() +# create an instance of NodeStats from a dict +node_stats_from_dict = NodeStats.from_dict(node_stats_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeText.md b/edu_sharing_openapi/docs/NodeText.md new file mode 100644 index 00000000..9f4c4d1d --- /dev/null +++ b/edu_sharing_openapi/docs/NodeText.md @@ -0,0 +1,31 @@ +# NodeText + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**text** | **str** | | [optional] +**html** | **str** | | [optional] +**raw** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_text import NodeText + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeText from a JSON string +node_text_instance = NodeText.from_json(json) +# print the JSON string representation of the object +print(NodeText.to_json()) + +# convert the object into a dict +node_text_dict = node_text_instance.to_dict() +# create an instance of NodeText from a dict +node_text_from_dict = NodeText.from_dict(node_text_dict) +``` +[[Back to 
Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeVersion.md b/edu_sharing_openapi/docs/NodeVersion.md new file mode 100644 index 00000000..1aecaca4 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeVersion.md @@ -0,0 +1,34 @@ +# NodeVersion + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**properties** | **Dict[str, List[str]]** | | [optional] +**version** | [**NodeVersionRef**](NodeVersionRef.md) | | +**comment** | **str** | | +**modified_at** | **str** | | +**modified_by** | [**Person**](Person.md) | | +**content_url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.node_version import NodeVersion + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeVersion from a JSON string +node_version_instance = NodeVersion.from_json(json) +# print the JSON string representation of the object +print(NodeVersion.to_json()) + +# convert the object into a dict +node_version_dict = node_version_instance.to_dict() +# create an instance of NodeVersion from a dict +node_version_from_dict = NodeVersion.from_dict(node_version_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeVersionEntries.md b/edu_sharing_openapi/docs/NodeVersionEntries.md new file mode 100644 index 00000000..1982aec3 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeVersionEntries.md @@ -0,0 +1,29 @@ +# NodeVersionEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**versions** | [**List[NodeVersion]**](NodeVersion.md) | | + +## Example + +```python +from edu_sharing_client.models.node_version_entries import NodeVersionEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeVersionEntries from a JSON string +node_version_entries_instance = NodeVersionEntries.from_json(json) +# print the JSON string representation of the object +print(NodeVersionEntries.to_json()) + +# convert the object into a dict +node_version_entries_dict = node_version_entries_instance.to_dict() +# create an instance of NodeVersionEntries from a dict +node_version_entries_from_dict = NodeVersionEntries.from_dict(node_version_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeVersionEntry.md b/edu_sharing_openapi/docs/NodeVersionEntry.md new file mode 100644 index 00000000..9438e021 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeVersionEntry.md @@ -0,0 +1,29 @@ +# NodeVersionEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**version** | [**NodeVersion**](NodeVersion.md) | | + +## Example + +```python +from edu_sharing_client.models.node_version_entry import NodeVersionEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeVersionEntry from a JSON string +node_version_entry_instance = NodeVersionEntry.from_json(json) +# print the JSON string representation of the object +print(NodeVersionEntry.to_json()) + +# convert the object into a dict 
+node_version_entry_dict = node_version_entry_instance.to_dict() +# create an instance of NodeVersionEntry from a dict +node_version_entry_from_dict = NodeVersionEntry.from_dict(node_version_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeVersionRef.md b/edu_sharing_openapi/docs/NodeVersionRef.md new file mode 100644 index 00000000..e4037549 --- /dev/null +++ b/edu_sharing_openapi/docs/NodeVersionRef.md @@ -0,0 +1,31 @@ +# NodeVersionRef + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeRef**](NodeRef.md) | | +**major** | **int** | | +**minor** | **int** | | + +## Example + +```python +from edu_sharing_client.models.node_version_ref import NodeVersionRef + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeVersionRef from a JSON string +node_version_ref_instance = NodeVersionRef.from_json(json) +# print the JSON string representation of the object +print(NodeVersionRef.to_json()) + +# convert the object into a dict +node_version_ref_dict = node_version_ref_instance.to_dict() +# create an instance of NodeVersionRef from a dict +node_version_ref_from_dict = NodeVersionRef.from_dict(node_version_ref_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NodeVersionRefEntries.md b/edu_sharing_openapi/docs/NodeVersionRefEntries.md new file mode 100644 index 00000000..fc94e43b --- /dev/null +++ b/edu_sharing_openapi/docs/NodeVersionRefEntries.md @@ -0,0 +1,29 @@ +# NodeVersionRefEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**versions** | [**List[NodeVersionRef]**](NodeVersionRef.md) | | + +## Example + +```python +from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of NodeVersionRefEntries from a JSON string +node_version_ref_entries_instance = NodeVersionRefEntries.from_json(json) +# print the JSON string representation of the object +print(NodeVersionRefEntries.to_json()) + +# convert the object into a dict +node_version_ref_entries_dict = node_version_ref_entries_instance.to_dict() +# create an instance of NodeVersionRefEntries from a dict +node_version_ref_entries_from_dict = NodeVersionRefEntries.from_dict(node_version_ref_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NotificationConfig.md b/edu_sharing_openapi/docs/NotificationConfig.md new file mode 100644 index 00000000..c954972d --- /dev/null +++ b/edu_sharing_openapi/docs/NotificationConfig.md @@ -0,0 +1,31 @@ +# NotificationConfig + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**config_mode** | **str** | | [optional] +**default_interval** | **str** | | [optional] +**intervals** | [**NotificationIntervals**](NotificationIntervals.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.notification_config import NotificationConfig + +# 
TODO update the JSON string below +json = "{}" +# create an instance of NotificationConfig from a JSON string +notification_config_instance = NotificationConfig.from_json(json) +# print the JSON string representation of the object +print(NotificationConfig.to_json()) + +# convert the object into a dict +notification_config_dict = notification_config_instance.to_dict() +# create an instance of NotificationConfig from a dict +notification_config_from_dict = NotificationConfig.from_dict(notification_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NotificationEventDTO.md b/edu_sharing_openapi/docs/NotificationEventDTO.md new file mode 100644 index 00000000..398dfc2b --- /dev/null +++ b/edu_sharing_openapi/docs/NotificationEventDTO.md @@ -0,0 +1,34 @@ +# NotificationEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**timestamp** | **datetime** | | [optional] +**creator** | [**UserDataDTO**](UserDataDTO.md) | | [optional] +**receiver** | [**UserDataDTO**](UserDataDTO.md) | | [optional] +**status** | **str** | | [optional] +**id** | **str** | | [optional] +**var_class** | **str** | | + +## Example + +```python +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of NotificationEventDTO from a JSON string +notification_event_dto_instance = NotificationEventDTO.from_json(json) +# print the JSON string representation of the object +print(NotificationEventDTO.to_json()) + +# convert the object into a dict +notification_event_dto_dict = notification_event_dto_instance.to_dict() +# create an instance of NotificationEventDTO from a dict +notification_event_dto_from_dict = NotificationEventDTO.from_dict(notification_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NotificationIntervals.md b/edu_sharing_openapi/docs/NotificationIntervals.md new file mode 100644 index 00000000..8fb7d6e1 --- /dev/null +++ b/edu_sharing_openapi/docs/NotificationIntervals.md @@ -0,0 +1,36 @@ +# NotificationIntervals + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**add_to_collection_event** | **str** | | [optional] +**propose_for_collection_event** | **str** | | [optional] +**comment_event** | **str** | | [optional] +**invite_event** | **str** | | [optional] +**node_issue_event** | **str** | | [optional] +**rating_event** | **str** | | [optional] +**workflow_event** | **str** | | [optional] +**metadata_suggestion_event** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.notification_intervals import NotificationIntervals + +# TODO update the JSON string below +json = "{}" +# create an instance of NotificationIntervals from a JSON string +notification_intervals_instance = NotificationIntervals.from_json(json) +# print the JSON string representation of the object +print(NotificationIntervals.to_json()) + +# convert the object into a dict +notification_intervals_dict = notification_intervals_instance.to_dict() +# create an instance of NotificationIntervals from a dict +notification_intervals_from_dict 
= NotificationIntervals.from_dict(notification_intervals_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NotificationResponsePage.md b/edu_sharing_openapi/docs/NotificationResponsePage.md new file mode 100644 index 00000000..0cd20337 --- /dev/null +++ b/edu_sharing_openapi/docs/NotificationResponsePage.md @@ -0,0 +1,39 @@ +# NotificationResponsePage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**content** | [**List[NotificationEventDTO]**](NotificationEventDTO.md) | | [optional] +**pageable** | [**Pageable**](Pageable.md) | | [optional] +**total_elements** | **int** | | [optional] +**total_pages** | **int** | | [optional] +**last** | **bool** | | [optional] +**number_of_elements** | **int** | | [optional] +**first** | **bool** | | [optional] +**size** | **int** | | [optional] +**number** | **int** | | [optional] +**sort** | [**Sort**](Sort.md) | | [optional] +**empty** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.notification_response_page import NotificationResponsePage + +# TODO update the JSON string below +json = "{}" +# create an instance of NotificationResponsePage from a JSON string +notification_response_page_instance = NotificationResponsePage.from_json(json) +# print the JSON string representation of the object +print(NotificationResponsePage.to_json()) + +# convert the object into a dict +notification_response_page_dict = notification_response_page_instance.to_dict() +# create an instance of NotificationResponsePage from a dict +notification_response_page_from_dict = NotificationResponsePage.from_dict(notification_response_page_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/NotifyEntry.md b/edu_sharing_openapi/docs/NotifyEntry.md new file mode 100644 index 00000000..83e08639 --- /dev/null +++ b/edu_sharing_openapi/docs/NotifyEntry.md @@ -0,0 +1,32 @@ +# NotifyEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**var_date** | **int** | | +**permissions** | [**ACL**](ACL.md) | | +**user** | [**User**](User.md) | | +**action** | **str** | | + +## Example + +```python +from edu_sharing_client.models.notify_entry import NotifyEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of NotifyEntry from a JSON string +notify_entry_instance = NotifyEntry.from_json(json) +# print the JSON string representation of the object +print(NotifyEntry.to_json()) + +# convert the object into a dict +notify_entry_dict = notify_entry_instance.to_dict() +# create an instance of NotifyEntry from a dict +notify_entry_from_dict = NotifyEntry.from_dict(notify_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ORGANIZATIONV1Api.md b/edu_sharing_openapi/docs/ORGANIZATIONV1Api.md new file mode 100644 index 00000000..ce9ad50b --- /dev/null +++ b/edu_sharing_openapi/docs/ORGANIZATIONV1Api.md @@ -0,0 +1,397 @@ +# edu_sharing_client.ORGANIZATIONV1Api + +All URIs are relative to 
*https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**create_organizations**](ORGANIZATIONV1Api.md#create_organizations) | **PUT** /organization/v1/organizations/{repository}/{organization} | create organization in repository. +[**delete_organizations**](ORGANIZATIONV1Api.md#delete_organizations) | **DELETE** /organization/v1/organizations/{repository}/{organization} | Delete organization of repository. +[**get_organization**](ORGANIZATIONV1Api.md#get_organization) | **GET** /organization/v1/organizations/{repository}/{organization} | Get organization by id. +[**get_organizations**](ORGANIZATIONV1Api.md#get_organizations) | **GET** /organization/v1/organizations/{repository} | Get organizations of repository. +[**remove_from_organization**](ORGANIZATIONV1Api.md#remove_from_organization) | **DELETE** /organization/v1/organizations/{repository}/{organization}/member/{member} | Remove member from organization. + + +# **create_organizations** +> Organization create_organizations(repository, organization, eduscope=eduscope) + +create organization in repository. + +create organization in repository. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ORGANIZATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + organization = 'organization_example' # str | organization name + eduscope = 'eduscope_example' # str | eduscope (may be null) (optional) + + try: + # create organization in repository. + api_response = api_instance.create_organizations(repository, organization, eduscope=eduscope) + print("The response of ORGANIZATIONV1Api->create_organizations:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ORGANIZATIONV1Api->create_organizations: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **organization** | **str**| organization name | + **eduscope** | **str**| eduscope (may be null) | [optional] + +### Return type + +[**Organization**](Organization.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_organizations** +> delete_organizations(repository, organization) + +Delete organization of repository. + +Delete organization of repository. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ORGANIZATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + organization = 'organization_example' # str | groupname + + try: + # Delete organization of repository. + api_instance.delete_organizations(repository, organization) + except Exception as e: + print("Exception when calling ORGANIZATIONV1Api->delete_organizations: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **organization** | **str**| groupname | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_organization** +> Organization get_organization(repository, organization) + +Get organization by id. + +Get organization by id. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ORGANIZATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + organization = 'organization_example' # str | ID of organization + + try: + # Get organization by id. 
+ api_response = api_instance.get_organization(repository, organization) + print("The response of ORGANIZATIONV1Api->get_organization:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ORGANIZATIONV1Api->get_organization: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **organization** | **str**| ID of organization | + +### Return type + +[**Organization**](Organization.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_organizations** +> OrganizationEntries get_organizations(repository, pattern=pattern, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, only_memberships=only_memberships) + +Get organizations of repository. + +Get organizations of repository the current user is member. May returns an empty list. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.organization_entries import OrganizationEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ORGANIZATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + pattern = 'pattern_example' # str | pattern (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + only_memberships = True # bool | search only in memberships, false can only be done by admin (optional) (default to True) + + try: + # Get organizations of repository. 
+ api_response = api_instance.get_organizations(repository, pattern=pattern, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, only_memberships=only_memberships) + print("The response of ORGANIZATIONV1Api->get_organizations:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling ORGANIZATIONV1Api->get_organizations: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **pattern** | **str**| pattern | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **only_memberships** | **bool**| search only in memberships, false can only be done by admin | [optional] [default to True] + +### Return type + +[**OrganizationEntries**](OrganizationEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **remove_from_organization** +> remove_from_organization(repository, organization, member) + +Remove member from organization. + +Remove member from organization. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.ORGANIZATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + organization = 'organization_example' # str | groupname + member = 'member_example' # str | authorityName of member + + try: + # Remove member from organization. 
+ api_instance.remove_from_organization(repository, organization, member) + except Exception as e: + print("Exception when calling ORGANIZATIONV1Api->remove_from_organization: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **organization** | **str**| groupname | + **member** | **str**| authorityName of member | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/OpenIdConfiguration.md b/edu_sharing_openapi/docs/OpenIdConfiguration.md new file mode 100644 index 00000000..89a250fa --- /dev/null +++ b/edu_sharing_openapi/docs/OpenIdConfiguration.md @@ -0,0 +1,41 @@ +# OpenIdConfiguration + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**issuer** | **str** | | [optional] +**token_endpoint** | **str** | | [optional] +**token_endpoint_auth_methods_supported** | **List[str]** | | [optional] +**token_endpoint_auth_signing_alg_values_supported** | **List[str]** | | [optional] +**jwks_uri** | **str** | | [optional] +**authorization_endpoint** | **str** | | [optional] +**registration_endpoint** | **str** | | [optional] +**scopes_supported** | **List[str]** | | [optional] +**response_types_supported** | **List[str]** | | [optional] +**subject_types_supported** | **List[str]** | | [optional] +**id_token_signing_alg_values_supported** | **List[str]** | | [optional] +**claims_supported** | **List[str]** | | [optional] +**https__purl_imsglobal_org_spec_lti_platform_configuration** | [**LTIPlatformConfiguration**](LTIPlatformConfiguration.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.open_id_configuration import OpenIdConfiguration + +# TODO update the JSON string below +json = "{}" +# create an instance of OpenIdConfiguration from a JSON string +open_id_configuration_instance = OpenIdConfiguration.from_json(json) +# print the JSON string representation of the object +print(OpenIdConfiguration.to_json()) + +# convert the object into a dict +open_id_configuration_dict = open_id_configuration_instance.to_dict() +# create an instance of OpenIdConfiguration from a dict +open_id_configuration_from_dict = OpenIdConfiguration.from_dict(open_id_configuration_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/OpenIdRegistrationResult.md b/edu_sharing_openapi/docs/OpenIdRegistrationResult.md new file mode 100644 index 00000000..4b86b28b --- /dev/null +++ 
b/edu_sharing_openapi/docs/OpenIdRegistrationResult.md @@ -0,0 +1,40 @@ +# OpenIdRegistrationResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**client_id** | **str** | | [optional] +**response_types** | **List[str]** | | [optional] +**jwks_uri** | **str** | | [optional] +**initiate_login_uri** | **str** | | [optional] +**grant_types** | **List[str]** | | [optional] +**redirect_uris** | **List[str]** | | [optional] +**application_type** | **str** | | [optional] +**token_endpoint_auth_method** | **str** | | [optional] +**client_name** | **str** | | [optional] +**logo_uri** | **str** | | [optional] +**scope** | **str** | | [optional] +**https__purl_imsglobal_org_spec_lti_tool_configuration** | [**LTIToolConfiguration**](LTIToolConfiguration.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.open_id_registration_result import OpenIdRegistrationResult + +# TODO update the JSON string below +json = "{}" +# create an instance of OpenIdRegistrationResult from a JSON string +open_id_registration_result_instance = OpenIdRegistrationResult.from_json(json) +# print the JSON string representation of the object +print(OpenIdRegistrationResult.to_json()) + +# convert the object into a dict +open_id_registration_result_dict = open_id_registration_result_instance.to_dict() +# create an instance of OpenIdRegistrationResult from a dict +open_id_registration_result_from_dict = OpenIdRegistrationResult.from_dict(open_id_registration_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/OrganisationsImportResult.md b/edu_sharing_openapi/docs/OrganisationsImportResult.md new file mode 100644 index 00000000..6eb5c61d --- /dev/null +++ b/edu_sharing_openapi/docs/OrganisationsImportResult.md @@ -0,0 +1,29 @@ +# OrganisationsImportResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**rows** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.organisations_import_result import OrganisationsImportResult + +# TODO update the JSON string below +json = "{}" +# create an instance of OrganisationsImportResult from a JSON string +organisations_import_result_instance = OrganisationsImportResult.from_json(json) +# print the JSON string representation of the object +print(OrganisationsImportResult.to_json()) + +# convert the object into a dict +organisations_import_result_dict = organisations_import_result_instance.to_dict() +# create an instance of OrganisationsImportResult from a dict +organisations_import_result_from_dict = OrganisationsImportResult.from_dict(organisations_import_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Organization.md b/edu_sharing_openapi/docs/Organization.md new file mode 100644 index 00000000..0a427df4 --- /dev/null +++ b/edu_sharing_openapi/docs/Organization.md @@ -0,0 +1,39 @@ +# Organization + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**properties** | **Dict[str, List[str]]** | | [optional] +**editable** | **bool** | | [optional] +**signup_method** | **str** | | [optional] 
+**ref** | [**NodeRef**](NodeRef.md) | | [optional] +**aspects** | **List[str]** | | [optional] +**authority_name** | **str** | | +**authority_type** | **str** | | [optional] +**group_name** | **str** | | [optional] +**profile** | [**GroupProfile**](GroupProfile.md) | | [optional] +**administration_access** | **bool** | | [optional] +**shared_folder** | [**NodeRef**](NodeRef.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.organization import Organization + +# TODO update the JSON string below +json = "{}" +# create an instance of Organization from a JSON string +organization_instance = Organization.from_json(json) +# print the JSON string representation of the object +print(Organization.to_json()) + +# convert the object into a dict +organization_dict = organization_instance.to_dict() +# create an instance of Organization from a dict +organization_from_dict = Organization.from_dict(organization_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/OrganizationEntries.md b/edu_sharing_openapi/docs/OrganizationEntries.md new file mode 100644 index 00000000..53e40918 --- /dev/null +++ b/edu_sharing_openapi/docs/OrganizationEntries.md @@ -0,0 +1,31 @@ +# OrganizationEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**organizations** | [**List[Organization]**](Organization.md) | | +**pagination** | [**Pagination**](Pagination.md) | | +**can_create** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.organization_entries import OrganizationEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of OrganizationEntries from a JSON string +organization_entries_instance = OrganizationEntries.from_json(json) +# print the JSON string representation of the object +print(OrganizationEntries.to_json()) + +# convert the object into a dict +organization_entries_dict = organization_entries_instance.to_dict() +# create an instance of OrganizationEntries from a dict +organization_entries_from_dict = OrganizationEntries.from_dict(organization_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Pageable.md b/edu_sharing_openapi/docs/Pageable.md new file mode 100644 index 00000000..fd4a3576 --- /dev/null +++ b/edu_sharing_openapi/docs/Pageable.md @@ -0,0 +1,34 @@ +# Pageable + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**page_number** | **int** | | [optional] +**unpaged** | **bool** | | [optional] +**offset** | **int** | | [optional] +**sort** | [**Sort**](Sort.md) | | [optional] +**paged** | **bool** | | [optional] +**page_size** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.pageable import Pageable + +# TODO update the JSON string below +json = "{}" +# create an instance of Pageable from a JSON string +pageable_instance = Pageable.from_json(json) +# print the JSON string representation of the object +print(Pageable.to_json()) + +# convert the object into a dict +pageable_dict = pageable_instance.to_dict() +# create an instance of Pageable from a dict +pageable_from_dict = Pageable.from_dict(pageable_dict) 
+``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Pagination.md b/edu_sharing_openapi/docs/Pagination.md new file mode 100644 index 00000000..ef8c6cfc --- /dev/null +++ b/edu_sharing_openapi/docs/Pagination.md @@ -0,0 +1,31 @@ +# Pagination + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**total** | **int** | | +**var_from** | **int** | | +**count** | **int** | | + +## Example + +```python +from edu_sharing_client.models.pagination import Pagination + +# TODO update the JSON string below +json = "{}" +# create an instance of Pagination from a JSON string +pagination_instance = Pagination.from_json(json) +# print the JSON string representation of the object +print(Pagination.to_json()) + +# convert the object into a dict +pagination_dict = pagination_instance.to_dict() +# create an instance of Pagination from a dict +pagination_from_dict = Pagination.from_dict(pagination_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Parameters.md b/edu_sharing_openapi/docs/Parameters.md new file mode 100644 index 00000000..88d24cc1 --- /dev/null +++ b/edu_sharing_openapi/docs/Parameters.md @@ -0,0 +1,29 @@ +# Parameters + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**general** | [**General**](General.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.parameters import Parameters + +# TODO update the JSON string below +json = "{}" +# create an instance of Parameters from a JSON string +parameters_instance = Parameters.from_json(json) +# print the JSON string representation of the object +print(Parameters.to_json()) + +# convert the object into a dict +parameters_dict = parameters_instance.to_dict() +# create an instance of Parameters from a dict +parameters_from_dict = Parameters.from_dict(parameters_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ParentEntries.md b/edu_sharing_openapi/docs/ParentEntries.md new file mode 100644 index 00000000..ebcc6f88 --- /dev/null +++ b/edu_sharing_openapi/docs/ParentEntries.md @@ -0,0 +1,31 @@ +# ParentEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**scope** | **str** | | [optional] +**nodes** | [**List[Node]**](Node.md) | | +**pagination** | [**Pagination**](Pagination.md) | | + +## Example + +```python +from edu_sharing_client.models.parent_entries import ParentEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of ParentEntries from a JSON string +parent_entries_instance = ParentEntries.from_json(json) +# print the JSON string representation of the object +print(ParentEntries.to_json()) + +# convert the object into a dict +parent_entries_dict = parent_entries_instance.to_dict() +# create an instance of ParentEntries from a dict +parent_entries_from_dict = ParentEntries.from_dict(parent_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Person.md b/edu_sharing_openapi/docs/Person.md new file mode 100644 index 00000000..d0ec1fd0 --- /dev/null +++ b/edu_sharing_openapi/docs/Person.md @@ -0,0 +1,32 @@ +# Person + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**profile** | [**UserProfile**](UserProfile.md) | | [optional] +**first_name** | **str** | | [optional] +**last_name** | **str** | | [optional] +**mailbox** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.person import Person + +# TODO update the JSON string below +json = "{}" +# create an instance of Person from a JSON string +person_instance = Person.from_json(json) +# print the JSON string representation of the object +print(Person.to_json()) + +# convert the object into a dict +person_dict = person_instance.to_dict() +# create an instance of Person from a dict +person_from_dict = Person.from_dict(person_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/PersonDeleteOptions.md b/edu_sharing_openapi/docs/PersonDeleteOptions.md new file mode 100644 index 00000000..f75d45e1 --- /dev/null +++ b/edu_sharing_openapi/docs/PersonDeleteOptions.md @@ -0,0 +1,39 @@ +# PersonDeleteOptions + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**cleanup_metadata** | **bool** | | [optional] +**home_folder** | [**HomeFolderOptions**](HomeFolderOptions.md) | | [optional] +**shared_folders** | [**SharedFolderOptions**](SharedFolderOptions.md) | | [optional] +**collections** | [**CollectionOptions**](CollectionOptions.md) | | [optional] +**ratings** | [**DeleteOption**](DeleteOption.md) | | [optional] +**comments** | [**DeleteOption**](DeleteOption.md) | | [optional] +**collection_feedback** | [**DeleteOption**](DeleteOption.md) | | [optional] +**statistics** | [**DeleteOption**](DeleteOption.md) | | [optional] +**stream** | [**DeleteOption**](DeleteOption.md) | | [optional] +**receiver** | **str** | | [optional] +**receiver_group** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.person_delete_options import PersonDeleteOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of PersonDeleteOptions from a JSON string +person_delete_options_instance = PersonDeleteOptions.from_json(json) +# print the JSON string representation of the object +print(PersonDeleteOptions.to_json()) + +# convert the object into a dict +person_delete_options_dict = person_delete_options_instance.to_dict() +# create an instance of PersonDeleteOptions from a dict +person_delete_options_from_dict = PersonDeleteOptions.from_dict(person_delete_options_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/PersonDeleteResult.md b/edu_sharing_openapi/docs/PersonDeleteResult.md new file mode 100644 index 00000000..20c0a3f6 --- /dev/null +++ b/edu_sharing_openapi/docs/PersonDeleteResult.md @@ -0,0 +1,37 @@ +# PersonDeleteResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- 
+**authority_name** | **str** | | [optional] +**deleted_name** | **str** | | [optional] +**home_folder** | [**Dict[str, Counts]**](Counts.md) | | [optional] +**shared_folders** | [**Dict[str, Counts]**](Counts.md) | | [optional] +**collections** | [**CollectionCounts**](CollectionCounts.md) | | [optional] +**comments** | **int** | | [optional] +**ratings** | **int** | | [optional] +**collection_feedback** | **int** | | [optional] +**stream** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.person_delete_result import PersonDeleteResult + +# TODO update the JSON string below +json = "{}" +# create an instance of PersonDeleteResult from a JSON string +person_delete_result_instance = PersonDeleteResult.from_json(json) +# print the JSON string representation of the object +print(PersonDeleteResult.to_json()) + +# convert the object into a dict +person_delete_result_dict = person_delete_result_instance.to_dict() +# create an instance of PersonDeleteResult from a dict +person_delete_result_from_dict = PersonDeleteResult.from_dict(person_delete_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/PersonReport.md b/edu_sharing_openapi/docs/PersonReport.md new file mode 100644 index 00000000..92452f4b --- /dev/null +++ b/edu_sharing_openapi/docs/PersonReport.md @@ -0,0 +1,30 @@ +# PersonReport + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**options** | [**PersonDeleteOptions**](PersonDeleteOptions.md) | | [optional] +**results** | [**List[PersonDeleteResult]**](PersonDeleteResult.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.person_report import PersonReport + +# TODO update the JSON string below +json = "{}" +# create an instance of PersonReport from a JSON string +person_report_instance = PersonReport.from_json(json) +# print the JSON string representation of the object +print(PersonReport.to_json()) + +# convert the object into a dict +person_report_dict = person_report_instance.to_dict() +# create an instance of PersonReport from a dict +person_report_from_dict = PersonReport.from_dict(person_report_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/PluginInfo.md b/edu_sharing_openapi/docs/PluginInfo.md new file mode 100644 index 00000000..2175032d --- /dev/null +++ b/edu_sharing_openapi/docs/PluginInfo.md @@ -0,0 +1,29 @@ +# PluginInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.plugin_info import PluginInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of PluginInfo from a JSON string +plugin_info_instance = PluginInfo.from_json(json) +# print the JSON string representation of the object +print(PluginInfo.to_json()) + +# convert the object into a dict +plugin_info_dict = plugin_info_instance.to_dict() +# create an instance of PluginInfo from a dict +plugin_info_from_dict = PluginInfo.from_dict(plugin_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/PluginStatus.md b/edu_sharing_openapi/docs/PluginStatus.md new file mode 100644 index 00000000..4223db1b --- /dev/null +++ b/edu_sharing_openapi/docs/PluginStatus.md @@ -0,0 +1,31 @@ +# PluginStatus + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**version** | **str** | | [optional] +**name** | **str** | | [optional] +**enabled** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.plugin_status import PluginStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of PluginStatus from a JSON string +plugin_status_instance = PluginStatus.from_json(json) +# print the JSON string representation of the object +print(PluginStatus.to_json()) + +# convert the object into a dict +plugin_status_dict = plugin_status_instance.to_dict() +# create an instance of PluginStatus from a dict +plugin_status_from_dict = PluginStatus.from_dict(plugin_status_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Preferences.md b/edu_sharing_openapi/docs/Preferences.md new file mode 100644 index 00000000..5a0ab652 --- /dev/null +++ b/edu_sharing_openapi/docs/Preferences.md @@ -0,0 +1,29 @@ +# Preferences + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**preferences** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.preferences import Preferences + +# TODO update the JSON string below +json = "{}" +# create an instance of Preferences from a JSON string +preferences_instance = Preferences.from_json(json) +# print the JSON string representation of the object +print(Preferences.to_json()) + +# convert the object into a dict +preferences_dict = preferences_instance.to_dict() +# create an instance of Preferences from a dict +preferences_from_dict = Preferences.from_dict(preferences_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Preview.md b/edu_sharing_openapi/docs/Preview.md new file mode 100644 index 00000000..1a5d6ef2 --- /dev/null +++ b/edu_sharing_openapi/docs/Preview.md @@ -0,0 +1,36 @@ +# Preview + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**is_icon** | **bool** | | +**is_generated** | **bool** | | [optional] +**type** | **str** | | [optional] +**mimetype** | **str** | | [optional] +**data** | **bytearray** | | [optional] +**url** | **str** | | +**width** | **int** | | +**height** | **int** | | + +## Example + +```python +from edu_sharing_client.models.preview import Preview + +# TODO update the JSON string below +json = "{}" +# create an instance of Preview from a JSON string +preview_instance = Preview.from_json(json) +# print the JSON string representation of the object +print(Preview.to_json()) + +# convert the object into a dict +preview_dict = preview_instance.to_dict() +# create an instance of Preview from a dict +preview_from_dict = Preview.from_dict(preview_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Profile.md b/edu_sharing_openapi/docs/Profile.md new file mode 100644 index 00000000..b9bb58b9 --- /dev/null +++ b/edu_sharing_openapi/docs/Profile.md @@ -0,0 +1,33 @@ +# Profile + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**group_email** | **str** | | [optional] +**mediacenter** | [**MediacenterProfileExtension**](MediacenterProfileExtension.md) | | [optional] +**display_name** | **str** | | [optional] +**group_type** | **str** | | [optional] +**scope_type** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.profile import Profile + +# TODO update the JSON string below +json = "{}" +# create an instance of Profile from a JSON string +profile_instance = Profile.from_json(json) +# print the JSON string representation of the object +print(Profile.to_json()) + +# convert the object into a dict +profile_dict = profile_instance.to_dict() +# create an instance of Profile from a dict +profile_from_dict = Profile.from_dict(profile_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ProfileSettings.md b/edu_sharing_openapi/docs/ProfileSettings.md new file mode 100644 index 00000000..cad53870 --- /dev/null +++ b/edu_sharing_openapi/docs/ProfileSettings.md @@ -0,0 +1,29 @@ +# ProfileSettings + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**show_email** | **bool** | false | + +## Example + +```python +from edu_sharing_client.models.profile_settings import ProfileSettings + +# TODO update the JSON string below +json = "{}" +# create an instance of ProfileSettings from a JSON string +profile_settings_instance = ProfileSettings.from_json(json) +# print the JSON string representation of the object +print(ProfileSettings.to_json()) + +# convert the object into a dict +profile_settings_dict = profile_settings_instance.to_dict() +# create an instance of ProfileSettings from a dict +profile_settings_from_dict = ProfileSettings.from_dict(profile_settings_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ProposeForCollectionEventDTO.md b/edu_sharing_openapi/docs/ProposeForCollectionEventDTO.md new file mode 100644 index 00000000..98b3b38e --- /dev/null +++ b/edu_sharing_openapi/docs/ProposeForCollectionEventDTO.md @@ -0,0 +1,30 @@ +# ProposeForCollectionEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**collection** | [**CollectionDTO**](CollectionDTO.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.propose_for_collection_event_dto import ProposeForCollectionEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of ProposeForCollectionEventDTO from a JSON string +propose_for_collection_event_dto_instance = ProposeForCollectionEventDTO.from_json(json) +# print the JSON string representation of the object +print(ProposeForCollectionEventDTO.to_json()) + +# convert the object into a dict 
+propose_for_collection_event_dto_dict = propose_for_collection_event_dto_instance.to_dict() +# create an instance of ProposeForCollectionEventDTO from a dict +propose_for_collection_event_dto_from_dict = ProposeForCollectionEventDTO.from_dict(propose_for_collection_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Provider.md b/edu_sharing_openapi/docs/Provider.md new file mode 100644 index 00000000..3ddfcd08 --- /dev/null +++ b/edu_sharing_openapi/docs/Provider.md @@ -0,0 +1,33 @@ +# Provider + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**legal_name** | **str** | | [optional] +**url** | **str** | | [optional] +**email** | **str** | | [optional] +**area_served** | **str** | | [optional] +**location** | [**Location**](Location.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.provider import Provider + +# TODO update the JSON string below +json = "{}" +# create an instance of Provider from a JSON string +provider_instance = Provider.from_json(json) +# print the JSON string representation of the object +print(Provider.to_json()) + +# convert the object into a dict +provider_dict = provider_instance.to_dict() +# create an instance of Provider from a dict +provider_from_dict = Provider.from_dict(provider_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Query.md b/edu_sharing_openapi/docs/Query.md new file mode 100644 index 00000000..318d273b --- /dev/null +++ b/edu_sharing_openapi/docs/Query.md @@ -0,0 +1,30 @@ +# Query + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**condition** | [**Condition**](Condition.md) | | [optional] +**query** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.query import Query + +# TODO update the JSON string below +json = "{}" +# create an instance of Query from a JSON string +query_instance = Query.from_json(json) +# print the JSON string representation of the object +print(Query.to_json()) + +# convert the object into a dict +query_dict = query_instance.to_dict() +# create an instance of Query from a dict +query_from_dict = Query.from_dict(query_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RATINGV1Api.md b/edu_sharing_openapi/docs/RATINGV1Api.md new file mode 100644 index 00000000..ac63c016 --- /dev/null +++ b/edu_sharing_openapi/docs/RATINGV1Api.md @@ -0,0 +1,310 @@ +# edu_sharing_client.RATINGV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_or_update_rating**](RATINGV1Api.md#add_or_update_rating) | **PUT** /rating/v1/ratings/{repository}/{node} | create or update a rating +[**delete_rating**](RATINGV1Api.md#delete_rating) | **DELETE** /rating/v1/ratings/{repository}/{node} | delete a comment +[**get_accumulated_ratings**](RATINGV1Api.md#get_accumulated_ratings) | **GET** /rating/v1/ratings/{repository}/{node}/history | 
get the range of nodes which had tracked actions since a given timestamp +[**get_nodes_altered_in_range**](RATINGV1Api.md#get_nodes_altered_in_range) | **GET** /rating/v1/ratings/{repository}/nodes/altered | get the range of nodes which had tracked actions since a given timestamp + + +# **add_or_update_rating** +> add_or_update_rating(repository, node, rating, body) + +create or update a rating + +Adds the rating. If the current user already rated that element, the rating will be altered + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RATINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + rating = 3.4 # float | The rating (usually in range 1-5) + body = 'body_example' # str | Text content of rating + + try: + # create or update a rating + api_instance.add_or_update_rating(repository, node, rating, body) + except Exception as e: + print("Exception when calling RATINGV1Api->add_or_update_rating: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **rating** | **float**| The rating (usually in range 1-5) | + **body** | **str**| Text content of rating | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_rating** +> delete_rating(repository, node) + +delete a comment + +Delete the comment with the given id + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
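+# Editorial note: the summary above ("delete a comment") looks like a copy/paste
+# artefact of the spec; judging from the endpoint path (/rating/v1/ratings/...),
+# this call removes the rating that the current user placed on the given node.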
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RATINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # delete a comment + api_instance.delete_rating(repository, node) + except Exception as e: + print("Exception when calling RATINGV1Api->delete_rating: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_accumulated_ratings** +> str get_accumulated_ratings(repository, node, date_from=date_from) + +get the range of nodes which had tracked actions since a given timestamp + +requires admin + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
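+# Editorial note: the summary above repeats the wording of get_nodes_altered_in_range;
+# going by the method name and the /{node}/history path, this call appears to return
+# the accumulated rating history of the given node (requires admin, see above).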
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RATINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + date_from = 56 # int | date range from (optional) + + try: + # get the range of nodes which had tracked actions since a given timestamp + api_response = api_instance.get_accumulated_ratings(repository, node, date_from=date_from) + print("The response of RATINGV1Api->get_accumulated_ratings:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling RATINGV1Api->get_accumulated_ratings: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **date_from** | **int**| date range from | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_nodes_altered_in_range** +> str get_nodes_altered_in_range(repository, date_from) + +get the range of nodes which had tracked actions since a given timestamp + +requires admin + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
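+# The docs describe date_from only as "date range from"; the sketch below assumes an
+# epoch timestamp in milliseconds, which should be verified against the target instance:
+# import time
+# date_from = int((time.time() - 7 * 24 * 3600) * 1000)  # e.g. the last seven days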
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RATINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + date_from = 56 # int | date range from + + try: + # get the range of nodes which had tracked actions since a given timestamp + api_response = api_instance.get_nodes_altered_in_range(repository, date_from) + print("The response of RATINGV1Api->get_nodes_altered_in_range:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling RATINGV1Api->get_nodes_altered_in_range: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **date_from** | **int**| date range from | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/REGISTERV1Api.md b/edu_sharing_openapi/docs/REGISTERV1Api.md new file mode 100644 index 00000000..83298d9f --- /dev/null +++ b/edu_sharing_openapi/docs/REGISTERV1Api.md @@ -0,0 +1,406 @@ +# edu_sharing_client.REGISTERV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**activate**](REGISTERV1Api.md#activate) | **POST** /register/v1/activate/{key} | Activate a new user (by using a supplied key) +[**mail_exists**](REGISTERV1Api.md#mail_exists) | **GET** /register/v1/exists/{mail} | Check if the given mail is already successfully registered +[**recover_password**](REGISTERV1Api.md#recover_password) | **POST** /register/v1/recover/{mail} | Send a mail to recover/reset password +[**register**](REGISTERV1Api.md#register) | **POST** /register/v1/register | Register a new user +[**resend_mail**](REGISTERV1Api.md#resend_mail) | **POST** /register/v1/resend/{mail} | Resend a registration mail for a given mail address +[**reset_password**](REGISTERV1Api.md#reset_password) | **POST** /register/v1/reset/{key}/{password} | Send a mail to recover/reset password + + +# **activate** +> activate(key) + +Activate a new user (by using a supplied key) + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
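+# The activation key is not generated by this client; it is presumably taken from the
+# link contained in the registration mail (see register/resend_mail further below).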
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.REGISTERV1Api(api_client) + key = 'key_example' # str | The key for the user to activate + + try: + # Activate a new user (by using a supplied key) + api_instance.activate(key) + except Exception as e: + print("Exception when calling REGISTERV1Api->activate: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **key** | **str**| The key for the user to activate | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **mail_exists** +> RegisterExists mail_exists(mail) + +Check if the given mail is already successfully registered + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.register_exists import RegisterExists +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.REGISTERV1Api(api_client) + mail = 'mail_example' # str | The mail (authority) of the user to check + + try: + # Check if the given mail is already successfully registered + api_response = api_instance.mail_exists(mail) + print("The response of REGISTERV1Api->mail_exists:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling REGISTERV1Api->mail_exists: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **mail** | **str**| The mail (authority) of the user to check | + +### Return type + +[**RegisterExists**](RegisterExists.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **recover_password** +> recover_password(mail) + +Send a mail to recover/reset password + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.REGISTERV1Api(api_client) + mail = 'mail_example' # str | The mail (authority) of the user to recover + + try: + # Send a mail to recover/reset password + api_instance.recover_password(mail) + except Exception as e: + print("Exception when calling REGISTERV1Api->recover_password: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **mail** | **str**| The mail (authority) of the user to recover | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **register** +> register(register_information=register_information) + +Register a new user + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.register_information import RegisterInformation +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
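+# The request body below is created empty; in practice the registration fields would be
+# filled in, for example (a sketch, assuming the generated model accepts these keyword
+# names, cf. RegisterInformation.md):
+# register_information = edu_sharing_client.RegisterInformation(
+#     first_name="Jane", last_name="Doe", email="jane.doe@example.org", password="secret"
+# )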
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.REGISTERV1Api(api_client) + register_information = edu_sharing_client.RegisterInformation() # RegisterInformation | (optional) + + try: + # Register a new user + api_instance.register(register_information=register_information) + except Exception as e: + print("Exception when calling REGISTERV1Api->register: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **register_information** | [**RegisterInformation**](RegisterInformation.md)| | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **resend_mail** +> resend_mail(mail) + +Resend a registration mail for a given mail address + +The method will return false if there is no pending registration for the given mail + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.REGISTERV1Api(api_client) + mail = 'mail_example' # str | The mail a registration is pending for and should be resend to + + try: + # Resend a registration mail for a given mail address + api_instance.resend_mail(mail) + except Exception as e: + print("Exception when calling REGISTERV1Api->resend_mail: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **mail** | **str**| The mail a registration is pending for and should be resend to | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **reset_password** +> reset_password(key, password) + +Send a mail to recover/reset password + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.REGISTERV1Api(api_client) + key = 'key_example' # str | The key for the password reset request + password = 'password_example' # str | The new password for the user + + try: + # Send a mail to recover/reset password + api_instance.reset_password(key, password) + except Exception as e: + print("Exception when calling REGISTERV1Api->reset_password: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **key** | **str**| The key for the password reset request | + **password** | **str**| The new password for the user | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/RELATIONV1Api.md b/edu_sharing_openapi/docs/RELATIONV1Api.md new file mode 100644 index 00000000..30beb2f5 --- /dev/null +++ b/edu_sharing_openapi/docs/RELATIONV1Api.md @@ -0,0 +1,238 @@ +# edu_sharing_client.RELATIONV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**create_relation**](RELATIONV1Api.md#create_relation) | **PUT** /relation/v1/relation/{repository}/{source}/{type}/{target} | create a relation between nodes +[**delete_relation**](RELATIONV1Api.md#delete_relation) | **DELETE** /relation/v1/relation/{repository}/{source}/{type}/{target} | delete a relation between nodes +[**get_relations**](RELATIONV1Api.md#get_relations) | **GET** /relation/v1/relation/{repository}/{node} | get all relation of the node + + +# **create_relation** +> create_relation(repository, source, type, target) + +create a relation between nodes + +Creates a relation between two nodes of the given type. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
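+# Editorial note: the placeholder comments and the parameter table describe "type" as
+# an "ID of node", which looks like a copy/paste artefact of the spec; per the summary
+# above, "type" denotes the kind of relation to create between source and target. The
+# allowed relation type values are not listed in these docs.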
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RELATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + source = 'source_example' # str | ID of node + type = 'type_example' # str | ID of node + target = 'target_example' # str | ID of node + + try: + # create a relation between nodes + api_instance.create_relation(repository, source, type, target) + except Exception as e: + print("Exception when calling RELATIONV1Api->create_relation: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **source** | **str**| ID of node | + **type** | **str**| ID of node | + **target** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_relation** +> delete_relation(repository, source, type, target) + +delete a relation between nodes + +Delete a relation between two nodes of the given type. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RELATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + source = 'source_example' # str | ID of node + type = 'type_example' # str | ID of node + target = 'target_example' # str | ID of node + + try: + # delete a relation between nodes + api_instance.delete_relation(repository, source, type, target) + except Exception as e: + print("Exception when calling RELATIONV1Api->delete_relation: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **source** | **str**| ID of node | + **type** | **str**| ID of node | + **target** | **str**| ID of node | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_relations** +> NodeRelation get_relations(repository, node) + +get all relation of the node + +Returns all relations of the node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_relation import NodeRelation +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
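+# The response is a NodeRelation model (see NodeRelation.md); like the other generated
+# models it can be converted with api_response.to_dict() or api_response.to_json() if a
+# plain representation is needed.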
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RELATIONV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + + try: + # get all relation of the node + api_response = api_instance.get_relations(repository, node) + print("The response of RELATIONV1Api->get_relations:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling RELATIONV1Api->get_relations: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + +### Return type + +[**NodeRelation**](NodeRelation.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/RENDERINGV1Api.md b/edu_sharing_openapi/docs/RENDERINGV1Api.md new file mode 100644 index 00000000..06da670d --- /dev/null +++ b/edu_sharing_openapi/docs/RENDERINGV1Api.md @@ -0,0 +1,170 @@ +# edu_sharing_client.RENDERINGV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_details_snippet1**](RENDERINGV1Api.md#get_details_snippet1) | **GET** /rendering/v1/details/{repository}/{node} | Get metadata of node. +[**get_details_snippet_with_parameters**](RENDERINGV1Api.md#get_details_snippet_with_parameters) | **POST** /rendering/v1/details/{repository}/{node} | Get metadata of node. + + +# **get_details_snippet1** +> RenderingDetailsEntry get_details_snippet1(repository, node, version=version, display_mode=display_mode) + +Get metadata of node. + +Get metadata of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RENDERINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + version = 'version_example' # str | version of node (optional) + display_mode = 'display_mode_example' # str | Rendering displayMode (optional) + + try: + # Get metadata of node. + api_response = api_instance.get_details_snippet1(repository, node, version=version, display_mode=display_mode) + print("The response of RENDERINGV1Api->get_details_snippet1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling RENDERINGV1Api->get_details_snippet1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **version** | **str**| version of node | [optional] + **display_mode** | **str**| Rendering displayMode | [optional] + +### Return type + +[**RenderingDetailsEntry**](RenderingDetailsEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_details_snippet_with_parameters** +> RenderingDetailsEntry get_details_snippet_with_parameters(repository, node, version=version, display_mode=display_mode, request_body=request_body) + +Get metadata of node. + +Get metadata of node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.RENDERINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + version = 'version_example' # str | version of node (optional) + display_mode = 'display_mode_example' # str | Rendering displayMode (optional) + request_body = {'key': 'request_body_example'} # Dict[str, str] | additional parameters to send to the rendering service (optional) + + try: + # Get metadata of node. + api_response = api_instance.get_details_snippet_with_parameters(repository, node, version=version, display_mode=display_mode, request_body=request_body) + print("The response of RENDERINGV1Api->get_details_snippet_with_parameters:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling RENDERINGV1Api->get_details_snippet_with_parameters: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **version** | **str**| version of node | [optional] + **display_mode** | **str**| Rendering displayMode | [optional] + **request_body** | [**Dict[str, str]**](str.md)| additional parameters to send to the rendering service | [optional] + +### Return type + +[**RenderingDetailsEntry**](RenderingDetailsEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/RatingData.md b/edu_sharing_openapi/docs/RatingData.md new file mode 100644 index 00000000..0cd92c25 --- /dev/null +++ b/edu_sharing_openapi/docs/RatingData.md @@ -0,0 +1,31 @@ +# RatingData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**sum** | **float** | | [optional] +**count** | **int** | | [optional] +**rating** | **float** | | [optional] + +## Example + +```python +from edu_sharing_client.models.rating_data import RatingData + +# TODO update the JSON string below +json = "{}" +# create an instance of RatingData from a JSON string +rating_data_instance = RatingData.from_json(json) +# print the JSON string representation of the object +print(RatingData.to_json()) + +# convert the object into a dict +rating_data_dict = rating_data_instance.to_dict() +# create an instance of RatingData from a dict +rating_data_from_dict = RatingData.from_dict(rating_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RatingDetails.md b/edu_sharing_openapi/docs/RatingDetails.md new file mode 100644 index 00000000..edd1f0bc --- /dev/null +++ b/edu_sharing_openapi/docs/RatingDetails.md @@ -0,0 +1,31 @@ +# RatingDetails + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**overall** | [**RatingData**](RatingData.md) | | [optional] +**affiliation** | [**Dict[str, RatingData]**](RatingData.md) | | [optional] +**user** | **float** | | [optional] + +## Example + +```python +from edu_sharing_client.models.rating_details import RatingDetails + +# TODO update the JSON string below +json = "{}" +# create an instance of RatingDetails from a JSON string +rating_details_instance = RatingDetails.from_json(json) +# print the JSON string representation of the object +print(RatingDetails.to_json()) + +# convert the object into a dict +rating_details_dict = rating_details_instance.to_dict() +# create an instance of RatingDetails from a dict +rating_details_from_dict = RatingDetails.from_dict(rating_details_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RatingEventDTO.md b/edu_sharing_openapi/docs/RatingEventDTO.md new file mode 100644 index 00000000..55225ba0 --- /dev/null +++ b/edu_sharing_openapi/docs/RatingEventDTO.md @@ -0,0 +1,32 @@ +# RatingEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**new_rating** | **float** | | [optional] +**rating_sum** | **float** | | [optional] +**rating_count** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.rating_event_dto import RatingEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of RatingEventDTO from a JSON string +rating_event_dto_instance = RatingEventDTO.from_json(json) +# print the JSON string representation of the object +print(RatingEventDTO.to_json()) + +# convert the 
object into a dict +rating_event_dto_dict = rating_event_dto_instance.to_dict() +# create an instance of RatingEventDTO from a dict +rating_event_dto_from_dict = RatingEventDTO.from_dict(rating_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RatingHistory.md b/edu_sharing_openapi/docs/RatingHistory.md new file mode 100644 index 00000000..6091ef7a --- /dev/null +++ b/edu_sharing_openapi/docs/RatingHistory.md @@ -0,0 +1,31 @@ +# RatingHistory + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**overall** | [**RatingData**](RatingData.md) | | [optional] +**affiliation** | [**Dict[str, RatingData]**](RatingData.md) | | [optional] +**timestamp** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.rating_history import RatingHistory + +# TODO update the JSON string below +json = "{}" +# create an instance of RatingHistory from a JSON string +rating_history_instance = RatingHistory.from_json(json) +# print the JSON string representation of the object +print(RatingHistory.to_json()) + +# convert the object into a dict +rating_history_dict = rating_history_instance.to_dict() +# create an instance of RatingHistory from a dict +rating_history_from_dict = RatingHistory.from_dict(rating_history_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ReferenceEntries.md b/edu_sharing_openapi/docs/ReferenceEntries.md new file mode 100644 index 00000000..3d739ca7 --- /dev/null +++ b/edu_sharing_openapi/docs/ReferenceEntries.md @@ -0,0 +1,30 @@ +# ReferenceEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**pagination** | [**Pagination**](Pagination.md) | | [optional] +**references** | [**List[CollectionReference]**](CollectionReference.md) | | + +## Example + +```python +from edu_sharing_client.models.reference_entries import ReferenceEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of ReferenceEntries from a JSON string +reference_entries_instance = ReferenceEntries.from_json(json) +# print the JSON string representation of the object +print(ReferenceEntries.to_json()) + +# convert the object into a dict +reference_entries_dict = reference_entries_instance.to_dict() +# create an instance of ReferenceEntries from a dict +reference_entries_from_dict = ReferenceEntries.from_dict(reference_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Register.md b/edu_sharing_openapi/docs/Register.md new file mode 100644 index 00000000..3e4c249d --- /dev/null +++ b/edu_sharing_openapi/docs/Register.md @@ -0,0 +1,33 @@ +# Register + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**local** | **bool** | | [optional] +**recover_password** | **bool** | | [optional] +**login_url** | **str** | | [optional] +**recover_url** | **str** | | [optional] +**required_fields** | **List[str]** | | [optional] + +## Example + +```python +from 
edu_sharing_client.models.register import Register + +# TODO update the JSON string below +json = "{}" +# create an instance of Register from a JSON string +register_instance = Register.from_json(json) +# print the JSON string representation of the object +print(Register.to_json()) + +# convert the object into a dict +register_dict = register_instance.to_dict() +# create an instance of Register from a dict +register_from_dict = Register.from_dict(register_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RegisterExists.md b/edu_sharing_openapi/docs/RegisterExists.md new file mode 100644 index 00000000..5525e99b --- /dev/null +++ b/edu_sharing_openapi/docs/RegisterExists.md @@ -0,0 +1,29 @@ +# RegisterExists + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**exists** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.register_exists import RegisterExists + +# TODO update the JSON string below +json = "{}" +# create an instance of RegisterExists from a JSON string +register_exists_instance = RegisterExists.from_json(json) +# print the JSON string representation of the object +print(RegisterExists.to_json()) + +# convert the object into a dict +register_exists_dict = register_exists_instance.to_dict() +# create an instance of RegisterExists from a dict +register_exists_from_dict = RegisterExists.from_dict(register_exists_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RegisterInformation.md b/edu_sharing_openapi/docs/RegisterInformation.md new file mode 100644 index 00000000..46ad0d59 --- /dev/null +++ b/edu_sharing_openapi/docs/RegisterInformation.md @@ -0,0 +1,36 @@ +# RegisterInformation + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**vcard** | **str** | | [optional] +**first_name** | **str** | | [optional] +**last_name** | **str** | | [optional] +**email** | **str** | | [optional] +**password** | **str** | | [optional] +**organization** | **str** | | [optional] +**allow_notifications** | **bool** | | [optional] +**authority_name** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.register_information import RegisterInformation + +# TODO update the JSON string below +json = "{}" +# create an instance of RegisterInformation from a JSON string +register_information_instance = RegisterInformation.from_json(json) +# print the JSON string representation of the object +print(RegisterInformation.to_json()) + +# convert the object into a dict +register_information_dict = register_information_instance.to_dict() +# create an instance of RegisterInformation from a dict +register_information_from_dict = RegisterInformation.from_dict(register_information_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RegistrationUrl.md b/edu_sharing_openapi/docs/RegistrationUrl.md new file mode 100644 index 00000000..fc3abe4e --- /dev/null +++ b/edu_sharing_openapi/docs/RegistrationUrl.md @@ -0,0 +1,29 @@ +# 
RegistrationUrl + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.registration_url import RegistrationUrl + +# TODO update the JSON string below +json = "{}" +# create an instance of RegistrationUrl from a JSON string +registration_url_instance = RegistrationUrl.from_json(json) +# print the JSON string representation of the object +print(RegistrationUrl.to_json()) + +# convert the object into a dict +registration_url_dict = registration_url_instance.to_dict() +# create an instance of RegistrationUrl from a dict +registration_url_from_dict = RegistrationUrl.from_dict(registration_url_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RelationData.md b/edu_sharing_openapi/docs/RelationData.md new file mode 100644 index 00000000..dcbb82b5 --- /dev/null +++ b/edu_sharing_openapi/docs/RelationData.md @@ -0,0 +1,32 @@ +# RelationData + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**Node**](Node.md) | | [optional] +**creator** | [**User**](User.md) | | [optional] +**timestamp** | **datetime** | | [optional] +**type** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.relation_data import RelationData + +# TODO update the JSON string below +json = "{}" +# create an instance of RelationData from a JSON string +relation_data_instance = RelationData.from_json(json) +# print the JSON string representation of the object +print(RelationData.to_json()) + +# convert the object into a dict +relation_data_dict = relation_data_instance.to_dict() +# create an instance of RelationData from a dict +relation_data_from_dict = RelationData.from_dict(relation_data_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Remote.md b/edu_sharing_openapi/docs/Remote.md new file mode 100644 index 00000000..807871b2 --- /dev/null +++ b/edu_sharing_openapi/docs/Remote.md @@ -0,0 +1,30 @@ +# Remote + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repository** | [**Repo**](Repo.md) | | [optional] +**id** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.remote import Remote + +# TODO update the JSON string below +json = "{}" +# create an instance of Remote from a JSON string +remote_instance = Remote.from_json(json) +# print the JSON string representation of the object +print(Remote.to_json()) + +# convert the object into a dict +remote_dict = remote_instance.to_dict() +# create an instance of Remote from a dict +remote_from_dict = Remote.from_dict(remote_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RemoteAuthDescription.md b/edu_sharing_openapi/docs/RemoteAuthDescription.md new file mode 100644 index 00000000..c105b257 --- /dev/null +++ b/edu_sharing_openapi/docs/RemoteAuthDescription.md @@ -0,0 +1,30 @@ +# RemoteAuthDescription + + +## Properties + +Name | 
Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**url** | **str** | | [optional] +**token** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.remote_auth_description import RemoteAuthDescription + +# TODO update the JSON string below +json = "{}" +# create an instance of RemoteAuthDescription from a JSON string +remote_auth_description_instance = RemoteAuthDescription.from_json(json) +# print the JSON string representation of the object +print(RemoteAuthDescription.to_json()) + +# convert the object into a dict +remote_auth_description_dict = remote_auth_description_instance.to_dict() +# create an instance of RemoteAuthDescription from a dict +remote_auth_description_from_dict = RemoteAuthDescription.from_dict(remote_auth_description_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Rendering.md b/edu_sharing_openapi/docs/Rendering.md new file mode 100644 index 00000000..ce037cad --- /dev/null +++ b/edu_sharing_openapi/docs/Rendering.md @@ -0,0 +1,32 @@ +# Rendering + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**show_preview** | **bool** | | [optional] +**show_download_button** | **bool** | | [optional] +**prerender** | **bool** | | [optional] +**gdpr** | [**List[RenderingGdpr]**](RenderingGdpr.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.rendering import Rendering + +# TODO update the JSON string below +json = "{}" +# create an instance of Rendering from a JSON string +rendering_instance = Rendering.from_json(json) +# print the JSON string representation of the object +print(Rendering.to_json()) + +# convert the object into a dict +rendering_dict = rendering_instance.to_dict() +# create an instance of Rendering from a dict +rendering_from_dict = Rendering.from_dict(rendering_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RenderingDetailsEntry.md b/edu_sharing_openapi/docs/RenderingDetailsEntry.md new file mode 100644 index 00000000..3620c6d5 --- /dev/null +++ b/edu_sharing_openapi/docs/RenderingDetailsEntry.md @@ -0,0 +1,31 @@ +# RenderingDetailsEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**details_snippet** | **str** | | +**mime_type** | **str** | | +**node** | [**Node**](Node.md) | | + +## Example + +```python +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of RenderingDetailsEntry from a JSON string +rendering_details_entry_instance = RenderingDetailsEntry.from_json(json) +# print the JSON string representation of the object +print(RenderingDetailsEntry.to_json()) + +# convert the object into a dict +rendering_details_entry_dict = rendering_details_entry_instance.to_dict() +# create an instance of RenderingDetailsEntry from a dict +rendering_details_entry_from_dict = RenderingDetailsEntry.from_dict(rendering_details_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to 
README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RenderingGdpr.md b/edu_sharing_openapi/docs/RenderingGdpr.md new file mode 100644 index 00000000..495ea26a --- /dev/null +++ b/edu_sharing_openapi/docs/RenderingGdpr.md @@ -0,0 +1,31 @@ +# RenderingGdpr + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**matcher** | **str** | | [optional] +**name** | **str** | | [optional] +**privacy_information_url** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.rendering_gdpr import RenderingGdpr + +# TODO update the JSON string below +json = "{}" +# create an instance of RenderingGdpr from a JSON string +rendering_gdpr_instance = RenderingGdpr.from_json(json) +# print the JSON string representation of the object +print(RenderingGdpr.to_json()) + +# convert the object into a dict +rendering_gdpr_dict = rendering_gdpr_instance.to_dict() +# create an instance of RenderingGdpr from a dict +rendering_gdpr_from_dict = RenderingGdpr.from_dict(rendering_gdpr_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Repo.md b/edu_sharing_openapi/docs/Repo.md new file mode 100644 index 00000000..cf468e21 --- /dev/null +++ b/edu_sharing_openapi/docs/Repo.md @@ -0,0 +1,35 @@ +# Repo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repository_type** | **str** | | [optional] +**rendering_supported** | **bool** | | [optional] +**id** | **str** | | [optional] +**title** | **str** | | [optional] +**icon** | **str** | | [optional] +**logo** | **str** | | [optional] +**is_home_repo** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.repo import Repo + +# TODO update the JSON string below +json = "{}" +# create an instance of Repo from a JSON string +repo_instance = Repo.from_json(json) +# print the JSON string representation of the object +print(Repo.to_json()) + +# convert the object into a dict +repo_dict = repo_instance.to_dict() +# create an instance of Repo from a dict +repo_from_dict = Repo.from_dict(repo_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RepoEntries.md b/edu_sharing_openapi/docs/RepoEntries.md new file mode 100644 index 00000000..79e34194 --- /dev/null +++ b/edu_sharing_openapi/docs/RepoEntries.md @@ -0,0 +1,29 @@ +# RepoEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repositories** | [**List[Repo]**](Repo.md) | | + +## Example + +```python +from edu_sharing_client.models.repo_entries import RepoEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of RepoEntries from a JSON string +repo_entries_instance = RepoEntries.from_json(json) +# print the JSON string representation of the object +print(RepoEntries.to_json()) + +# convert the object into a dict +repo_entries_dict = repo_entries_instance.to_dict() +# create an instance of RepoEntries from a dict +repo_entries_from_dict = RepoEntries.from_dict(repo_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) 
[[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RepositoryConfig.md b/edu_sharing_openapi/docs/RepositoryConfig.md new file mode 100644 index 00000000..966cbaab --- /dev/null +++ b/edu_sharing_openapi/docs/RepositoryConfig.md @@ -0,0 +1,29 @@ +# RepositoryConfig + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**frontpage** | [**Frontpage**](Frontpage.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.repository_config import RepositoryConfig + +# TODO update the JSON string below +json = "{}" +# create an instance of RepositoryConfig from a JSON string +repository_config_instance = RepositoryConfig.from_json(json) +# print the JSON string representation of the object +print(RepositoryConfig.to_json()) + +# convert the object into a dict +repository_config_dict = repository_config_instance.to_dict() +# create an instance of RepositoryConfig from a dict +repository_config_from_dict = RepositoryConfig.from_dict(repository_config_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RepositoryVersionInfo.md b/edu_sharing_openapi/docs/RepositoryVersionInfo.md new file mode 100644 index 00000000..b92233d0 --- /dev/null +++ b/edu_sharing_openapi/docs/RepositoryVersionInfo.md @@ -0,0 +1,32 @@ +# RepositoryVersionInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**version** | [**Version**](Version.md) | | [optional] +**maven** | [**VersionMaven**](VersionMaven.md) | | [optional] +**git** | [**VersionGit**](VersionGit.md) | | [optional] +**build** | [**VersionBuild**](VersionBuild.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.repository_version_info import RepositoryVersionInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of RepositoryVersionInfo from a JSON string +repository_version_info_instance = RepositoryVersionInfo.from_json(json) +# print the JSON string representation of the object +print(RepositoryVersionInfo.to_json()) + +# convert the object into a dict +repository_version_info_dict = repository_version_info_instance.to_dict() +# create an instance of RepositoryVersionInfo from a dict +repository_version_info_from_dict = RepositoryVersionInfo.from_dict(repository_version_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RestoreResult.md b/edu_sharing_openapi/docs/RestoreResult.md new file mode 100644 index 00000000..3ff2ac28 --- /dev/null +++ b/edu_sharing_openapi/docs/RestoreResult.md @@ -0,0 +1,34 @@ +# RestoreResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**archive_node_id** | **str** | | +**node_id** | **str** | | +**parent** | **str** | | +**path** | **str** | | +**name** | **str** | | +**restore_status** | **str** | | + +## Example + +```python +from edu_sharing_client.models.restore_result import RestoreResult + +# TODO update the JSON string below +json = "{}" +# create an instance of RestoreResult from a JSON string +restore_result_instance = RestoreResult.from_json(json) +# print the JSON string representation of the 
object +print(RestoreResult.to_json()) + +# convert the object into a dict +restore_result_dict = restore_result_instance.to_dict() +# create an instance of RestoreResult from a dict +restore_result_from_dict = RestoreResult.from_dict(restore_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/RestoreResults.md b/edu_sharing_openapi/docs/RestoreResults.md new file mode 100644 index 00000000..094e6800 --- /dev/null +++ b/edu_sharing_openapi/docs/RestoreResults.md @@ -0,0 +1,29 @@ +# RestoreResults + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**results** | [**List[RestoreResult]**](RestoreResult.md) | | + +## Example + +```python +from edu_sharing_client.models.restore_results import RestoreResults + +# TODO update the JSON string below +json = "{}" +# create an instance of RestoreResults from a JSON string +restore_results_instance = RestoreResults.from_json(json) +# print the JSON string representation of the object +print(RestoreResults.to_json()) + +# convert the object into a dict +restore_results_dict = restore_results_instance.to_dict() +# create an instance of RestoreResults from a dict +restore_results_from_dict = RestoreResults.from_dict(restore_results_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SEARCHV1Api.md b/edu_sharing_openapi/docs/SEARCHV1Api.md new file mode 100644 index 00000000..5eeb1fb3 --- /dev/null +++ b/edu_sharing_openapi/docs/SEARCHV1Api.md @@ -0,0 +1,860 @@ +# edu_sharing_client.SEARCHV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_metdata**](SEARCHV1Api.md#get_metdata) | **GET** /search/v1/metadata/{repository} | get nodes with metadata and collections +[**get_relevant_nodes**](SEARCHV1Api.md#get_relevant_nodes) | **GET** /search/v1/relevant/{repository} | Get relevant nodes for the current user +[**load_save_search**](SEARCHV1Api.md#load_save_search) | **GET** /search/v1/queries/load/{nodeId} | Load a saved search query. +[**save_search**](SEARCHV1Api.md#save_search) | **POST** /search/v1/queries/{repository}/{metadataset}/{query}/save | Save a search query. +[**search**](SEARCHV1Api.md#search) | **POST** /search/v1/queries/{repository}/{metadataset}/{query} | Perform queries based on metadata sets. +[**search_by_property**](SEARCHV1Api.md#search_by_property) | **GET** /search/v1/custom/{repository} | Search for custom properties with custom values +[**search_contributor**](SEARCHV1Api.md#search_contributor) | **GET** /search/v1/queries/{repository}/contributor | Search for contributors +[**search_facets**](SEARCHV1Api.md#search_facets) | **POST** /search/v1/queries/{repository}/{metadataset}/{query}/facets | Search in facets. +[**search_fingerprint**](SEARCHV1Api.md#search_fingerprint) | **POST** /search/v1/queries/{repository}/fingerprint/{nodeid} | Perform queries based on metadata sets. +[**search_lrmi**](SEARCHV1Api.md#search_lrmi) | **POST** /search/v1/queries/{repository}/{metadataset}/{query}/lrmi | Perform queries based on metadata sets. 
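+
+The per-method examples below instantiate empty parameter objects. For orientation, a fleshed-out `search` call might look like the following sketch. The criterion name `ngsearchword`, the query id `ngsearch` and the keyword-argument construction of the generated pydantic models are assumptions based on the default edu-sharing metadata set, not part of this generated reference; adjust them to your repository.
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria
+from edu_sharing_client.models.search_parameters import SearchParameters
+
+configuration = edu_sharing_client.Configuration(
+    host="https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    api_instance = edu_sharing_client.SEARCHV1Api(api_client)
+
+    # One free-text criterion; 'ngsearchword' is the usual free-text field of the
+    # default metadata set and is an assumption here.
+    params = SearchParameters(
+        criteria=[MdsQueryCriteria(var_property="ngsearchword", values=["physics"])]
+    )
+
+    result = api_instance.search("-home-", "-default-", "ngsearch", params, max_items=10)
+    print(result.pagination)  # paging info for the result set
+    for node in result.nodes:
+        print(node.name)
+```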
+ + +# **get_metdata** +> NodeEntries get_metdata(repository, node_ids=node_ids, property_filter=property_filter) + +get nodes with metadata and collections + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node_ids = ['node_ids_example'] # List[str] | nodeIds (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # get nodes with metadata and collections + api_response = api_instance.get_metdata(repository, node_ids=node_ids, property_filter=property_filter) + print("The response of SEARCHV1Api->get_metdata:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->get_metdata: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node_ids** | [**List[str]**](str.md)| nodeIds | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**NodeEntries**](NodeEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_relevant_nodes** +> SearchResultNode get_relevant_nodes(repository, property_filter=property_filter, max_items=max_items, skip_count=skip_count) + +Get relevant nodes for the current user + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result_node import SearchResultNode +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + + try: + # Get relevant nodes for the current user + api_response = api_instance.get_relevant_nodes(repository, property_filter=property_filter, max_items=max_items, skip_count=skip_count) + print("The response of SEARCHV1Api->get_relevant_nodes:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->get_relevant_nodes: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + +### Return type + +[**SearchResultNode**](SearchResultNode.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **load_save_search** +> Node load_save_search(node_id, content_type=content_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter, request_body=request_body) + +Load a saved search query. + +Load a saved search query. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node import Node +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + node_id = 'node_id_example' # str | Node id of the search item + content_type = 'content_type_example' # str | Type of element (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + request_body = ['request_body_example'] # List[str] | facets (optional) + + try: + # Load a saved search query. + api_response = api_instance.load_save_search(node_id, content_type=content_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter, request_body=request_body) + print("The response of SEARCHV1Api->load_save_search:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->load_save_search: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_id** | **str**| Node id of the search item | + **content_type** | **str**| Type of element | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + **request_body** | [**List[str]**](str.md)| facets | [optional] + +### Return type + +[**Node**](Node.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **save_search** +> NodeEntry save_search(repository, metadataset, query, name, mds_query_criteria, replace=replace) + +Save a search query. + +Save a search query. 
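+
+The returned `NodeEntry` wraps an ordinary node, so a saved query can later be re-executed with `load_save_search` (documented above). A minimal round-trip sketch; the query id `ngsearch`, the criterion name `ngsearchword` and the `saved.node.ref.id` access path are assumptions mirroring the other generated models:
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria
+
+configuration = edu_sharing_client.Configuration(
+    host="https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    api_instance = edu_sharing_client.SEARCHV1Api(api_client)
+
+    criteria = [MdsQueryCriteria(var_property="ngsearchword", values=["physics"])]
+    saved = api_instance.save_search("-home-", "-default-", "ngsearch", "my physics search",
+                                     criteria, replace=True)
+
+    # Re-run the stored query later; the node id is assumed to live at node.ref.id.
+    reloaded = api_instance.load_save_search(saved.node.ref.id, max_items=10)
+```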
+ +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + query = 'query_example' # str | ID of query + name = 'name_example' # str | Name of the new search item + mds_query_criteria = [edu_sharing_client.MdsQueryCriteria()] # List[MdsQueryCriteria] | search parameters + replace = False # bool | Replace if search with the same name exists (optional) (default to False) + + try: + # Save a search query. + api_response = api_instance.save_search(repository, metadataset, query, name, mds_query_criteria, replace=replace) + print("The response of SEARCHV1Api->save_search:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->save_search: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + **query** | **str**| ID of query | + **name** | **str**| Name of the new search item | + **mds_query_criteria** | [**List[MdsQueryCriteria]**](MdsQueryCriteria.md)| search parameters | + **replace** | **bool**| Replace if search with the same name exists | [optional] [default to False] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search** +> SearchResultNode search(repository, metadataset, query, search_parameters, content_type=content_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Perform queries based on metadata sets. + +Perform queries based on metadata sets. 
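+
+Results are paged via `skip_count` and `max_items`; fetching everything therefore means looping until fewer nodes than requested come back. A rough paging sketch, with the same assumptions about `SearchParameters`, `MdsQueryCriteria` and the result fields as above:
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria
+from edu_sharing_client.models.search_parameters import SearchParameters
+
+configuration = edu_sharing_client.Configuration(
+    host="https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    api_instance = edu_sharing_client.SEARCHV1Api(api_client)
+    params = SearchParameters(
+        criteria=[MdsQueryCriteria(var_property="ngsearchword", values=["physics"])]
+    )
+
+    page_size, skip = 50, 0
+    all_nodes = []
+    while True:
+        result = api_instance.search("-home-", "-default-", "ngsearch", params,
+                                     max_items=page_size, skip_count=skip)
+        all_nodes.extend(result.nodes)
+        if len(result.nodes) < page_size:
+            break  # last page reached
+        skip += page_size
+    print(f"fetched {len(all_nodes)} nodes")
+```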
+ +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_parameters import SearchParameters +from edu_sharing_client.models.search_result_node import SearchResultNode +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + query = 'query_example' # str | ID of query + search_parameters = edu_sharing_client.SearchParameters() # SearchParameters | search parameters + content_type = 'content_type_example' # str | Type of element (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Perform queries based on metadata sets. + api_response = api_instance.search(repository, metadataset, query, search_parameters, content_type=content_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of SEARCHV1Api->search:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->search: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + **query** | **str**| ID of query | + **search_parameters** | [**SearchParameters**](SearchParameters.md)| search parameters | + **content_type** | **str**| Type of element | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**SearchResultNode**](SearchResultNode.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_by_property** +> SearchResultNode search_by_property(repository, content_type=content_type, combine_mode=combine_mode, var_property=var_property, value=value, comparator=comparator, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Search for custom properties with custom values + +e.g. property=cm:name, value:*Test* + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result_node import SearchResultNode +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + content_type = 'content_type_example' # str | Type of element (optional) + combine_mode = 'combine_mode_example' # str | Combine mode, AND or OR, defaults to AND (optional) + var_property = ['var_property_example'] # List[str] | One (or more) properties to search for, will be combined by specified combine mode (optional) + value = ['value_example'] # List[str] | One (or more) values to search for, matching the properties defined before (optional) + comparator = ['comparator_example'] # List[str] | (Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >= (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Search for custom properties with custom values + api_response = api_instance.search_by_property(repository, content_type=content_type, combine_mode=combine_mode, var_property=var_property, value=value, comparator=comparator, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of SEARCHV1Api->search_by_property:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->search_by_property: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **content_type** | **str**| Type of element | [optional] + **combine_mode** | **str**| Combine mode, AND or OR, defaults to AND | [optional] + **var_property** | [**List[str]**](str.md)| One (or more) properties to search for, will be combined by specified combine mode | [optional] + **value** | [**List[str]**](str.md)| One (or more) values to search for, matching the properties defined before | [optional] + **comparator** | [**List[str]**](str.md)| (Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >= | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**SearchResultNode**](SearchResultNode.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_contributor** +> str search_contributor(repository, search_word, contributor_kind, fields=fields, contributor_properties=contributor_properties) + +Search for contributors + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + search_word = 'search_word_example' # str | search word + contributor_kind = PERSON # str | contributor kind (default to PERSON) + fields = ['fields_example'] # List[str] | define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url'] (optional) + contributor_properties = ['contributor_properties_example'] # List[str] | define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator'] (optional) + + try: + # Search for contributors + api_response = api_instance.search_contributor(repository, search_word, contributor_kind, fields=fields, contributor_properties=contributor_properties) + print("The response of SEARCHV1Api->search_contributor:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->search_contributor: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **search_word** | **str**| search word | + **contributor_kind** | **str**| contributor kind | [default to PERSON] + **fields** | [**List[str]**](str.md)| define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url'] | [optional] + **contributor_properties** | [**List[str]**](str.md)| define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator'] | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_facets** +> SearchResultNode search_facets(repository, metadataset, query, search_parameters_facets) + +Search in facets. + +Perform queries based on metadata sets. 
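+
+Facet queries need both the facet properties to aggregate on and the criteria that restrict the result set. A minimal sketch; the `facets`/`criteria` field names, the facet property `ccm:educationalcontext` and the `ngsearchword` criterion are assumptions to be checked against the `SearchParametersFacets` model and your metadata set:
+
+```python
+import edu_sharing_client
+from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria
+from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets
+
+configuration = edu_sharing_client.Configuration(
+    host="https://stable.demo.edu-sharing.net/edu-sharing/rest"
+)
+
+with edu_sharing_client.ApiClient(configuration) as api_client:
+    api_instance = edu_sharing_client.SEARCHV1Api(api_client)
+
+    facet_params = SearchParametersFacets(
+        facets=["ccm:educationalcontext"],
+        criteria=[MdsQueryCriteria(var_property="ngsearchword", values=["physics"])],
+    )
+    result = api_instance.search_facets("-home-", "-default-", "ngsearch", facet_params)
+    print(result.facets)  # assumed: facet buckets are returned on the result object
+```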
+ +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets +from edu_sharing_client.models.search_result_node import SearchResultNode +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + query = 'query_example' # str | ID of query + search_parameters_facets = edu_sharing_client.SearchParametersFacets() # SearchParametersFacets | facet parameters + + try: + # Search in facets. + api_response = api_instance.search_facets(repository, metadataset, query, search_parameters_facets) + print("The response of SEARCHV1Api->search_facets:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->search_facets: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + **query** | **str**| ID of query | + **search_parameters_facets** | [**SearchParametersFacets**](SearchParametersFacets.md)| facet parameters | + +### Return type + +[**SearchResultNode**](SearchResultNode.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_fingerprint** +> SearchResultNode search_fingerprint(repository, nodeid, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Perform queries based on metadata sets. + +Perform queries based on metadata sets. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_result_node import SearchResultNode +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + nodeid = 'nodeid_example' # str | nodeid + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Perform queries based on metadata sets. + api_response = api_instance.search_fingerprint(repository, nodeid, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of SEARCHV1Api->search_fingerprint:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->search_fingerprint: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **nodeid** | **str**| nodeid | + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**SearchResultNode**](SearchResultNode.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search_lrmi** +> SearchResultLrmi search_lrmi(repository, metadataset, query, search_parameters, content_type=content_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + +Perform queries based on metadata sets. + +Perform queries based on metadata sets. 
+ +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.search_parameters import SearchParameters +from edu_sharing_client.models.search_result_lrmi import SearchResultLrmi +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SEARCHV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + metadataset = '-default-' # str | ID of metadataset (or \"-default-\" for default metadata set) (default to '-default-') + query = 'query_example' # str | ID of query + search_parameters = edu_sharing_client.SearchParameters() # SearchParameters | search parameters + content_type = 'content_type_example' # str | Type of element (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + property_filter = ['property_filter_example'] # List[str] | property filter for result nodes (or \"-all-\" for all properties) (optional) + + try: + # Perform queries based on metadata sets. + api_response = api_instance.search_lrmi(repository, metadataset, query, search_parameters, content_type=content_type, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, property_filter=property_filter) + print("The response of SEARCHV1Api->search_lrmi:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SEARCHV1Api->search_lrmi: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **metadataset** | **str**| ID of metadataset (or \"-default-\" for default metadata set) | [default to '-default-'] + **query** | **str**| ID of query | + **search_parameters** | [**SearchParameters**](SearchParameters.md)| search parameters | + **content_type** | **str**| Type of element | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index | [optional] + **property_filter** | [**List[str]**](str.md)| property filter for result nodes (or \"-all-\" for all properties) | [optional] + +### Return type + +[**SearchResultLrmi**](SearchResultLrmi.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/SHARINGV1Api.md b/edu_sharing_openapi/docs/SHARINGV1Api.md new file mode 100644 index 00000000..cbbd0bb0 --- /dev/null +++ b/edu_sharing_openapi/docs/SHARINGV1Api.md @@ -0,0 +1,174 @@ +# edu_sharing_client.SHARINGV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get_children1**](SHARINGV1Api.md#get_children1) | **GET** /sharing/v1/sharing/{repository}/{node}/{share}/children | Get all children of this share. +[**get_info**](SHARINGV1Api.md#get_info) | **GET** /sharing/v1/sharing/{repository}/{node}/{share} | Get general info of a share. + + +# **get_children1** +> NodeEntries get_children1(repository, node, share, password=password, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + +Get all children of this share. + +Only valid for shared folders + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SHARINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + share = 'share_example' # str | Share token + password = 'password_example' # str | Password (required if share is locked) (optional) + max_items = 500 # int | maximum items per page (optional) (default to 500) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + + try: + # Get all children of this share. 
+ api_response = api_instance.get_children1(repository, node, share, password=password, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending) + print("The response of SHARINGV1Api->get_children1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SHARINGV1Api->get_children1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **share** | **str**| Share token | + **password** | **str**| Password (required if share is locked) | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 500] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + +### Return type + +[**NodeEntries**](NodeEntries.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_info** +> SharingInfo get_info(repository, node, share, password=password) + +Get general info of a share. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.sharing_info import SharingInfo +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.SHARINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | ID of node + share = 'share_example' # str | Share token + password = 'password_example' # str | Password to validate (optional) (optional) + + try: + # Get general info of a share. 
+ api_response = api_instance.get_info(repository, node, share, password=password) + print("The response of SHARINGV1Api->get_info:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling SHARINGV1Api->get_info: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| ID of node | + **share** | **str**| Share token | + **password** | **str**| Password to validate (optional) | [optional] + +### Return type + +[**SharingInfo**](SharingInfo.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/STATISTICV1Api.md b/edu_sharing_openapi/docs/STATISTICV1Api.md new file mode 100644 index 00000000..44eeb2bf --- /dev/null +++ b/edu_sharing_openapi/docs/STATISTICV1Api.md @@ -0,0 +1,478 @@ +# edu_sharing_client.STATISTICV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**get**](STATISTICV1Api.md#get) | **POST** /statistic/v1/facets/{context} | Get statistics of repository. +[**get_global_statistics**](STATISTICV1Api.md#get_global_statistics) | **GET** /statistic/v1/public | Get stats. +[**get_node_data**](STATISTICV1Api.md#get_node_data) | **GET** /statistic/v1/statistics/nodes/node/{id} | get the range of nodes which had tracked actions since a given timestamp +[**get_nodes_altered_in_range1**](STATISTICV1Api.md#get_nodes_altered_in_range1) | **GET** /statistic/v1/statistics/nodes/altered | get the range of nodes which had tracked actions since a given timestamp +[**get_statistics_node**](STATISTICV1Api.md#get_statistics_node) | **POST** /statistic/v1/statistics/nodes | get statistics for node actions +[**get_statistics_user**](STATISTICV1Api.md#get_statistics_user) | **POST** /statistic/v1/statistics/users | get statistics for user actions (login, logout) + + +# **get** +> Statistics get(context, filter, properties=properties) + +Get statistics of repository. + +Statistics. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.filter import Filter +from edu_sharing_client.models.statistics import Statistics +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STATISTICV1Api(api_client) + context = '-root-' # str | context, the node where to start (default to '-root-') + filter = edu_sharing_client.Filter() # Filter | filter + properties = ['properties_example'] # List[str] | properties (optional) + + try: + # Get statistics of repository. + api_response = api_instance.get(context, filter, properties=properties) + print("The response of STATISTICV1Api->get:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STATISTICV1Api->get: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **context** | **str**| context, the node where to start | [default to '-root-'] + **filter** | [**Filter**](Filter.md)| filter | + **properties** | [**List[str]**](str.md)| properties | [optional] + +### Return type + +[**Statistics**](Statistics.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_global_statistics** +> StatisticsGlobal get_global_statistics(group=group, sub_group=sub_group) + +Get stats. + +Get global statistics for this repository. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.statistics_global import StatisticsGlobal +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STATISTICV1Api(api_client) + group = 'group_example' # str | primary property to build facets and count+group values (optional) + sub_group = ['sub_group_example'] # List[str] | additional properties to build facets and count+sub-group values (optional) + + try: + # Get stats. 
+ api_response = api_instance.get_global_statistics(group=group, sub_group=sub_group) + print("The response of STATISTICV1Api->get_global_statistics:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STATISTICV1Api->get_global_statistics: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **group** | **str**| primary property to build facets and count+group values | [optional] + **sub_group** | [**List[str]**](str.md)| additional properties to build facets and count+sub-group values | [optional] + +### Return type + +[**StatisticsGlobal**](StatisticsGlobal.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**401** | Authorization failed. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_node_data** +> str get_node_data(id, date_from) + +get the range of nodes which had tracked actions since a given timestamp + +requires admin + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STATISTICV1Api(api_client) + id = 'id_example' # str | node id to fetch data for + date_from = 56 # int | date range from + + try: + # get the range of nodes which had tracked actions since a given timestamp + api_response = api_instance.get_node_data(id, date_from) + print("The response of STATISTICV1Api->get_node_data:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STATISTICV1Api->get_node_data: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **id** | **str**| node id to fetch data for | + **date_from** | **int**| date range from | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_nodes_altered_in_range1** +> str get_nodes_altered_in_range1(date_from) + +get the range of nodes which had tracked actions since a given timestamp + +requires admin + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STATISTICV1Api(api_client) + date_from = 56 # int | date range from + + try: + # get the range of nodes which had tracked actions since a given timestamp + api_response = api_instance.get_nodes_altered_in_range1(date_from) + print("The response of STATISTICV1Api->get_nodes_altered_in_range1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STATISTICV1Api->get_nodes_altered_in_range1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **date_from** | **int**| date range from | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_statistics_node** +> str get_statistics_node(grouping, date_from, date_to, mediacenter=mediacenter, additional_fields=additional_fields, group_field=group_field, request_body=request_body) + +get statistics for node actions + +requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_NODES for global stats or to be admin of the requested mediacenter + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
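+# Note: although the generated "Authorization" section below says "No authorization
+# required", this endpoint needs an authenticated session (admin of the requested
+# mediacenter or the TOOLPERMISSION_GLOBAL_STATISTICS_NODES toolpermission, see above).
+# With a typical openapi-generator client, HTTP basic auth credentials can be passed as
+# Configuration(username=..., password=...); this is an assumption, check configuration.py.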
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STATISTICV1Api(api_client) + grouping = 'grouping_example' # str | Grouping type (by date) + date_from = 56 # int | date range from + date_to = 56 # int | date range to + mediacenter = 'mediacenter_example' # str | the mediacenter to filter for statistics (optional) + additional_fields = ['additional_fields_example'] # List[str] | additionals fields of the custom json object stored in each query that should be returned (optional) + group_field = ['group_field_example'] # List[str] | grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) (optional) + request_body = {'key': 'request_body_example'} # Dict[str, str] | filters for the custom json object stored in each entry (optional) + + try: + # get statistics for node actions + api_response = api_instance.get_statistics_node(grouping, date_from, date_to, mediacenter=mediacenter, additional_fields=additional_fields, group_field=group_field, request_body=request_body) + print("The response of STATISTICV1Api->get_statistics_node:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STATISTICV1Api->get_statistics_node: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **grouping** | **str**| Grouping type (by date) | + **date_from** | **int**| date range from | + **date_to** | **int**| date range to | + **mediacenter** | **str**| the mediacenter to filter for statistics | [optional] + **additional_fields** | [**List[str]**](str.md)| additionals fields of the custom json object stored in each query that should be returned | [optional] + **group_field** | [**List[str]**](str.md)| grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) | [optional] + **request_body** | [**Dict[str, str]**](str.md)| filters for the custom json object stored in each entry | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_statistics_user** +> str get_statistics_user(grouping, date_from, date_to, mediacenter=mediacenter, additional_fields=additional_fields, group_field=group_field, request_body=request_body) + +get statistics for user actions (login, logout) + +requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_USER for global stats or to be admin of the requested mediacenter + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STATISTICV1Api(api_client) + grouping = 'grouping_example' # str | Grouping type (by date) + date_from = 56 # int | date range from + date_to = 56 # int | date range to + mediacenter = 'mediacenter_example' # str | the mediacenter to filter for statistics (optional) + additional_fields = ['additional_fields_example'] # List[str] | additionals fields of the custom json object stored in each query that should be returned (optional) + group_field = ['group_field_example'] # List[str] | grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) (optional) + request_body = {'key': 'request_body_example'} # Dict[str, str] | filters for the custom json object stored in each entry (optional) + + try: + # get statistics for user actions (login, logout) + api_response = api_instance.get_statistics_user(grouping, date_from, date_to, mediacenter=mediacenter, additional_fields=additional_fields, group_field=group_field, request_body=request_body) + print("The response of STATISTICV1Api->get_statistics_user:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STATISTICV1Api->get_statistics_user: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **grouping** | **str**| Grouping type (by date) | + **date_from** | **int**| date range from | + **date_to** | **int**| date range to | + **mediacenter** | **str**| the mediacenter to filter for statistics | [optional] + **additional_fields** | [**List[str]**](str.md)| additionals fields of the custom json object stored in each query that should be returned | [optional] + **group_field** | [**List[str]**](str.md)| grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) | [optional] + **request_body** | [**Dict[str, str]**](str.md)| filters for the custom json object stored in each entry | [optional] + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. 
| - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/STREAMV1Api.md b/edu_sharing_openapi/docs/STREAMV1Api.md new file mode 100644 index 00000000..cafafeb2 --- /dev/null +++ b/edu_sharing_openapi/docs/STREAMV1Api.md @@ -0,0 +1,464 @@ +# edu_sharing_client.STREAMV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**add_entry**](STREAMV1Api.md#add_entry) | **PUT** /stream/v1/add/{repository} | add a new stream object. +[**can_access**](STREAMV1Api.md#can_access) | **GET** /stream/v1/access/{repository}/{node} | test +[**delete_entry**](STREAMV1Api.md#delete_entry) | **DELETE** /stream/v1/delete/{repository}/{entry} | delete a stream object +[**get_property_values**](STREAMV1Api.md#get_property_values) | **GET** /stream/v1/properties/{repository}/{property} | Get top values for a property +[**search1**](STREAMV1Api.md#search1) | **POST** /stream/v1/search/{repository} | Get the stream content for the current user with the given status. +[**update_entry**](STREAMV1Api.md#update_entry) | **PUT** /stream/v1/status/{repository}/{entry} | update status for a stream object and authority + + +# **add_entry** +> StreamEntryInput add_entry(repository, stream_entry_input) + +add a new stream object. + +will return the object and add the id to the object if creation succeeded + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.stream_entry_input import StreamEntryInput +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STREAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + stream_entry_input = edu_sharing_client.StreamEntryInput() # StreamEntryInput | Stream object to add + + try: + # add a new stream object. 
+ api_response = api_instance.add_entry(repository, stream_entry_input) + print("The response of STREAMV1Api->add_entry:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STREAMV1Api->add_entry: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **stream_entry_input** | [**StreamEntryInput**](StreamEntryInput.md)| Stream object to add | + +### Return type + +[**StreamEntryInput**](StreamEntryInput.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **can_access** +> str can_access(repository, node) + +test + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STREAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + node = 'node_example' # str | The property to aggregate + + try: + # test + api_response = api_instance.can_access(repository, node) + print("The response of STREAMV1Api->can_access:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STREAMV1Api->can_access: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **node** | **str**| The property to aggregate | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **delete_entry** +> delete_entry(repository, entry) + +delete a stream object + +the current user must be author of the given stream object + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STREAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + entry = 'entry_example' # str | entry id to delete + + try: + # delete a stream object + api_instance.delete_entry(repository, entry) + except Exception as e: + print("Exception when calling STREAMV1Api->delete_entry: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **entry** | **str**| entry id to delete | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_property_values** +> str get_property_values(repository, var_property) + +Get top values for a property + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STREAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + var_property = 'var_property_example' # str | The property to aggregate + + try: + # Get top values for a property + api_response = api_instance.get_property_values(repository, var_property) + print("The response of STREAMV1Api->get_property_values:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STREAMV1Api->get_property_values: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **var_property** | **str**| The property to aggregate | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **search1** +> StreamList search1(repository, status=status, query=query, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, request_body=request_body) + +Get the stream content for the current user with the given status. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.stream_list import StreamList +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STREAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + status = 'status_example' # str | Stream object status to search for (optional) + query = 'query_example' # str | generic text to search for (in title or description) (optional) + max_items = 10 # int | maximum items per page (optional) (default to 10) + skip_count = 0 # int | skip a number of items (optional) (default to 0) + sort_properties = ['sort_properties_example'] # List[str] | sort properties, currently supported: created, priority, default: priority desc, created desc (optional) + sort_ascending = [True] # List[bool] | sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index (optional) + request_body = {'key': 'request_body_example'} # Dict[str, str] | map with property + value to search (optional) + + try: + # Get the stream content for the current user with the given status. + api_response = api_instance.search1(repository, status=status, query=query, max_items=max_items, skip_count=skip_count, sort_properties=sort_properties, sort_ascending=sort_ascending, request_body=request_body) + print("The response of STREAMV1Api->search1:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling STREAMV1Api->search1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **status** | **str**| Stream object status to search for | [optional] + **query** | **str**| generic text to search for (in title or description) | [optional] + **max_items** | **int**| maximum items per page | [optional] [default to 10] + **skip_count** | **int**| skip a number of items | [optional] [default to 0] + **sort_properties** | [**List[str]**](str.md)| sort properties, currently supported: created, priority, default: priority desc, created desc | [optional] + **sort_ascending** | [**List[bool]**](bool.md)| sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index | [optional] + **request_body** | [**Dict[str, str]**](str.md)| map with property + value to search | [optional] + +### Return type + +[**StreamList**](StreamList.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **update_entry** +> update_entry(repository, entry, authority, status) + +update status for a stream object and authority + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.STREAMV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + entry = 'entry_example' # str | entry id to update + authority = 'authority_example' # str | authority to set/change status + status = 'status_example' # str | New status for this authority + + try: + # update status for a stream object and authority + api_instance.update_entry(repository, entry, authority, status) + except Exception as e: + print("Exception when calling STREAMV1Api->update_entry: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **entry** | **str**| entry id to update | + **authority** | **str**| authority to set/change status | + **status** | **str**| New status for this authority | + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/SearchParameters.md b/edu_sharing_openapi/docs/SearchParameters.md new file mode 100644 index 00000000..49122580 --- /dev/null +++ b/edu_sharing_openapi/docs/SearchParameters.md @@ -0,0 +1,38 @@ +# SearchParameters + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**permissions** | **List[str]** | | [optional] +**resolve_collections** | **bool** | | [optional] +**resolve_usernames** | **bool** | | [optional] +**return_suggestions** | **bool** | | [optional] +**excludes** | **List[str]** | | [optional] +**facets** | **List[str]** | | [optional] +**facet_min_count** | **int** | | [optional] [default to 5] +**facet_limit** | **int** | | [optional] [default to 10] +**facet_suggest** | **str** | | [optional] +**criteria** | [**List[MdsQueryCriteria]**](MdsQueryCriteria.md) | | + +## Example + +```python +from edu_sharing_client.models.search_parameters import SearchParameters + +# TODO update the JSON string below +json = "{}" +# create an instance of SearchParameters from a JSON string +search_parameters_instance = SearchParameters.from_json(json) +# print the JSON string representation of the object +print(SearchParameters.to_json()) + +# convert the object into a dict +search_parameters_dict = search_parameters_instance.to_dict() +# create an instance of SearchParameters from a dict +search_parameters_from_dict = SearchParameters.from_dict(search_parameters_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SearchParametersFacets.md b/edu_sharing_openapi/docs/SearchParametersFacets.md new file mode 100644 index 00000000..e863c60e --- /dev/null +++ b/edu_sharing_openapi/docs/SearchParametersFacets.md @@ -0,0 +1,33 @@ +# SearchParametersFacets + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**facets** | **List[str]** | | +**facet_min_count** | **int** | | [optional] [default to 5] +**facet_limit** | **int** | | [optional] [default to 10] +**facet_suggest** | **str** | | [optional] +**criteria** | [**List[MdsQueryCriteria]**](MdsQueryCriteria.md) | | + +## Example + +```python +from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets + +# TODO update the JSON string below +json = "{}" +# create an instance of SearchParametersFacets from a JSON string +search_parameters_facets_instance = SearchParametersFacets.from_json(json) +# print the JSON string representation of the object +print(SearchParametersFacets.to_json()) + +# convert the object into a dict +search_parameters_facets_dict = search_parameters_facets_instance.to_dict() +# create an instance of SearchParametersFacets from a dict +search_parameters_facets_from_dict = SearchParametersFacets.from_dict(search_parameters_facets_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SearchResult.md b/edu_sharing_openapi/docs/SearchResult.md new file mode 100644 index 00000000..7566929d --- /dev/null +++ 
b/edu_sharing_openapi/docs/SearchResult.md @@ -0,0 +1,31 @@ +# SearchResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**nodes** | [**List[Node]**](Node.md) | | +**pagination** | [**Pagination**](Pagination.md) | | +**facets** | [**List[Facet]**](Facet.md) | | + +## Example + +```python +from edu_sharing_client.models.search_result import SearchResult + +# TODO update the JSON string below +json = "{}" +# create an instance of SearchResult from a JSON string +search_result_instance = SearchResult.from_json(json) +# print the JSON string representation of the object +print(SearchResult.to_json()) + +# convert the object into a dict +search_result_dict = search_result_instance.to_dict() +# create an instance of SearchResult from a dict +search_result_from_dict = SearchResult.from_dict(search_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SearchResultElastic.md b/edu_sharing_openapi/docs/SearchResultElastic.md new file mode 100644 index 00000000..7ad28a27 --- /dev/null +++ b/edu_sharing_openapi/docs/SearchResultElastic.md @@ -0,0 +1,34 @@ +# SearchResultElastic + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**suggests** | [**List[Suggest]**](Suggest.md) | | [optional] +**elastic_response** | **str** | | [optional] +**nodes** | **List[object]** | | +**pagination** | [**Pagination**](Pagination.md) | | +**facets** | [**List[Facet]**](Facet.md) | | +**ignored** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.search_result_elastic import SearchResultElastic + +# TODO update the JSON string below +json = "{}" +# create an instance of SearchResultElastic from a JSON string +search_result_elastic_instance = SearchResultElastic.from_json(json) +# print the JSON string representation of the object +print(SearchResultElastic.to_json()) + +# convert the object into a dict +search_result_elastic_dict = search_result_elastic_instance.to_dict() +# create an instance of SearchResultElastic from a dict +search_result_elastic_from_dict = SearchResultElastic.from_dict(search_result_elastic_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SearchResultLrmi.md b/edu_sharing_openapi/docs/SearchResultLrmi.md new file mode 100644 index 00000000..132381c4 --- /dev/null +++ b/edu_sharing_openapi/docs/SearchResultLrmi.md @@ -0,0 +1,33 @@ +# SearchResultLrmi + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**suggests** | [**List[Suggest]**](Suggest.md) | | [optional] +**nodes** | **List[str]** | | +**pagination** | [**Pagination**](Pagination.md) | | +**facets** | [**List[Facet]**](Facet.md) | | +**ignored** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.search_result_lrmi import SearchResultLrmi + +# TODO update the JSON string below +json = "{}" +# create an instance of SearchResultLrmi from a JSON string +search_result_lrmi_instance = SearchResultLrmi.from_json(json) +# print the JSON string representation of the object +print(SearchResultLrmi.to_json()) + +# convert the object into 
a dict +search_result_lrmi_dict = search_result_lrmi_instance.to_dict() +# create an instance of SearchResultLrmi from a dict +search_result_lrmi_from_dict = SearchResultLrmi.from_dict(search_result_lrmi_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SearchResultNode.md b/edu_sharing_openapi/docs/SearchResultNode.md new file mode 100644 index 00000000..9b8e9407 --- /dev/null +++ b/edu_sharing_openapi/docs/SearchResultNode.md @@ -0,0 +1,33 @@ +# SearchResultNode + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**suggests** | [**List[Suggest]**](Suggest.md) | | [optional] +**nodes** | [**List[Node]**](Node.md) | | +**pagination** | [**Pagination**](Pagination.md) | | +**facets** | [**List[Facet]**](Facet.md) | | +**ignored** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.search_result_node import SearchResultNode + +# TODO update the JSON string below +json = "{}" +# create an instance of SearchResultNode from a JSON string +search_result_node_instance = SearchResultNode.from_json(json) +# print the JSON string representation of the object +print(SearchResultNode.to_json()) + +# convert the object into a dict +search_result_node_dict = search_result_node_instance.to_dict() +# create an instance of SearchResultNode from a dict +search_result_node_from_dict = SearchResultNode.from_dict(search_result_node_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SearchVCard.md b/edu_sharing_openapi/docs/SearchVCard.md new file mode 100644 index 00000000..14572020 --- /dev/null +++ b/edu_sharing_openapi/docs/SearchVCard.md @@ -0,0 +1,29 @@ +# SearchVCard + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**vcard** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.search_v_card import SearchVCard + +# TODO update the JSON string below +json = "{}" +# create an instance of SearchVCard from a JSON string +search_v_card_instance = SearchVCard.from_json(json) +# print the JSON string representation of the object +print(SearchVCard.to_json()) + +# convert the object into a dict +search_v_card_dict = search_v_card_instance.to_dict() +# create an instance of SearchVCard from a dict +search_v_card_from_dict = SearchVCard.from_dict(search_v_card_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ServerUpdateInfo.md b/edu_sharing_openapi/docs/ServerUpdateInfo.md new file mode 100644 index 00000000..d6f206b5 --- /dev/null +++ b/edu_sharing_openapi/docs/ServerUpdateInfo.md @@ -0,0 +1,34 @@ +# ServerUpdateInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**description** | **str** | | [optional] +**order** | **int** | | [optional] +**auto** | **bool** | | [optional] +**testable** | **bool** | | [optional] +**executed_at** | **int** | | [optional] + +## Example + +```python +from 
edu_sharing_client.models.server_update_info import ServerUpdateInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of ServerUpdateInfo from a JSON string +server_update_info_instance = ServerUpdateInfo.from_json(json) +# print the JSON string representation of the object +print(ServerUpdateInfo.to_json()) + +# convert the object into a dict +server_update_info_dict = server_update_info_instance.to_dict() +# create an instance of ServerUpdateInfo from a dict +server_update_info_from_dict = ServerUpdateInfo.from_dict(server_update_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Service.md b/edu_sharing_openapi/docs/Service.md new file mode 100644 index 00000000..4770c155 --- /dev/null +++ b/edu_sharing_openapi/docs/Service.md @@ -0,0 +1,41 @@ +# Service + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**url** | **str** | | [optional] +**icon** | **str** | | [optional] +**logo** | **str** | | [optional] +**in_language** | **str** | | [optional] +**type** | **str** | | [optional] +**description** | **str** | | [optional] +**audience** | [**List[Audience]**](Audience.md) | | [optional] +**provider** | [**Provider**](Provider.md) | | [optional] +**start_date** | **str** | | [optional] +**interfaces** | [**List[Interface]**](Interface.md) | | [optional] +**about** | **List[str]** | | [optional] +**is_accessible_for_free** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.service import Service + +# TODO update the JSON string below +json = "{}" +# create an instance of Service from a JSON string +service_instance = Service.from_json(json) +# print the JSON string representation of the object +print(Service.to_json()) + +# convert the object into a dict +service_dict = service_instance.to_dict() +# create an instance of Service from a dict +service_from_dict = Service.from_dict(service_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ServiceInstance.md b/edu_sharing_openapi/docs/ServiceInstance.md new file mode 100644 index 00000000..48b845ca --- /dev/null +++ b/edu_sharing_openapi/docs/ServiceInstance.md @@ -0,0 +1,30 @@ +# ServiceInstance + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**version** | [**ServiceVersion**](ServiceVersion.md) | | +**endpoint** | **str** | | + +## Example + +```python +from edu_sharing_client.models.service_instance import ServiceInstance + +# TODO update the JSON string below +json = "{}" +# create an instance of ServiceInstance from a JSON string +service_instance_instance = ServiceInstance.from_json(json) +# print the JSON string representation of the object +print(ServiceInstance.to_json()) + +# convert the object into a dict +service_instance_dict = service_instance_instance.to_dict() +# create an instance of ServiceInstance from a dict +service_instance_from_dict = ServiceInstance.from_dict(service_instance_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/edu_sharing_openapi/docs/ServiceVersion.md b/edu_sharing_openapi/docs/ServiceVersion.md new file mode 100644 index 00000000..55c8aaf8 --- /dev/null +++ b/edu_sharing_openapi/docs/ServiceVersion.md @@ -0,0 +1,32 @@ +# ServiceVersion + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**repository** | **str** | | [optional] +**renderservice** | **str** | | [optional] +**major** | **int** | | +**minor** | **int** | | + +## Example + +```python +from edu_sharing_client.models.service_version import ServiceVersion + +# TODO update the JSON string below +json = "{}" +# create an instance of ServiceVersion from a JSON string +service_version_instance = ServiceVersion.from_json(json) +# print the JSON string representation of the object +print(ServiceVersion.to_json()) + +# convert the object into a dict +service_version_dict = service_version_instance.to_dict() +# create an instance of ServiceVersion from a dict +service_version_from_dict = ServiceVersion.from_dict(service_version_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Services.md b/edu_sharing_openapi/docs/Services.md new file mode 100644 index 00000000..19871eef --- /dev/null +++ b/edu_sharing_openapi/docs/Services.md @@ -0,0 +1,29 @@ +# Services + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**visualization** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.services import Services + +# TODO update the JSON string below +json = "{}" +# create an instance of Services from a JSON string +services_instance = Services.from_json(json) +# print the JSON string representation of the object +print(Services.to_json()) + +# convert the object into a dict +services_dict = services_instance.to_dict() +# create an instance of Services from a dict +services_from_dict = Services.from_dict(services_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SharedFolderOptions.md b/edu_sharing_openapi/docs/SharedFolderOptions.md new file mode 100644 index 00000000..ac67708e --- /dev/null +++ b/edu_sharing_openapi/docs/SharedFolderOptions.md @@ -0,0 +1,32 @@ +# SharedFolderOptions + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**folders** | **str** | | [optional] +**private_files** | **str** | | [optional] +**cc_files** | **str** | | [optional] +**move** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.shared_folder_options import SharedFolderOptions + +# TODO update the JSON string below +json = "{}" +# create an instance of SharedFolderOptions from a JSON string +shared_folder_options_instance = SharedFolderOptions.from_json(json) +# print the JSON string representation of the object +print(SharedFolderOptions.to_json()) + +# convert the object into a dict +shared_folder_options_dict = shared_folder_options_instance.to_dict() +# create an instance of SharedFolderOptions from a dict +shared_folder_options_from_dict = SharedFolderOptions.from_dict(shared_folder_options_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API 
list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SharingInfo.md b/edu_sharing_openapi/docs/SharingInfo.md new file mode 100644 index 00000000..2c8ed592 --- /dev/null +++ b/edu_sharing_openapi/docs/SharingInfo.md @@ -0,0 +1,33 @@ +# SharingInfo + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**password_matches** | **bool** | | [optional] +**password** | **bool** | | [optional] +**expired** | **bool** | | [optional] +**invited_by** | [**Person**](Person.md) | | [optional] +**node** | [**Node**](Node.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.sharing_info import SharingInfo + +# TODO update the JSON string below +json = "{}" +# create an instance of SharingInfo from a JSON string +sharing_info_instance = SharingInfo.from_json(json) +# print the JSON string representation of the object +print(SharingInfo.to_json()) + +# convert the object into a dict +sharing_info_dict = sharing_info_instance.to_dict() +# create an instance of SharingInfo from a dict +sharing_info_from_dict = SharingInfo.from_dict(sharing_info_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SimpleEdit.md b/edu_sharing_openapi/docs/SimpleEdit.md new file mode 100644 index 00000000..dfd3a176 --- /dev/null +++ b/edu_sharing_openapi/docs/SimpleEdit.md @@ -0,0 +1,32 @@ +# SimpleEdit + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**global_groups** | [**List[SimpleEditGlobalGroups]**](SimpleEditGlobalGroups.md) | | [optional] +**organization** | [**SimpleEditOrganization**](SimpleEditOrganization.md) | | [optional] +**organization_filter** | **str** | | [optional] +**licenses** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.simple_edit import SimpleEdit + +# TODO update the JSON string below +json = "{}" +# create an instance of SimpleEdit from a JSON string +simple_edit_instance = SimpleEdit.from_json(json) +# print the JSON string representation of the object +print(SimpleEdit.to_json()) + +# convert the object into a dict +simple_edit_dict = simple_edit_instance.to_dict() +# create an instance of SimpleEdit from a dict +simple_edit_from_dict = SimpleEdit.from_dict(simple_edit_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SimpleEditGlobalGroups.md b/edu_sharing_openapi/docs/SimpleEditGlobalGroups.md new file mode 100644 index 00000000..99bdf19a --- /dev/null +++ b/edu_sharing_openapi/docs/SimpleEditGlobalGroups.md @@ -0,0 +1,30 @@ +# SimpleEditGlobalGroups + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**toolpermission** | **str** | | [optional] +**groups** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.simple_edit_global_groups import SimpleEditGlobalGroups + +# TODO update the JSON string below +json = "{}" +# create an instance of SimpleEditGlobalGroups from a JSON string +simple_edit_global_groups_instance = SimpleEditGlobalGroups.from_json(json) +# print the JSON string representation of 
the object +print(SimpleEditGlobalGroups.to_json()) + +# convert the object into a dict +simple_edit_global_groups_dict = simple_edit_global_groups_instance.to_dict() +# create an instance of SimpleEditGlobalGroups from a dict +simple_edit_global_groups_from_dict = SimpleEditGlobalGroups.from_dict(simple_edit_global_groups_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SimpleEditOrganization.md b/edu_sharing_openapi/docs/SimpleEditOrganization.md new file mode 100644 index 00000000..0ac63848 --- /dev/null +++ b/edu_sharing_openapi/docs/SimpleEditOrganization.md @@ -0,0 +1,29 @@ +# SimpleEditOrganization + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**group_types** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.simple_edit_organization import SimpleEditOrganization + +# TODO update the JSON string below +json = "{}" +# create an instance of SimpleEditOrganization from a JSON string +simple_edit_organization_instance = SimpleEditOrganization.from_json(json) +# print the JSON string representation of the object +print(SimpleEditOrganization.to_json()) + +# convert the object into a dict +simple_edit_organization_dict = simple_edit_organization_instance.to_dict() +# create an instance of SimpleEditOrganization from a dict +simple_edit_organization_from_dict = SimpleEditOrganization.from_dict(simple_edit_organization_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Sort.md b/edu_sharing_openapi/docs/Sort.md new file mode 100644 index 00000000..e74818f5 --- /dev/null +++ b/edu_sharing_openapi/docs/Sort.md @@ -0,0 +1,31 @@ +# Sort + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**sorted** | **bool** | | [optional] +**empty** | **bool** | | [optional] +**unsorted** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.sort import Sort + +# TODO update the JSON string below +json = "{}" +# create an instance of Sort from a JSON string +sort_instance = Sort.from_json(json) +# print the JSON string representation of the object +print(Sort.to_json()) + +# convert the object into a dict +sort_dict = sort_instance.to_dict() +# create an instance of Sort from a dict +sort_from_dict = Sort.from_dict(sort_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StatisticEntity.md b/edu_sharing_openapi/docs/StatisticEntity.md new file mode 100644 index 00000000..0c3b3dab --- /dev/null +++ b/edu_sharing_openapi/docs/StatisticEntity.md @@ -0,0 +1,30 @@ +# StatisticEntity + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**value** | **str** | | +**count** | **int** | | + +## Example + +```python +from edu_sharing_client.models.statistic_entity import StatisticEntity + +# TODO update the JSON string below +json = "{}" +# create an instance of StatisticEntity from a JSON string +statistic_entity_instance = StatisticEntity.from_json(json) +# 
print the JSON string representation of the object +print(StatisticEntity.to_json()) + +# convert the object into a dict +statistic_entity_dict = statistic_entity_instance.to_dict() +# create an instance of StatisticEntity from a dict +statistic_entity_from_dict = StatisticEntity.from_dict(statistic_entity_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StatisticEntry.md b/edu_sharing_openapi/docs/StatisticEntry.md new file mode 100644 index 00000000..8dec5e60 --- /dev/null +++ b/edu_sharing_openapi/docs/StatisticEntry.md @@ -0,0 +1,30 @@ +# StatisticEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**var_property** | **str** | | +**entities** | [**List[StatisticEntity]**](StatisticEntity.md) | | + +## Example + +```python +from edu_sharing_client.models.statistic_entry import StatisticEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of StatisticEntry from a JSON string +statistic_entry_instance = StatisticEntry.from_json(json) +# print the JSON string representation of the object +print(StatisticEntry.to_json()) + +# convert the object into a dict +statistic_entry_dict = statistic_entry_instance.to_dict() +# create an instance of StatisticEntry from a dict +statistic_entry_from_dict = StatisticEntry.from_dict(statistic_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Statistics.md b/edu_sharing_openapi/docs/Statistics.md new file mode 100644 index 00000000..17df6ff4 --- /dev/null +++ b/edu_sharing_openapi/docs/Statistics.md @@ -0,0 +1,29 @@ +# Statistics + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**entries** | [**List[StatisticEntry]**](StatisticEntry.md) | | + +## Example + +```python +from edu_sharing_client.models.statistics import Statistics + +# TODO update the JSON string below +json = "{}" +# create an instance of Statistics from a JSON string +statistics_instance = Statistics.from_json(json) +# print the JSON string representation of the object +print(Statistics.to_json()) + +# convert the object into a dict +statistics_dict = statistics_instance.to_dict() +# create an instance of Statistics from a dict +statistics_from_dict = Statistics.from_dict(statistics_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StatisticsGlobal.md b/edu_sharing_openapi/docs/StatisticsGlobal.md new file mode 100644 index 00000000..d23bbe26 --- /dev/null +++ b/edu_sharing_openapi/docs/StatisticsGlobal.md @@ -0,0 +1,31 @@ +# StatisticsGlobal + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**overall** | [**StatisticsGroup**](StatisticsGroup.md) | | [optional] +**groups** | [**List[StatisticsKeyGroup]**](StatisticsKeyGroup.md) | | [optional] +**user** | [**StatisticsUser**](StatisticsUser.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.statistics_global import StatisticsGlobal + +# TODO update the JSON string below +json = "{}" +# 
create an instance of StatisticsGlobal from a JSON string +statistics_global_instance = StatisticsGlobal.from_json(json) +# print the JSON string representation of the object +print(StatisticsGlobal.to_json()) + +# convert the object into a dict +statistics_global_dict = statistics_global_instance.to_dict() +# create an instance of StatisticsGlobal from a dict +statistics_global_from_dict = StatisticsGlobal.from_dict(statistics_global_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StatisticsGroup.md b/edu_sharing_openapi/docs/StatisticsGroup.md new file mode 100644 index 00000000..4dc50ca7 --- /dev/null +++ b/edu_sharing_openapi/docs/StatisticsGroup.md @@ -0,0 +1,30 @@ +# StatisticsGroup + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**count** | **int** | | [optional] +**sub_groups** | [**List[StatisticsSubGroup]**](StatisticsSubGroup.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.statistics_group import StatisticsGroup + +# TODO update the JSON string below +json = "{}" +# create an instance of StatisticsGroup from a JSON string +statistics_group_instance = StatisticsGroup.from_json(json) +# print the JSON string representation of the object +print(StatisticsGroup.to_json()) + +# convert the object into a dict +statistics_group_dict = statistics_group_instance.to_dict() +# create an instance of StatisticsGroup from a dict +statistics_group_from_dict = StatisticsGroup.from_dict(statistics_group_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StatisticsKeyGroup.md b/edu_sharing_openapi/docs/StatisticsKeyGroup.md new file mode 100644 index 00000000..7e1c4dad --- /dev/null +++ b/edu_sharing_openapi/docs/StatisticsKeyGroup.md @@ -0,0 +1,32 @@ +# StatisticsKeyGroup + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | **str** | | [optional] +**display_name** | **str** | | [optional] +**count** | **int** | | [optional] +**sub_groups** | [**List[StatisticsSubGroup]**](StatisticsSubGroup.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.statistics_key_group import StatisticsKeyGroup + +# TODO update the JSON string below +json = "{}" +# create an instance of StatisticsKeyGroup from a JSON string +statistics_key_group_instance = StatisticsKeyGroup.from_json(json) +# print the JSON string representation of the object +print(StatisticsKeyGroup.to_json()) + +# convert the object into a dict +statistics_key_group_dict = statistics_key_group_instance.to_dict() +# create an instance of StatisticsKeyGroup from a dict +statistics_key_group_from_dict = StatisticsKeyGroup.from_dict(statistics_key_group_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StatisticsSubGroup.md b/edu_sharing_openapi/docs/StatisticsSubGroup.md new file mode 100644 index 00000000..fda78a60 --- /dev/null +++ b/edu_sharing_openapi/docs/StatisticsSubGroup.md @@ -0,0 +1,30 @@ +# StatisticsSubGroup + + +## Properties + +Name | Type | 
Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**count** | [**List[SubGroupItem]**](SubGroupItem.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup + +# TODO update the JSON string below +json = "{}" +# create an instance of StatisticsSubGroup from a JSON string +statistics_sub_group_instance = StatisticsSubGroup.from_json(json) +# print the JSON string representation of the object +print(StatisticsSubGroup.to_json()) + +# convert the object into a dict +statistics_sub_group_dict = statistics_sub_group_instance.to_dict() +# create an instance of StatisticsSubGroup from a dict +statistics_sub_group_from_dict = StatisticsSubGroup.from_dict(statistics_sub_group_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StatisticsUser.md b/edu_sharing_openapi/docs/StatisticsUser.md new file mode 100644 index 00000000..e3a75c33 --- /dev/null +++ b/edu_sharing_openapi/docs/StatisticsUser.md @@ -0,0 +1,29 @@ +# StatisticsUser + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**count** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.statistics_user import StatisticsUser + +# TODO update the JSON string below +json = "{}" +# create an instance of StatisticsUser from a JSON string +statistics_user_instance = StatisticsUser.from_json(json) +# print the JSON string representation of the object +print(StatisticsUser.to_json()) + +# convert the object into a dict +statistics_user_dict = statistics_user_instance.to_dict() +# create an instance of StatisticsUser from a dict +statistics_user_from_dict = StatisticsUser.from_dict(statistics_user_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StoredService.md b/edu_sharing_openapi/docs/StoredService.md new file mode 100644 index 00000000..3e7e81b7 --- /dev/null +++ b/edu_sharing_openapi/docs/StoredService.md @@ -0,0 +1,42 @@ +# StoredService + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | [optional] +**url** | **str** | | [optional] +**icon** | **str** | | [optional] +**logo** | **str** | | [optional] +**in_language** | **str** | | [optional] +**type** | **str** | | [optional] +**description** | **str** | | [optional] +**audience** | [**List[Audience]**](Audience.md) | | [optional] +**provider** | [**Provider**](Provider.md) | | [optional] +**start_date** | **str** | | [optional] +**interfaces** | [**List[Interface]**](Interface.md) | | [optional] +**about** | **List[str]** | | [optional] +**id** | **str** | | [optional] +**is_accessible_for_free** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.stored_service import StoredService + +# TODO update the JSON string below +json = "{}" +# create an instance of StoredService from a JSON string +stored_service_instance = StoredService.from_json(json) +# print the JSON string representation of the object +print(StoredService.to_json()) + +# convert the object into a dict +stored_service_dict = 
stored_service_instance.to_dict() +# create an instance of StoredService from a dict +stored_service_from_dict = StoredService.from_dict(stored_service_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Stream.md b/edu_sharing_openapi/docs/Stream.md new file mode 100644 index 00000000..4d9637ce --- /dev/null +++ b/edu_sharing_openapi/docs/Stream.md @@ -0,0 +1,29 @@ +# Stream + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**enabled** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.stream import Stream + +# TODO update the JSON string below +json = "{}" +# create an instance of Stream from a JSON string +stream_instance = Stream.from_json(json) +# print the JSON string representation of the object +print(Stream.to_json()) + +# convert the object into a dict +stream_dict = stream_instance.to_dict() +# create an instance of Stream from a dict +stream_from_dict = Stream.from_dict(stream_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StreamEntry.md b/edu_sharing_openapi/docs/StreamEntry.md new file mode 100644 index 00000000..1f5ea03d --- /dev/null +++ b/edu_sharing_openapi/docs/StreamEntry.md @@ -0,0 +1,36 @@ +# StreamEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**description** | **str** | | [optional] +**nodes** | [**List[Node]**](Node.md) | | [optional] +**properties** | **Dict[str, object]** | | [optional] +**priority** | **int** | | [optional] +**author** | [**UserSimple**](UserSimple.md) | | [optional] +**created** | **int** | | [optional] +**modified** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.stream_entry import StreamEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of StreamEntry from a JSON string +stream_entry_instance = StreamEntry.from_json(json) +# print the JSON string representation of the object +print(StreamEntry.to_json()) + +# convert the object into a dict +stream_entry_dict = stream_entry_instance.to_dict() +# create an instance of StreamEntry from a dict +stream_entry_from_dict = StreamEntry.from_dict(stream_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StreamEntryInput.md b/edu_sharing_openapi/docs/StreamEntryInput.md new file mode 100644 index 00000000..ee3b712d --- /dev/null +++ b/edu_sharing_openapi/docs/StreamEntryInput.md @@ -0,0 +1,34 @@ +# StreamEntryInput + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**title** | **str** | | [optional] +**description** | **str** | | [optional] +**nodes** | **List[str]** | | [optional] +**properties** | **Dict[str, object]** | | [optional] +**priority** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.stream_entry_input import StreamEntryInput + +# TODO update the JSON string below +json = "{}" +# create 
an instance of StreamEntryInput from a JSON string +stream_entry_input_instance = StreamEntryInput.from_json(json) +# print the JSON string representation of the object +print(StreamEntryInput.to_json()) + +# convert the object into a dict +stream_entry_input_dict = stream_entry_input_instance.to_dict() +# create an instance of StreamEntryInput from a dict +stream_entry_input_from_dict = StreamEntryInput.from_dict(stream_entry_input_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/StreamList.md b/edu_sharing_openapi/docs/StreamList.md new file mode 100644 index 00000000..2c9ec555 --- /dev/null +++ b/edu_sharing_openapi/docs/StreamList.md @@ -0,0 +1,30 @@ +# StreamList + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**stream** | [**List[StreamEntry]**](StreamEntry.md) | | [optional] +**pagination** | [**Pagination**](Pagination.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.stream_list import StreamList + +# TODO update the JSON string below +json = "{}" +# create an instance of StreamList from a JSON string +stream_list_instance = StreamList.from_json(json) +# print the JSON string representation of the object +print(StreamList.to_json()) + +# convert the object into a dict +stream_list_dict = stream_list_instance.to_dict() +# create an instance of StreamList from a dict +stream_list_from_dict = StreamList.from_dict(stream_list_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SubGroupItem.md b/edu_sharing_openapi/docs/SubGroupItem.md new file mode 100644 index 00000000..fc7f0954 --- /dev/null +++ b/edu_sharing_openapi/docs/SubGroupItem.md @@ -0,0 +1,31 @@ +# SubGroupItem + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**key** | **str** | | [optional] +**display_name** | **str** | | [optional] +**count** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.sub_group_item import SubGroupItem + +# TODO update the JSON string below +json = "{}" +# create an instance of SubGroupItem from a JSON string +sub_group_item_instance = SubGroupItem.from_json(json) +# print the JSON string representation of the object +print(SubGroupItem.to_json()) + +# convert the object into a dict +sub_group_item_dict = sub_group_item_instance.to_dict() +# create an instance of SubGroupItem from a dict +sub_group_item_from_dict = SubGroupItem.from_dict(sub_group_item_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Suggest.md b/edu_sharing_openapi/docs/Suggest.md new file mode 100644 index 00000000..67400d04 --- /dev/null +++ b/edu_sharing_openapi/docs/Suggest.md @@ -0,0 +1,31 @@ +# Suggest + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**text** | **str** | suggested text | +**highlighted** | **str** | suggested text with corrected words highlighted | [optional] +**score** | **float** | score of the suggestion | + +## Example + +```python +from 
edu_sharing_client.models.suggest import Suggest + +# TODO update the JSON string below +json = "{}" +# create an instance of Suggest from a JSON string +suggest_instance = Suggest.from_json(json) +# print the JSON string representation of the object +print(Suggest.to_json()) + +# convert the object into a dict +suggest_dict = suggest_instance.to_dict() +# create an instance of Suggest from a dict +suggest_from_dict = Suggest.from_dict(suggest_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Suggestion.md b/edu_sharing_openapi/docs/Suggestion.md new file mode 100644 index 00000000..52cc5a9b --- /dev/null +++ b/edu_sharing_openapi/docs/Suggestion.md @@ -0,0 +1,31 @@ +# Suggestion + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**replacement_string** | **str** | | +**display_string** | **str** | | +**key** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.suggestion import Suggestion + +# TODO update the JSON string below +json = "{}" +# create an instance of Suggestion from a JSON string +suggestion_instance = Suggestion.from_json(json) +# print the JSON string representation of the object +print(Suggestion.to_json()) + +# convert the object into a dict +suggestion_dict = suggestion_instance.to_dict() +# create an instance of Suggestion from a dict +suggestion_from_dict = Suggestion.from_dict(suggestion_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/SuggestionParam.md b/edu_sharing_openapi/docs/SuggestionParam.md new file mode 100644 index 00000000..5c02a271 --- /dev/null +++ b/edu_sharing_openapi/docs/SuggestionParam.md @@ -0,0 +1,30 @@ +# SuggestionParam + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**value_parameters** | [**ValueParameters**](ValueParameters.md) | | [optional] +**criteria** | [**List[MdsQueryCriteria]**](MdsQueryCriteria.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.suggestion_param import SuggestionParam + +# TODO update the JSON string below +json = "{}" +# create an instance of SuggestionParam from a JSON string +suggestion_param_instance = SuggestionParam.from_json(json) +# print the JSON string representation of the object +print(SuggestionParam.to_json()) + +# convert the object into a dict +suggestion_param_dict = suggestion_param_instance.to_dict() +# create an instance of SuggestionParam from a dict +suggestion_param_from_dict = SuggestionParam.from_dict(suggestion_param_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Suggestions.md b/edu_sharing_openapi/docs/Suggestions.md new file mode 100644 index 00000000..222a017c --- /dev/null +++ b/edu_sharing_openapi/docs/Suggestions.md @@ -0,0 +1,29 @@ +# Suggestions + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**values** | [**List[Suggestion]**](Suggestion.md) | | + +## Example + +```python +from edu_sharing_client.models.suggestions import 
Suggestions + +# TODO update the JSON string below +json = "{}" +# create an instance of Suggestions from a JSON string +suggestions_instance = Suggestions.from_json(json) +# print the JSON string representation of the object +print(Suggestions.to_json()) + +# convert the object into a dict +suggestions_dict = suggestions_instance.to_dict() +# create an instance of Suggestions from a dict +suggestions_from_dict = Suggestions.from_dict(suggestions_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/TOOLV1Api.md b/edu_sharing_openapi/docs/TOOLV1Api.md new file mode 100644 index 00000000..9a0b1923 --- /dev/null +++ b/edu_sharing_openapi/docs/TOOLV1Api.md @@ -0,0 +1,481 @@ +# edu_sharing_client.TOOLV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**create_tool_defintition**](TOOLV1Api.md#create_tool_defintition) | **POST** /tool/v1/tools/{repository}/tooldefinitions | Create a new tool definition object. +[**create_tool_instance**](TOOLV1Api.md#create_tool_instance) | **POST** /tool/v1/tools/{repository}/{toolDefinition}/toolinstances | Create a new tool Instance object. +[**create_tool_object**](TOOLV1Api.md#create_tool_object) | **POST** /tool/v1/tools/{repository}/{toolinstance}/toolobject | Create a new tool object for a given tool instance. +[**get_all_tool_definitions**](TOOLV1Api.md#get_all_tool_definitions) | **GET** /tool/v1/tools/{repository}/tooldefinitions | Get all ToolDefinitions. +[**get_instance**](TOOLV1Api.md#get_instance) | **GET** /tool/v1/tools/{repository}/{nodeid}/toolinstance | Get Instances of a ToolDefinition. +[**get_instances**](TOOLV1Api.md#get_instances) | **GET** /tool/v1/tools/{repository}/{toolDefinition}/toolinstances | Get Instances of a ToolDefinition. + + +# **create_tool_defintition** +> NodeEntry create_tool_defintition(repository, request_body, rename_if_exists=rename_if_exists, version_comment=version_comment) + +Create a new tool definition object. + +Create a new tool definition object. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.TOOLV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + request_body = None # Dict[str, List[str]] | properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} + rename_if_exists = False # bool | rename if the same node name exists (optional) (default to False) + version_comment = 'version_comment_example' # str | comment, leave empty = no inital version (optional) + + try: + # Create a new tool definition object. 
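+        # Note: request_body is left as None above only as a placeholder; the endpoint expects a
+        # dict mapping fully qualified property names to lists of string values. Following the
+        # example given in the parameter description, a populated call might look like this
+        # (the value "my tool definition" is illustrative, not taken from the spec):
+        # request_body = {"{http://www.alfresco.org/model/content/1.0}name": ["my tool definition"]}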
+ api_response = api_instance.create_tool_defintition(repository, request_body, rename_if_exists=rename_if_exists, version_comment=version_comment) + print("The response of TOOLV1Api->create_tool_defintition:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling TOOLV1Api->create_tool_defintition: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **request_body** | [**Dict[str, List[str]]**](List.md)| properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} | + **rename_if_exists** | **bool**| rename if the same node name exists | [optional] [default to False] + **version_comment** | **str**| comment, leave empty = no inital version | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_tool_instance** +> NodeEntry create_tool_instance(repository, tool_definition, request_body, rename_if_exists=rename_if_exists, version_comment=version_comment) + +Create a new tool Instance object. + +Create a new tool Instance object. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.TOOLV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + tool_definition = 'tool_definition_example' # str | ID of parent node must have tool_definition aspect + request_body = None # Dict[str, List[str]] | properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} + rename_if_exists = False # bool | rename if the same node name exists (optional) (default to False) + version_comment = 'version_comment_example' # str | comment, leave empty = no inital version (optional) + + try: + # Create a new tool Instance object. 
+ api_response = api_instance.create_tool_instance(repository, tool_definition, request_body, rename_if_exists=rename_if_exists, version_comment=version_comment) + print("The response of TOOLV1Api->create_tool_instance:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling TOOLV1Api->create_tool_instance: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **tool_definition** | **str**| ID of parent node must have tool_definition aspect | + **request_body** | [**Dict[str, List[str]]**](List.md)| properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} | + **rename_if_exists** | **bool**| rename if the same node name exists | [optional] [default to False] + **version_comment** | **str**| comment, leave empty = no inital version | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **create_tool_object** +> NodeEntry create_tool_object(repository, toolinstance, request_body, rename_if_exists=rename_if_exists, version_comment=version_comment) + +Create a new tool object for a given tool instance. + +Create a new tool object for a given tool instance. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.TOOLV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + toolinstance = 'toolinstance_example' # str | ID of parent node (a tool instance object) + request_body = None # Dict[str, List[str]] | properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} + rename_if_exists = False # bool | rename if the same node name exists (optional) (default to False) + version_comment = 'version_comment_example' # str | comment, leave empty = no inital version (optional) + + try: + # Create a new tool object for a given tool instance. 
+ api_response = api_instance.create_tool_object(repository, toolinstance, request_body, rename_if_exists=rename_if_exists, version_comment=version_comment) + print("The response of TOOLV1Api->create_tool_object:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling TOOLV1Api->create_tool_object: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **toolinstance** | **str**| ID of parent node (a tool instance object) | + **request_body** | [**Dict[str, List[str]]**](List.md)| properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} | + **rename_if_exists** | **bool**| rename if the same node name exists | [optional] [default to False] + **version_comment** | **str**| comment, leave empty = no inital version | [optional] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**409** | Duplicate Entity/Node conflict (Node with same name exists) | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_all_tool_definitions** +> NodeEntry get_all_tool_definitions(repository) + +Get all ToolDefinitions. + +Get all ToolDefinitions. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.TOOLV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + + try: + # Get all ToolDefinitions. 
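+        # The api_response assigned below is a NodeEntry model; like the other generated models
+        # it can be inspected through its serialisation helpers once the call succeeds, e.g.
+        # print(api_response.to_json()) or api_response.to_dict() (sketch, assuming a 200 response).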
+ api_response = api_instance.get_all_tool_definitions(repository) + print("The response of TOOLV1Api->get_all_tool_definitions:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling TOOLV1Api->get_all_tool_definitions: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_instance** +> NodeEntry get_instance(repository, nodeid) + +Get Instances of a ToolDefinition. + +Get Instances of a ToolDefinition. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.TOOLV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + nodeid = 'nodeid_example' # str | ID of node + + try: + # Get Instances of a ToolDefinition. + api_response = api_instance.get_instance(repository, nodeid) + print("The response of TOOLV1Api->get_instance:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling TOOLV1Api->get_instance: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **nodeid** | **str**| ID of node | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_instances** +> NodeEntry get_instances(repository, tool_definition) + +Get Instances of a ToolDefinition. + +Get Instances of a ToolDefinition. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.TOOLV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + tool_definition = 'tool_definition_example' # str | ID of node + + try: + # Get Instances of a ToolDefinition. + api_response = api_instance.get_instances(repository, tool_definition) + print("The response of TOOLV1Api->get_instances:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling TOOLV1Api->get_instances: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **tool_definition** | **str**| ID of node | + +### Return type + +[**NodeEntry**](NodeEntry.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/TRACKINGV1Api.md b/edu_sharing_openapi/docs/TRACKINGV1Api.md new file mode 100644 index 00000000..72672243 --- /dev/null +++ b/edu_sharing_openapi/docs/TRACKINGV1Api.md @@ -0,0 +1,83 @@ +# edu_sharing_client.TRACKINGV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**track_event**](TRACKINGV1Api.md#track_event) | **PUT** /tracking/v1/tracking/{repository}/{event} | Track a user interaction + + +# **track_event** +> track_event(repository, event, node=node) + +Track a user interaction + +Currently limited to video / audio play interactions + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.TRACKINGV1Api(api_client) + repository = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + event = 'event_example' # str | type of event to track + node = 'node_example' # str | node id for which the event is tracked. For some event, this can be null (optional) + + try: + # Track a user interaction + api_instance.track_event(repository, event, node=node) + except Exception as e: + print("Exception when calling TRACKINGV1Api->track_event: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **event** | **str**| type of event to track | + **node** | **str**| node id for which the event is tracked. For some event, this can be null | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/Tool.md b/edu_sharing_openapi/docs/Tool.md new file mode 100644 index 00000000..79ab411e --- /dev/null +++ b/edu_sharing_openapi/docs/Tool.md @@ -0,0 +1,34 @@ +# Tool + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**domain** | **str** | | [optional] +**description** | **str** | | [optional] +**app_id** | **str** | | [optional] +**name** | **str** | | [optional] +**logo** | **str** | | [optional] +**custom_content_option** | **bool** | | [optional] + +## Example + +```python +from edu_sharing_client.models.tool import Tool + +# TODO update the JSON string below +json = "{}" +# create an instance of Tool from a JSON string +tool_instance = Tool.from_json(json) +# print the JSON string representation of the object +print(Tool.to_json()) + +# convert the object into a dict +tool_dict = tool_instance.to_dict() +# create an instance of Tool from a dict +tool_from_dict = Tool.from_dict(tool_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Tools.md b/edu_sharing_openapi/docs/Tools.md new file mode 100644 index 00000000..59fabb06 --- /dev/null +++ b/edu_sharing_openapi/docs/Tools.md @@ -0,0 +1,29 @@ +# Tools + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**tools** | [**List[Tool]**](Tool.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.tools import Tools + +# TODO update the JSON string below +json = "{}" +# create an instance of Tools from a JSON string +tools_instance = Tools.from_json(json) +# print the JSON string representation of the object +print(Tools.to_json()) + +# convert the object into a dict +tools_dict = tools_instance.to_dict() +# create an instance of Tools from a dict +tools_from_dict = Tools.from_dict(tools_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Tracking.md b/edu_sharing_openapi/docs/Tracking.md new file mode 100644 index 00000000..45f741d2 --- /dev/null +++ b/edu_sharing_openapi/docs/Tracking.md @@ -0,0 +1,33 @@ +# Tracking + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**counts** | **Dict[str, int]** | | [optional] +**var_date** | **str** | | [optional] +**fields** | **Dict[str, object]** | | [optional] +**groups** | **Dict[str, Dict[str, Dict[str, int]]]** | | [optional] +**authority** | [**TrackingAuthority**](TrackingAuthority.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.tracking import Tracking + +# TODO update the JSON string below +json = "{}" +# create an instance of Tracking from a JSON string +tracking_instance = Tracking.from_json(json) +# print the JSON string representation of the object +print(Tracking.to_json()) + +# convert the object into a dict +tracking_dict = tracking_instance.to_dict() +# create an instance of Tracking from a dict +tracking_from_dict = Tracking.from_dict(tracking_dict) +``` +[[Back to Model 
list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/TrackingAuthority.md b/edu_sharing_openapi/docs/TrackingAuthority.md new file mode 100644 index 00000000..2d4be25c --- /dev/null +++ b/edu_sharing_openapi/docs/TrackingAuthority.md @@ -0,0 +1,31 @@ +# TrackingAuthority + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**hash** | **str** | | [optional] +**organization** | [**List[Organization]**](Organization.md) | | [optional] +**mediacenter** | [**List[Group]**](Group.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.tracking_authority import TrackingAuthority + +# TODO update the JSON string below +json = "{}" +# create an instance of TrackingAuthority from a JSON string +tracking_authority_instance = TrackingAuthority.from_json(json) +# print the JSON string representation of the object +print(TrackingAuthority.to_json()) + +# convert the object into a dict +tracking_authority_dict = tracking_authority_instance.to_dict() +# create an instance of TrackingAuthority from a dict +tracking_authority_from_dict = TrackingAuthority.from_dict(tracking_authority_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/TrackingNode.md b/edu_sharing_openapi/docs/TrackingNode.md new file mode 100644 index 00000000..ab59de71 --- /dev/null +++ b/edu_sharing_openapi/docs/TrackingNode.md @@ -0,0 +1,34 @@ +# TrackingNode + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**counts** | **Dict[str, int]** | | [optional] +**var_date** | **str** | | [optional] +**fields** | **Dict[str, object]** | | [optional] +**groups** | **Dict[str, Dict[str, Dict[str, int]]]** | | [optional] +**node** | [**Node**](Node.md) | | [optional] +**authority** | [**TrackingAuthority**](TrackingAuthority.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.tracking_node import TrackingNode + +# TODO update the JSON string below +json = "{}" +# create an instance of TrackingNode from a JSON string +tracking_node_instance = TrackingNode.from_json(json) +# print the JSON string representation of the object +print(TrackingNode.to_json()) + +# convert the object into a dict +tracking_node_dict = tracking_node_instance.to_dict() +# create an instance of TrackingNode from a dict +tracking_node_from_dict = TrackingNode.from_dict(tracking_node_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/USAGEV1Api.md b/edu_sharing_openapi/docs/USAGEV1Api.md new file mode 100644 index 00000000..adbc7a6b --- /dev/null +++ b/edu_sharing_openapi/docs/USAGEV1Api.md @@ -0,0 +1,523 @@ +# edu_sharing_client.USAGEV1Api + +All URIs are relative to *https://stable.demo.edu-sharing.net/edu-sharing/rest* + +Method | HTTP request | Description +------------- | ------------- | ------------- +[**delete_usage**](USAGEV1Api.md#delete_usage) | **DELETE** /usage/v1/usages/node/{nodeId}/{usageId} | Delete an usage of a node. 
+[**get_usages**](USAGEV1Api.md#get_usages) | **GET** /usage/v1/usages/{appId} | Get all usages of an application. +[**get_usages1**](USAGEV1Api.md#get_usages1) | **GET** /usage/v1/usages/repository/{repositoryId}/{nodeId} | +[**get_usages_by_course**](USAGEV1Api.md#get_usages_by_course) | **GET** /usage/v1/usages/course/{appId}/{courseId} | Get all usages of an course. +[**get_usages_by_node**](USAGEV1Api.md#get_usages_by_node) | **GET** /usage/v1/usages/node/{nodeId} | Get all usages of an node. +[**get_usages_by_node_collections**](USAGEV1Api.md#get_usages_by_node_collections) | **GET** /usage/v1/usages/node/{nodeId}/collections | Get all collections where this node is used. +[**set_usage**](USAGEV1Api.md#set_usage) | **POST** /usage/v1/usages/repository/{repositoryId} | Set a usage for a node. app signature headers and authenticated user required. + + +# **delete_usage** +> Usages delete_usage(node_id, usage_id) + +Delete an usage of a node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.usages import Usages +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.USAGEV1Api(api_client) + node_id = 'node_id_example' # str | ID of node + usage_id = 'usage_id_example' # str | ID of usage + + try: + # Delete an usage of a node. + api_response = api_instance.delete_usage(node_id, usage_id) + print("The response of USAGEV1Api->delete_usage:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling USAGEV1Api->delete_usage: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_id** | **str**| ID of node | + **usage_id** | **str**| ID of usage | + +### Return type + +[**Usages**](Usages.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_usages** +> Usages get_usages(app_id) + +Get all usages of an application. + +Get all usages of an application. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.usages import Usages +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.USAGEV1Api(api_client) + app_id = 'app_id_example' # str | ID of application (or \"-home-\" for home repository) + + try: + # Get all usages of an application. + api_response = api_instance.get_usages(app_id) + print("The response of USAGEV1Api->get_usages:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling USAGEV1Api->get_usages: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **app_id** | **str**| ID of application (or \"-home-\" for home repository) | + +### Return type + +[**Usages**](Usages.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_usages1** +> get_usages1(repository_id, node_id, var_from=var_from, to=to) + + + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.USAGEV1Api(api_client) + repository_id = '-home-' # str | ID of repository (default to '-home-') + node_id = '-all-' # str | ID of node. Use -all- for getting usages of all nodes (default to '-all-') + var_from = 56 # int | from date (optional) + to = 56 # int | to date (optional) + + try: + api_instance.get_usages1(repository_id, node_id, var_from=var_from, to=to) + except Exception as e: + print("Exception when calling USAGEV1Api->get_usages1: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository_id** | **str**| ID of repository | [default to '-home-'] + **node_id** | **str**| ID of node. 
Use -all- for getting usages of all nodes | [default to '-all-'] + **var_from** | **int**| from date | [optional] + **to** | **int**| to date | [optional] + +### Return type + +void (empty response body) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**0** | default response | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_usages_by_course** +> Usages get_usages_by_course(app_id, course_id) + +Get all usages of an course. + +Get all usages of an course. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.usages import Usages +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.USAGEV1Api(api_client) + app_id = 'app_id_example' # str | ID of application (or \"-home-\" for home repository) + course_id = 'course_id_example' # str | ID of course + + try: + # Get all usages of an course. + api_response = api_instance.get_usages_by_course(app_id, course_id) + print("The response of USAGEV1Api->get_usages_by_course:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling USAGEV1Api->get_usages_by_course: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **app_id** | **str**| ID of application (or \"-home-\" for home repository) | + **course_id** | **str**| ID of course | + +### Return type + +[**Usages**](Usages.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_usages_by_node** +> Usages get_usages_by_node(node_id) + +Get all usages of an node. + +Get all usages of an node. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.usages import Usages +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. 
+configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.USAGEV1Api(api_client) + node_id = 'node_id_example' # str | ID of node + + try: + # Get all usages of an node. + api_response = api_instance.get_usages_by_node(node_id) + print("The response of USAGEV1Api->get_usages_by_node:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling USAGEV1Api->get_usages_by_node: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_id** | **str**| ID of node | + +### Return type + +[**Usages**](Usages.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. | - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **get_usages_by_node_collections** +> str get_usages_by_node_collections(node_id) + +Get all collections where this node is used. + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.USAGEV1Api(api_client) + node_id = 'node_id_example' # str | ID of node + + try: + # Get all collections where this node is used. + api_response = api_instance.get_usages_by_node_collections(node_id) + print("The response of USAGEV1Api->get_usages_by_node_collections:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling USAGEV1Api->get_usages_by_node_collections: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **node_id** | **str**| ID of node | + +### Return type + +**str** + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **set_usage** +> Usage set_usage(repository_id, create_usage) + +Set a usage for a node. app signature headers and authenticated user required. + +headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + +### Example + + +```python +import edu_sharing_client +from edu_sharing_client.models.create_usage import CreateUsage +from edu_sharing_client.models.usage import Usage +from edu_sharing_client.rest import ApiException +from pprint import pprint + +# Defining the host is optional and defaults to https://stable.demo.edu-sharing.net/edu-sharing/rest +# See configuration.py for a list of all supported configuration parameters. +configuration = edu_sharing_client.Configuration( + host = "https://stable.demo.edu-sharing.net/edu-sharing/rest" +) + + +# Enter a context with an instance of the API client +with edu_sharing_client.ApiClient(configuration) as api_client: + # Create an instance of the API class + api_instance = edu_sharing_client.USAGEV1Api(api_client) + repository_id = '-home-' # str | ID of repository (or \"-home-\" for home repository) (default to '-home-') + create_usage = edu_sharing_client.CreateUsage() # CreateUsage | usage date + + try: + # Set a usage for a node. app signature headers and authenticated user required. + api_response = api_instance.set_usage(repository_id, create_usage) + print("The response of USAGEV1Api->set_usage:\n") + pprint(api_response) + except Exception as e: + print("Exception when calling USAGEV1Api->set_usage: %s\n" % e) +``` + + + +### Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **repository_id** | **str**| ID of repository (or \"-home-\" for home repository) | [default to '-home-'] + **create_usage** | [**CreateUsage**](CreateUsage.md)| usage date | + +### Return type + +[**Usage**](Usage.md) + +### Authorization + +No authorization required + +### HTTP request headers + + - **Content-Type**: application/json + - **Accept**: application/json + +### HTTP response details + +| Status code | Description | Response headers | +|-------------|-------------|------------------| +**200** | OK. | - | +**400** | Preconditions are not present. | - | +**401** | Authorization failed. | - | +**403** | Session user has insufficient rights to perform this operation. | - | +**404** | Ressources are not found. | - | +**500** | Fatal error occured. 
| - | + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + diff --git a/edu_sharing_openapi/docs/UploadResult.md b/edu_sharing_openapi/docs/UploadResult.md new file mode 100644 index 00000000..c5755f33 --- /dev/null +++ b/edu_sharing_openapi/docs/UploadResult.md @@ -0,0 +1,29 @@ +# UploadResult + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**file** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.upload_result import UploadResult + +# TODO update the JSON string below +json = "{}" +# create an instance of UploadResult from a JSON string +upload_result_instance = UploadResult.from_json(json) +# print the JSON string representation of the object +print(UploadResult.to_json()) + +# convert the object into a dict +upload_result_dict = upload_result_instance.to_dict() +# create an instance of UploadResult from a dict +upload_result_from_dict = UploadResult.from_dict(upload_result_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Usage.md b/edu_sharing_openapi/docs/Usage.md new file mode 100644 index 00000000..2953fb89 --- /dev/null +++ b/edu_sharing_openapi/docs/Usage.md @@ -0,0 +1,48 @@ +# Usage + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**from_used** | **datetime** | | [optional] +**to_used** | **datetime** | | [optional] +**usage_counter** | **int** | | [optional] +**app_subtype** | **str** | | [optional] +**app_type** | **str** | | [optional] +**type** | **str** | | [optional] +**created** | **datetime** | | [optional] +**modified** | **datetime** | | [optional] +**app_user** | **str** | | +**app_user_mail** | **str** | | +**course_id** | **str** | | +**distinct_persons** | **int** | | [optional] +**app_id** | **str** | | +**node_id** | **str** | | +**parent_node_id** | **str** | | +**usage_version** | **str** | | +**usage_xml_params** | [**Parameters**](Parameters.md) | | [optional] +**usage_xml_params_raw** | **str** | | [optional] +**resource_id** | **str** | | +**guid** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.usage import Usage + +# TODO update the JSON string below +json = "{}" +# create an instance of Usage from a JSON string +usage_instance = Usage.from_json(json) +# print the JSON string representation of the object +print(Usage.to_json()) + +# convert the object into a dict +usage_dict = usage_instance.to_dict() +# create an instance of Usage from a dict +usage_from_dict = Usage.from_dict(usage_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Usages.md b/edu_sharing_openapi/docs/Usages.md new file mode 100644 index 00000000..e7056e45 --- /dev/null +++ b/edu_sharing_openapi/docs/Usages.md @@ -0,0 +1,29 @@ +# Usages + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**usages** | [**List[Usage]**](Usage.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.usages import Usages + +# TODO update the JSON string 
below +json = "{}" +# create an instance of Usages from a JSON string +usages_instance = Usages.from_json(json) +# print the JSON string representation of the object +print(Usages.to_json()) + +# convert the object into a dict +usages_dict = usages_instance.to_dict() +# create an instance of Usages from a dict +usages_from_dict = Usages.from_dict(usages_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/User.md b/edu_sharing_openapi/docs/User.md new file mode 100644 index 00000000..931854ac --- /dev/null +++ b/edu_sharing_openapi/docs/User.md @@ -0,0 +1,39 @@ +# User + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**properties** | **Dict[str, List[str]]** | | [optional] +**editable** | **bool** | | [optional] +**status** | [**UserStatus**](UserStatus.md) | | [optional] +**organizations** | [**List[Organization]**](Organization.md) | | [optional] +**quota** | [**UserQuota**](UserQuota.md) | | [optional] +**authority_name** | **str** | | +**authority_type** | **str** | | [optional] +**user_name** | **str** | | [optional] +**profile** | [**UserProfile**](UserProfile.md) | | [optional] +**home_folder** | [**NodeRef**](NodeRef.md) | | +**shared_folders** | [**List[NodeRef]**](NodeRef.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.user import User + +# TODO update the JSON string below +json = "{}" +# create an instance of User from a JSON string +user_instance = User.from_json(json) +# print the JSON string representation of the object +print(User.to_json()) + +# convert the object into a dict +user_dict = user_instance.to_dict() +# create an instance of User from a dict +user_from_dict = User.from_dict(user_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserCredential.md b/edu_sharing_openapi/docs/UserCredential.md new file mode 100644 index 00000000..a72e2ee4 --- /dev/null +++ b/edu_sharing_openapi/docs/UserCredential.md @@ -0,0 +1,30 @@ +# UserCredential + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**old_password** | **str** | | [optional] +**new_password** | **str** | | + +## Example + +```python +from edu_sharing_client.models.user_credential import UserCredential + +# TODO update the JSON string below +json = "{}" +# create an instance of UserCredential from a JSON string +user_credential_instance = UserCredential.from_json(json) +# print the JSON string representation of the object +print(UserCredential.to_json()) + +# convert the object into a dict +user_credential_dict = user_credential_instance.to_dict() +# create an instance of UserCredential from a dict +user_credential_from_dict = UserCredential.from_dict(user_credential_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserDataDTO.md b/edu_sharing_openapi/docs/UserDataDTO.md new file mode 100644 index 00000000..6f798421 --- /dev/null +++ b/edu_sharing_openapi/docs/UserDataDTO.md @@ -0,0 +1,32 @@ +# UserDataDTO + + +## Properties + +Name | Type | Description | Notes 
+------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**first_name** | **str** | | [optional] +**last_name** | **str** | | [optional] +**mailbox** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_data_dto import UserDataDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of UserDataDTO from a JSON string +user_data_dto_instance = UserDataDTO.from_json(json) +# print the JSON string representation of the object +print(UserDataDTO.to_json()) + +# convert the object into a dict +user_data_dto_dict = user_data_dto_instance.to_dict() +# create an instance of UserDataDTO from a dict +user_data_dto_from_dict = UserDataDTO.from_dict(user_data_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserEntries.md b/edu_sharing_openapi/docs/UserEntries.md new file mode 100644 index 00000000..5b7ee3bb --- /dev/null +++ b/edu_sharing_openapi/docs/UserEntries.md @@ -0,0 +1,30 @@ +# UserEntries + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**users** | [**List[UserSimple]**](UserSimple.md) | | +**pagination** | [**Pagination**](Pagination.md) | | + +## Example + +```python +from edu_sharing_client.models.user_entries import UserEntries + +# TODO update the JSON string below +json = "{}" +# create an instance of UserEntries from a JSON string +user_entries_instance = UserEntries.from_json(json) +# print the JSON string representation of the object +print(UserEntries.to_json()) + +# convert the object into a dict +user_entries_dict = user_entries_instance.to_dict() +# create an instance of UserEntries from a dict +user_entries_from_dict = UserEntries.from_dict(user_entries_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserEntry.md b/edu_sharing_openapi/docs/UserEntry.md new file mode 100644 index 00000000..24b294ad --- /dev/null +++ b/edu_sharing_openapi/docs/UserEntry.md @@ -0,0 +1,30 @@ +# UserEntry + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**edit_profile** | **bool** | | [optional] +**person** | [**User**](User.md) | | + +## Example + +```python +from edu_sharing_client.models.user_entry import UserEntry + +# TODO update the JSON string below +json = "{}" +# create an instance of UserEntry from a JSON string +user_entry_instance = UserEntry.from_json(json) +# print the JSON string representation of the object +print(UserEntry.to_json()) + +# convert the object into a dict +user_entry_dict = user_entry_instance.to_dict() +# create an instance of UserEntry from a dict +user_entry_from_dict = UserEntry.from_dict(user_entry_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserProfile.md b/edu_sharing_openapi/docs/UserProfile.md new file mode 100644 index 00000000..7891917f --- /dev/null +++ b/edu_sharing_openapi/docs/UserProfile.md @@ -0,0 +1,38 @@ +# UserProfile + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- 
| ------------- +**primary_affiliation** | **str** | | [optional] +**skills** | **List[str]** | | [optional] +**types** | **List[str]** | | [optional] +**vcard** | **str** | | [optional] +**type** | **List[str]** | | [optional] +**first_name** | **str** | | [optional] +**last_name** | **str** | | [optional] +**email** | **str** | | [optional] +**avatar** | **str** | | [optional] +**about** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_profile import UserProfile + +# TODO update the JSON string below +json = "{}" +# create an instance of UserProfile from a JSON string +user_profile_instance = UserProfile.from_json(json) +# print the JSON string representation of the object +print(UserProfile.to_json()) + +# convert the object into a dict +user_profile_dict = user_profile_instance.to_dict() +# create an instance of UserProfile from a dict +user_profile_from_dict = UserProfile.from_dict(user_profile_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserProfileAppAuth.md b/edu_sharing_openapi/docs/UserProfileAppAuth.md new file mode 100644 index 00000000..43007737 --- /dev/null +++ b/edu_sharing_openapi/docs/UserProfileAppAuth.md @@ -0,0 +1,39 @@ +# UserProfileAppAuth + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**primary_affiliation** | **str** | | [optional] +**skills** | **List[str]** | | [optional] +**types** | **List[str]** | | [optional] +**extended_attributes** | **Dict[str, List[str]]** | | [optional] +**vcard** | **str** | | [optional] +**type** | **List[str]** | | [optional] +**first_name** | **str** | | [optional] +**last_name** | **str** | | [optional] +**email** | **str** | | [optional] +**avatar** | **str** | | [optional] +**about** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_profile_app_auth import UserProfileAppAuth + +# TODO update the JSON string below +json = "{}" +# create an instance of UserProfileAppAuth from a JSON string +user_profile_app_auth_instance = UserProfileAppAuth.from_json(json) +# print the JSON string representation of the object +print(UserProfileAppAuth.to_json()) + +# convert the object into a dict +user_profile_app_auth_dict = user_profile_app_auth_instance.to_dict() +# create an instance of UserProfileAppAuth from a dict +user_profile_app_auth_from_dict = UserProfileAppAuth.from_dict(user_profile_app_auth_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserProfileEdit.md b/edu_sharing_openapi/docs/UserProfileEdit.md new file mode 100644 index 00000000..f1261869 --- /dev/null +++ b/edu_sharing_openapi/docs/UserProfileEdit.md @@ -0,0 +1,39 @@ +# UserProfileEdit + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**primary_affiliation** | **str** | | [optional] +**skills** | **List[str]** | | [optional] +**types** | **List[str]** | | [optional] +**size_quota** | **int** | | [optional] +**vcard** | **str** | | [optional] +**type** | **List[str]** | | [optional] +**first_name** | **str** | | [optional] +**last_name** | **str** | | [optional] +**email** | **str** | | [optional] +**avatar** | **str** | | 
[optional] +**about** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_profile_edit import UserProfileEdit + +# TODO update the JSON string below +json = "{}" +# create an instance of UserProfileEdit from a JSON string +user_profile_edit_instance = UserProfileEdit.from_json(json) +# print the JSON string representation of the object +print(UserProfileEdit.to_json()) + +# convert the object into a dict +user_profile_edit_dict = user_profile_edit_instance.to_dict() +# create an instance of UserProfileEdit from a dict +user_profile_edit_from_dict = UserProfileEdit.from_dict(user_profile_edit_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserQuota.md b/edu_sharing_openapi/docs/UserQuota.md new file mode 100644 index 00000000..5efdcefd --- /dev/null +++ b/edu_sharing_openapi/docs/UserQuota.md @@ -0,0 +1,31 @@ +# UserQuota + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**enabled** | **bool** | | [optional] +**size_current** | **int** | | [optional] +**size_quota** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_quota import UserQuota + +# TODO update the JSON string below +json = "{}" +# create an instance of UserQuota from a JSON string +user_quota_instance = UserQuota.from_json(json) +# print the JSON string representation of the object +print(UserQuota.to_json()) + +# convert the object into a dict +user_quota_dict = user_quota_instance.to_dict() +# create an instance of UserQuota from a dict +user_quota_from_dict = UserQuota.from_dict(user_quota_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserSimple.md b/edu_sharing_openapi/docs/UserSimple.md new file mode 100644 index 00000000..ecbb6dce --- /dev/null +++ b/edu_sharing_openapi/docs/UserSimple.md @@ -0,0 +1,36 @@ +# UserSimple + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**properties** | **Dict[str, List[str]]** | | [optional] +**editable** | **bool** | | [optional] +**status** | [**UserStatus**](UserStatus.md) | | [optional] +**organizations** | [**List[Organization]**](Organization.md) | | [optional] +**authority_name** | **str** | | +**authority_type** | **str** | | [optional] +**user_name** | **str** | | [optional] +**profile** | [**UserProfile**](UserProfile.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_simple import UserSimple + +# TODO update the JSON string below +json = "{}" +# create an instance of UserSimple from a JSON string +user_simple_instance = UserSimple.from_json(json) +# print the JSON string representation of the object +print(UserSimple.to_json()) + +# convert the object into a dict +user_simple_dict = user_simple_instance.to_dict() +# create an instance of UserSimple from a dict +user_simple_from_dict = UserSimple.from_dict(user_simple_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserStats.md b/edu_sharing_openapi/docs/UserStats.md new file mode 100644 index 
00000000..aac7bd0c --- /dev/null +++ b/edu_sharing_openapi/docs/UserStats.md @@ -0,0 +1,31 @@ +# UserStats + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node_count** | **int** | | [optional] +**node_count_cc** | **int** | | [optional] +**collection_count** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_stats import UserStats + +# TODO update the JSON string below +json = "{}" +# create an instance of UserStats from a JSON string +user_stats_instance = UserStats.from_json(json) +# print the JSON string representation of the object +print(UserStats.to_json()) + +# convert the object into a dict +user_stats_dict = user_stats_instance.to_dict() +# create an instance of UserStats from a dict +user_stats_from_dict = UserStats.from_dict(user_stats_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/UserStatus.md b/edu_sharing_openapi/docs/UserStatus.md new file mode 100644 index 00000000..bb1f48d6 --- /dev/null +++ b/edu_sharing_openapi/docs/UserStatus.md @@ -0,0 +1,30 @@ +# UserStatus + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**status** | **str** | | [optional] +**var_date** | **int** | | [optional] + +## Example + +```python +from edu_sharing_client.models.user_status import UserStatus + +# TODO update the JSON string below +json = "{}" +# create an instance of UserStatus from a JSON string +user_status_instance = UserStatus.from_json(json) +# print the JSON string representation of the object +print(UserStatus.to_json()) + +# convert the object into a dict +user_status_dict = user_status_instance.to_dict() +# create an instance of UserStatus from a dict +user_status_from_dict = UserStatus.from_dict(user_status_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Value.md b/edu_sharing_openapi/docs/Value.md new file mode 100644 index 00000000..3edf4c67 --- /dev/null +++ b/edu_sharing_openapi/docs/Value.md @@ -0,0 +1,30 @@ +# Value + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**value** | **str** | | +**count** | **int** | | + +## Example + +```python +from edu_sharing_client.models.value import Value + +# TODO update the JSON string below +json = "{}" +# create an instance of Value from a JSON string +value_instance = Value.from_json(json) +# print the JSON string representation of the object +print(Value.to_json()) + +# convert the object into a dict +value_dict = value_instance.to_dict() +# create an instance of Value from a dict +value_from_dict = Value.from_dict(value_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/ValueParameters.md b/edu_sharing_openapi/docs/ValueParameters.md new file mode 100644 index 00000000..1f06df50 --- /dev/null +++ b/edu_sharing_openapi/docs/ValueParameters.md @@ -0,0 +1,31 @@ +# ValueParameters + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- 
+**query** | **str** | | +**var_property** | **str** | | +**pattern** | **str** | prefix of the value (or \"-all-\" for all values) | + +## Example + +```python +from edu_sharing_client.models.value_parameters import ValueParameters + +# TODO update the JSON string below +json = "{}" +# create an instance of ValueParameters from a JSON string +value_parameters_instance = ValueParameters.from_json(json) +# print the JSON string representation of the object +print(ValueParameters.to_json()) + +# convert the object into a dict +value_parameters_dict = value_parameters_instance.to_dict() +# create an instance of ValueParameters from a dict +value_parameters_from_dict = ValueParameters.from_dict(value_parameters_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Values.md b/edu_sharing_openapi/docs/Values.md new file mode 100644 index 00000000..1ae8c639 --- /dev/null +++ b/edu_sharing_openapi/docs/Values.md @@ -0,0 +1,93 @@ +# Values + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**supported_languages** | **List[str]** | | [optional] +**extension** | **str** | | [optional] +**login_url** | **str** | | [optional] +**login_allow_local** | **bool** | | [optional] +**login_providers_url** | **str** | | [optional] +**login_provider_target_url** | **str** | | [optional] +**register** | [**Register**](Register.md) | | [optional] +**recover_password_url** | **str** | | [optional] +**imprint_url** | **str** | | [optional] +**privacy_information_url** | **str** | | [optional] +**help_url** | **str** | | [optional] +**whats_new_url** | **str** | | [optional] +**edit_profile_url** | **str** | | [optional] +**edit_profile** | **bool** | | [optional] +**workspace_columns** | **List[str]** | | [optional] +**workspace_shared_to_me_default_all** | **bool** | | [optional] +**hide_main_menu** | **List[str]** | | [optional] +**logout** | [**LogoutInfo**](LogoutInfo.md) | | [optional] +**menu_entries** | [**List[MenuEntry]**](MenuEntry.md) | | [optional] +**custom_options** | [**List[ContextMenuEntry]**](ContextMenuEntry.md) | | [optional] +**user_menu_overrides** | [**List[ContextMenuEntry]**](ContextMenuEntry.md) | | [optional] +**allowed_licenses** | **List[str]** | | [optional] +**custom_licenses** | [**List[License]**](License.md) | | [optional] +**workflow** | [**ConfigWorkflow**](ConfigWorkflow.md) | | [optional] +**license_dialog_on_upload** | **bool** | | [optional] +**node_report** | **bool** | | [optional] +**branding** | **bool** | | [optional] +**rating** | [**ConfigRating**](ConfigRating.md) | | [optional] +**publishing_notice** | **bool** | | [optional] +**site_title** | **str** | | [optional] +**user_display_name** | **str** | | [optional] +**user_secondary_display_name** | **str** | | [optional] +**user_affiliation** | **bool** | | [optional] +**default_username** | **str** | | [optional] +**default_password** | **str** | | [optional] +**banner** | [**Banner**](Banner.md) | | [optional] +**available_mds** | [**List[AvailableMds]**](AvailableMds.md) | | [optional] +**available_repositories** | **List[str]** | | [optional] +**search_view_type** | **int** | | [optional] +**workspace_view_type** | **int** | | [optional] +**items_per_request** | **int** | | [optional] +**rendering** | [**Rendering**](Rendering.md) | | [optional] +**session_expired_dialog** | **object** | 
| [optional] +**login_default_location** | **str** | | [optional] +**search_group_results** | **bool** | | [optional] +**mainnav** | [**Mainnav**](Mainnav.md) | | [optional] +**search_sidenav_mode** | **str** | | [optional] +**guest** | [**Guest**](Guest.md) | | [optional] +**collections** | [**Collections**](Collections.md) | | [optional] +**license_agreement** | [**LicenseAgreement**](LicenseAgreement.md) | | [optional] +**services** | [**Services**](Services.md) | | [optional] +**help_menu_options** | [**List[HelpMenuOptions]**](HelpMenuOptions.md) | | [optional] +**images** | [**List[Image]**](Image.md) | | [optional] +**icons** | [**List[FontIcon]**](FontIcon.md) | | [optional] +**stream** | [**Stream**](Stream.md) | | [optional] +**admin** | [**Admin**](Admin.md) | | [optional] +**simple_edit** | [**SimpleEdit**](SimpleEdit.md) | | [optional] +**frontpage** | [**ConfigFrontpage**](ConfigFrontpage.md) | | [optional] +**upload** | [**ConfigUpload**](ConfigUpload.md) | | [optional] +**publish** | [**ConfigPublish**](ConfigPublish.md) | | [optional] +**remote** | [**ConfigRemote**](ConfigRemote.md) | | [optional] +**custom_css** | **str** | | [optional] +**theme_colors** | [**ConfigThemeColors**](ConfigThemeColors.md) | | [optional] +**privacy** | [**ConfigPrivacy**](ConfigPrivacy.md) | | [optional] +**tutorial** | [**ConfigTutorial**](ConfigTutorial.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.values import Values + +# TODO update the JSON string below +json = "{}" +# create an instance of Values from a JSON string +values_instance = Values.from_json(json) +# print the JSON string representation of the object +print(Values.to_json()) + +# convert the object into a dict +values_dict = values_instance.to_dict() +# create an instance of Values from a dict +values_from_dict = Values.from_dict(values_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Variables.md b/edu_sharing_openapi/docs/Variables.md new file mode 100644 index 00000000..43e8b4e4 --- /dev/null +++ b/edu_sharing_openapi/docs/Variables.md @@ -0,0 +1,30 @@ +# Variables + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**var_global** | **Dict[str, str]** | | [optional] +**current** | **Dict[str, str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.variables import Variables + +# TODO update the JSON string below +json = "{}" +# create an instance of Variables from a JSON string +variables_instance = Variables.from_json(json) +# print the JSON string representation of the object +print(Variables.to_json()) + +# convert the object into a dict +variables_dict = variables_instance.to_dict() +# create an instance of Variables from a dict +variables_from_dict = Variables.from_dict(variables_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/Version.md b/edu_sharing_openapi/docs/Version.md new file mode 100644 index 00000000..0a79c652 --- /dev/null +++ b/edu_sharing_openapi/docs/Version.md @@ -0,0 +1,34 @@ +# Version + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**full** | **str** | | [optional] +**major** | 
**str** | | [optional] +**minor** | **str** | | [optional] +**patch** | **str** | | [optional] +**qualifier** | **str** | | [optional] +**build** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.version import Version + +# TODO update the JSON string below +json = "{}" +# create an instance of Version from a JSON string +version_instance = Version.from_json(json) +# print the JSON string representation of the object +print(Version.to_json()) + +# convert the object into a dict +version_dict = version_instance.to_dict() +# create an instance of Version from a dict +version_from_dict = Version.from_dict(version_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/VersionBuild.md b/edu_sharing_openapi/docs/VersionBuild.md new file mode 100644 index 00000000..222f7850 --- /dev/null +++ b/edu_sharing_openapi/docs/VersionBuild.md @@ -0,0 +1,29 @@ +# VersionBuild + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**timestamp** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.version_build import VersionBuild + +# TODO update the JSON string below +json = "{}" +# create an instance of VersionBuild from a JSON string +version_build_instance = VersionBuild.from_json(json) +# print the JSON string representation of the object +print(VersionBuild.to_json()) + +# convert the object into a dict +version_build_dict = version_build_instance.to_dict() +# create an instance of VersionBuild from a dict +version_build_from_dict = VersionBuild.from_dict(version_build_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/VersionGit.md b/edu_sharing_openapi/docs/VersionGit.md new file mode 100644 index 00000000..32225c5a --- /dev/null +++ b/edu_sharing_openapi/docs/VersionGit.md @@ -0,0 +1,30 @@ +# VersionGit + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**branch** | **str** | | [optional] +**commit** | [**VersionGitCommit**](VersionGitCommit.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.version_git import VersionGit + +# TODO update the JSON string below +json = "{}" +# create an instance of VersionGit from a JSON string +version_git_instance = VersionGit.from_json(json) +# print the JSON string representation of the object +print(VersionGit.to_json()) + +# convert the object into a dict +version_git_dict = version_git_instance.to_dict() +# create an instance of VersionGit from a dict +version_git_from_dict = VersionGit.from_dict(version_git_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/VersionGitCommit.md b/edu_sharing_openapi/docs/VersionGitCommit.md new file mode 100644 index 00000000..3d700e8a --- /dev/null +++ b/edu_sharing_openapi/docs/VersionGitCommit.md @@ -0,0 +1,30 @@ +# VersionGitCommit + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**timestamp** | 
[**VersionTimestamp**](VersionTimestamp.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.version_git_commit import VersionGitCommit + +# TODO update the JSON string below +json = "{}" +# create an instance of VersionGitCommit from a JSON string +version_git_commit_instance = VersionGitCommit.from_json(json) +# print the JSON string representation of the object +print(VersionGitCommit.to_json()) + +# convert the object into a dict +version_git_commit_dict = version_git_commit_instance.to_dict() +# create an instance of VersionGitCommit from a dict +version_git_commit_from_dict = VersionGitCommit.from_dict(version_git_commit_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/VersionMaven.md b/edu_sharing_openapi/docs/VersionMaven.md new file mode 100644 index 00000000..f0d6a06d --- /dev/null +++ b/edu_sharing_openapi/docs/VersionMaven.md @@ -0,0 +1,30 @@ +# VersionMaven + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**bom** | **Dict[str, str]** | | [optional] +**project** | [**VersionProject**](VersionProject.md) | | [optional] + +## Example + +```python +from edu_sharing_client.models.version_maven import VersionMaven + +# TODO update the JSON string below +json = "{}" +# create an instance of VersionMaven from a JSON string +version_maven_instance = VersionMaven.from_json(json) +# print the JSON string representation of the object +print(VersionMaven.to_json()) + +# convert the object into a dict +version_maven_dict = version_maven_instance.to_dict() +# create an instance of VersionMaven from a dict +version_maven_from_dict = VersionMaven.from_dict(version_maven_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/VersionProject.md b/edu_sharing_openapi/docs/VersionProject.md new file mode 100644 index 00000000..3dd52e58 --- /dev/null +++ b/edu_sharing_openapi/docs/VersionProject.md @@ -0,0 +1,31 @@ +# VersionProject + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**artifact_id** | **str** | | [optional] +**group_id** | **str** | | [optional] +**version** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.version_project import VersionProject + +# TODO update the JSON string below +json = "{}" +# create an instance of VersionProject from a JSON string +version_project_instance = VersionProject.from_json(json) +# print the JSON string representation of the object +print(VersionProject.to_json()) + +# convert the object into a dict +version_project_dict = version_project_instance.to_dict() +# create an instance of VersionProject from a dict +version_project_from_dict = VersionProject.from_dict(version_project_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/VersionTimestamp.md b/edu_sharing_openapi/docs/VersionTimestamp.md new file mode 100644 index 00000000..371e05c4 --- /dev/null +++ b/edu_sharing_openapi/docs/VersionTimestamp.md @@ -0,0 +1,29 @@ +# VersionTimestamp + + +## Properties + +Name | Type | 
Description | Notes +------------ | ------------- | ------------- | ------------- +**datetime** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.version_timestamp import VersionTimestamp + +# TODO update the JSON string below +json = "{}" +# create an instance of VersionTimestamp from a JSON string +version_timestamp_instance = VersionTimestamp.from_json(json) +# print the JSON string representation of the object +print(VersionTimestamp.to_json()) + +# convert the object into a dict +version_timestamp_dict = version_timestamp_instance.to_dict() +# create an instance of VersionTimestamp from a dict +version_timestamp_from_dict = VersionTimestamp.from_dict(version_timestamp_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/WebsiteInformation.md b/edu_sharing_openapi/docs/WebsiteInformation.md new file mode 100644 index 00000000..272c0368 --- /dev/null +++ b/edu_sharing_openapi/docs/WebsiteInformation.md @@ -0,0 +1,34 @@ +# WebsiteInformation + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**duplicate_nodes** | [**List[Node]**](Node.md) | | [optional] +**title** | **str** | | [optional] +**page** | **str** | | [optional] +**description** | **str** | | [optional] +**license** | **str** | | [optional] +**keywords** | **List[str]** | | [optional] + +## Example + +```python +from edu_sharing_client.models.website_information import WebsiteInformation + +# TODO update the JSON string below +json = "{}" +# create an instance of WebsiteInformation from a JSON string +website_information_instance = WebsiteInformation.from_json(json) +# print the JSON string representation of the object +print(WebsiteInformation.to_json()) + +# convert the object into a dict +website_information_dict = website_information_instance.to_dict() +# create an instance of WebsiteInformation from a dict +website_information_from_dict = WebsiteInformation.from_dict(website_information_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/WidgetDataDTO.md b/edu_sharing_openapi/docs/WidgetDataDTO.md new file mode 100644 index 00000000..e30f0f13 --- /dev/null +++ b/edu_sharing_openapi/docs/WidgetDataDTO.md @@ -0,0 +1,30 @@ +# WidgetDataDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**id** | **str** | | [optional] +**caption** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.widget_data_dto import WidgetDataDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of WidgetDataDTO from a JSON string +widget_data_dto_instance = WidgetDataDTO.from_json(json) +# print the JSON string representation of the object +print(WidgetDataDTO.to_json()) + +# convert the object into a dict +widget_data_dto_dict = widget_data_dto_instance.to_dict() +# create an instance of WidgetDataDTO from a dict +widget_data_dto_from_dict = WidgetDataDTO.from_dict(widget_data_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git 
a/edu_sharing_openapi/docs/WorkflowEventDTO.md b/edu_sharing_openapi/docs/WorkflowEventDTO.md new file mode 100644 index 00000000..05c1018d --- /dev/null +++ b/edu_sharing_openapi/docs/WorkflowEventDTO.md @@ -0,0 +1,31 @@ +# WorkflowEventDTO + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**node** | [**NodeDataDTO**](NodeDataDTO.md) | | [optional] +**workflow_status** | **str** | | [optional] +**user_comment** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.workflow_event_dto import WorkflowEventDTO + +# TODO update the JSON string below +json = "{}" +# create an instance of WorkflowEventDTO from a JSON string +workflow_event_dto_instance = WorkflowEventDTO.from_json(json) +# print the JSON string representation of the object +print(WorkflowEventDTO.to_json()) + +# convert the object into a dict +workflow_event_dto_dict = workflow_event_dto_instance.to_dict() +# create an instance of WorkflowEventDTO from a dict +workflow_event_dto_from_dict = WorkflowEventDTO.from_dict(workflow_event_dto_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/docs/WorkflowHistory.md b/edu_sharing_openapi/docs/WorkflowHistory.md new file mode 100644 index 00000000..591539aa --- /dev/null +++ b/edu_sharing_openapi/docs/WorkflowHistory.md @@ -0,0 +1,33 @@ +# WorkflowHistory + + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**time** | **int** | | [optional] +**editor** | [**UserSimple**](UserSimple.md) | | [optional] +**receiver** | [**List[Authority]**](Authority.md) | | [optional] +**status** | **str** | | [optional] +**comment** | **str** | | [optional] + +## Example + +```python +from edu_sharing_client.models.workflow_history import WorkflowHistory + +# TODO update the JSON string below +json = "{}" +# create an instance of WorkflowHistory from a JSON string +workflow_history_instance = WorkflowHistory.from_json(json) +# print the JSON string representation of the object +print(WorkflowHistory.to_json()) + +# convert the object into a dict +workflow_history_dict = workflow_history_instance.to_dict() +# create an instance of WorkflowHistory from a dict +workflow_history_from_dict = WorkflowHistory.from_dict(workflow_history_dict) +``` +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/edu_sharing_openapi/edu_sharing_client/__init__.py b/edu_sharing_openapi/edu_sharing_client/__init__.py new file mode 100644 index 00000000..7a58c140 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/__init__.py @@ -0,0 +1,340 @@ +# coding: utf-8 + +# flake8: noqa + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +__version__ = "1.0.0" + +# import apis into sdk package +from edu_sharing_client.api.about_api import ABOUTApi +from edu_sharing_client.api.adminv1_api import ADMINV1Api +from edu_sharing_client.api.archivev1_api import ARCHIVEV1Api +from edu_sharing_client.api.authenticationv1_api import AUTHENTICATIONV1Api +from edu_sharing_client.api.bulkv1_api import BULKV1Api +from edu_sharing_client.api.clientutilsv1_api import CLIENTUTILSV1Api +from edu_sharing_client.api.collectionv1_api import COLLECTIONV1Api +from edu_sharing_client.api.commentv1_api import COMMENTV1Api +from edu_sharing_client.api.configv1_api import CONFIGV1Api +from edu_sharing_client.api.connectorv1_api import CONNECTORV1Api +from edu_sharing_client.api.feedbackv1_api import FEEDBACKV1Api +from edu_sharing_client.api.iamv1_api import IAMV1Api +from edu_sharing_client.api.knowledgev1_api import KNOWLEDGEV1Api +from edu_sharing_client.api.lti_platform_v13_api import LTIPlatformV13Api +from edu_sharing_client.api.ltiv13_api import LTIV13Api +from edu_sharing_client.api.mdsv1_api import MDSV1Api +from edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api +from edu_sharing_client.api.networkv1_api import NETWORKV1Api +from edu_sharing_client.api.nodev1_api import NODEV1Api +from edu_sharing_client.api.notificationv1_api import NOTIFICATIONV1Api +from edu_sharing_client.api.organizationv1_api import ORGANIZATIONV1Api +from edu_sharing_client.api.ratingv1_api import RATINGV1Api +from edu_sharing_client.api.registerv1_api import REGISTERV1Api +from edu_sharing_client.api.relationv1_api import RELATIONV1Api +from edu_sharing_client.api.renderingv1_api import RENDERINGV1Api +from edu_sharing_client.api.searchv1_api import SEARCHV1Api +from edu_sharing_client.api.sharingv1_api import SHARINGV1Api +from edu_sharing_client.api.statisticv1_api import STATISTICV1Api +from edu_sharing_client.api.streamv1_api import STREAMV1Api +from edu_sharing_client.api.toolv1_api import TOOLV1Api +from edu_sharing_client.api.trackingv1_api import TRACKINGV1Api +from edu_sharing_client.api.usagev1_api import USAGEV1Api + +# import ApiClient +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.api_client import ApiClient +from edu_sharing_client.configuration import Configuration +from edu_sharing_client.exceptions import OpenApiException +from edu_sharing_client.exceptions import ApiTypeError +from edu_sharing_client.exceptions import ApiValueError +from edu_sharing_client.exceptions import ApiKeyError +from edu_sharing_client.exceptions import ApiAttributeError +from edu_sharing_client.exceptions import ApiException + +# import models into sdk package +from edu_sharing_client.models.ace import ACE +from edu_sharing_client.models.acl import ACL +from edu_sharing_client.models.about import About +from edu_sharing_client.models.about_service import AboutService +from edu_sharing_client.models.abstract_entries import AbstractEntries +from edu_sharing_client.models.add_to_collection_event_dto import AddToCollectionEventDTO +from edu_sharing_client.models.admin import Admin +from edu_sharing_client.models.admin_statistics import AdminStatistics +from edu_sharing_client.models.application import Application +from edu_sharing_client.models.audience import Audience +from edu_sharing_client.models.authentication_token import AuthenticationToken +from edu_sharing_client.models.authority import Authority +from edu_sharing_client.models.authority_entries import AuthorityEntries +from 
edu_sharing_client.models.available_mds import AvailableMds +from edu_sharing_client.models.banner import Banner +from edu_sharing_client.models.cache_cluster import CacheCluster +from edu_sharing_client.models.cache_info import CacheInfo +from edu_sharing_client.models.cache_member import CacheMember +from edu_sharing_client.models.catalog import Catalog +from edu_sharing_client.models.collection import Collection +from edu_sharing_client.models.collection_counts import CollectionCounts +from edu_sharing_client.models.collection_dto import CollectionDTO +from edu_sharing_client.models.collection_entries import CollectionEntries +from edu_sharing_client.models.collection_entry import CollectionEntry +from edu_sharing_client.models.collection_options import CollectionOptions +from edu_sharing_client.models.collection_proposal_entries import CollectionProposalEntries +from edu_sharing_client.models.collection_reference import CollectionReference +from edu_sharing_client.models.collections import Collections +from edu_sharing_client.models.collections_result import CollectionsResult +from edu_sharing_client.models.comment import Comment +from edu_sharing_client.models.comment_event_dto import CommentEventDTO +from edu_sharing_client.models.comments import Comments +from edu_sharing_client.models.condition import Condition +from edu_sharing_client.models.config import Config +from edu_sharing_client.models.config_frontpage import ConfigFrontpage +from edu_sharing_client.models.config_privacy import ConfigPrivacy +from edu_sharing_client.models.config_publish import ConfigPublish +from edu_sharing_client.models.config_rating import ConfigRating +from edu_sharing_client.models.config_remote import ConfigRemote +from edu_sharing_client.models.config_theme_color import ConfigThemeColor +from edu_sharing_client.models.config_theme_colors import ConfigThemeColors +from edu_sharing_client.models.config_tutorial import ConfigTutorial +from edu_sharing_client.models.config_upload import ConfigUpload +from edu_sharing_client.models.config_workflow import ConfigWorkflow +from edu_sharing_client.models.config_workflow_list import ConfigWorkflowList +from edu_sharing_client.models.connector import Connector +from edu_sharing_client.models.connector_file_type import ConnectorFileType +from edu_sharing_client.models.connector_list import ConnectorList +from edu_sharing_client.models.content import Content +from edu_sharing_client.models.context_menu_entry import ContextMenuEntry +from edu_sharing_client.models.contributor import Contributor +from edu_sharing_client.models.counts import Counts +from edu_sharing_client.models.create import Create +from edu_sharing_client.models.create_usage import CreateUsage +from edu_sharing_client.models.delete_option import DeleteOption +from edu_sharing_client.models.dynamic_config import DynamicConfig +from edu_sharing_client.models.dynamic_registration_token import DynamicRegistrationToken +from edu_sharing_client.models.dynamic_registration_tokens import DynamicRegistrationTokens +from edu_sharing_client.models.element import Element +from edu_sharing_client.models.error_response import ErrorResponse +from edu_sharing_client.models.excel_result import ExcelResult +from edu_sharing_client.models.facet import Facet +from edu_sharing_client.models.feature_info import FeatureInfo +from edu_sharing_client.models.feedback_data import FeedbackData +from edu_sharing_client.models.feedback_result import FeedbackResult +from edu_sharing_client.models.filter import Filter +from 
edu_sharing_client.models.filter_entry import FilterEntry +from edu_sharing_client.models.font_icon import FontIcon +from edu_sharing_client.models.frontpage import Frontpage +from edu_sharing_client.models.general import General +from edu_sharing_client.models.geo import Geo +from edu_sharing_client.models.group import Group +from edu_sharing_client.models.group_entries import GroupEntries +from edu_sharing_client.models.group_entry import GroupEntry +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.models.group_signup_details import GroupSignupDetails +from edu_sharing_client.models.guest import Guest +from edu_sharing_client.models.handle_param import HandleParam +from edu_sharing_client.models.help_menu_options import HelpMenuOptions +from edu_sharing_client.models.home_folder_options import HomeFolderOptions +from edu_sharing_client.models.icon import Icon +from edu_sharing_client.models.image import Image +from edu_sharing_client.models.interface import Interface +from edu_sharing_client.models.invite_event_dto import InviteEventDTO +from edu_sharing_client.models.json_object import JSONObject +from edu_sharing_client.models.job import Job +from edu_sharing_client.models.job_builder import JobBuilder +from edu_sharing_client.models.job_data_map import JobDataMap +from edu_sharing_client.models.job_description import JobDescription +from edu_sharing_client.models.job_detail import JobDetail +from edu_sharing_client.models.job_detail_job_data_map import JobDetailJobDataMap +from edu_sharing_client.models.job_entry import JobEntry +from edu_sharing_client.models.job_field_description import JobFieldDescription +from edu_sharing_client.models.job_info import JobInfo +from edu_sharing_client.models.job_key import JobKey +from edu_sharing_client.models.key_value_pair import KeyValuePair +from edu_sharing_client.models.lti_platform_configuration import LTIPlatformConfiguration +from edu_sharing_client.models.lti_session import LTISession +from edu_sharing_client.models.lti_tool_configuration import LTIToolConfiguration +from edu_sharing_client.models.language import Language +from edu_sharing_client.models.level import Level +from edu_sharing_client.models.license import License +from edu_sharing_client.models.license_agreement import LicenseAgreement +from edu_sharing_client.models.license_agreement_node import LicenseAgreementNode +from edu_sharing_client.models.licenses import Licenses +from edu_sharing_client.models.location import Location +from edu_sharing_client.models.log_entry import LogEntry +from edu_sharing_client.models.logger_config_result import LoggerConfigResult +from edu_sharing_client.models.login import Login +from edu_sharing_client.models.login_credentials import LoginCredentials +from edu_sharing_client.models.logout_info import LogoutInfo +from edu_sharing_client.models.mainnav import Mainnav +from edu_sharing_client.models.manual_registration_data import ManualRegistrationData +from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult +from edu_sharing_client.models.mds import Mds +from edu_sharing_client.models.mds_column import MdsColumn +from edu_sharing_client.models.mds_entries import MdsEntries +from edu_sharing_client.models.mds_group import MdsGroup +from edu_sharing_client.models.mds_list import MdsList +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria +from edu_sharing_client.models.mds_sort import MdsSort +from edu_sharing_client.models.mds_sort_column import 
MdsSortColumn +from edu_sharing_client.models.mds_sort_default import MdsSortDefault +from edu_sharing_client.models.mds_subwidget import MdsSubwidget +from edu_sharing_client.models.mds_value import MdsValue +from edu_sharing_client.models.mds_view import MdsView +from edu_sharing_client.models.mds_widget import MdsWidget +from edu_sharing_client.models.mds_widget_condition import MdsWidgetCondition +from edu_sharing_client.models.mediacenter import Mediacenter +from edu_sharing_client.models.mediacenter_profile_extension import MediacenterProfileExtension +from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult +from edu_sharing_client.models.menu_entry import MenuEntry +from edu_sharing_client.models.message import Message +from edu_sharing_client.models.metadata_set_info import MetadataSetInfo +from edu_sharing_client.models.metadata_suggestion_event_dto import MetadataSuggestionEventDTO +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.node_collection_proposal_count import NodeCollectionProposalCount +from edu_sharing_client.models.node_data import NodeData +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.models.node_issue_event_dto import NodeIssueEventDTO +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink +from edu_sharing_client.models.node_locked import NodeLocked +from edu_sharing_client.models.node_permission_entry import NodePermissionEntry +from edu_sharing_client.models.node_permissions import NodePermissions +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.node_relation import NodeRelation +from edu_sharing_client.models.node_remote import NodeRemote +from edu_sharing_client.models.node_share import NodeShare +from edu_sharing_client.models.node_stats import NodeStats +from edu_sharing_client.models.node_text import NodeText +from edu_sharing_client.models.node_version import NodeVersion +from edu_sharing_client.models.node_version_entries import NodeVersionEntries +from edu_sharing_client.models.node_version_entry import NodeVersionEntry +from edu_sharing_client.models.node_version_ref import NodeVersionRef +from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries +from edu_sharing_client.models.notification_config import NotificationConfig +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.notification_intervals import NotificationIntervals +from edu_sharing_client.models.notification_response_page import NotificationResponsePage +from edu_sharing_client.models.notify_entry import NotifyEntry +from edu_sharing_client.models.open_id_configuration import OpenIdConfiguration +from edu_sharing_client.models.open_id_registration_result import OpenIdRegistrationResult +from edu_sharing_client.models.organisations_import_result import OrganisationsImportResult +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.models.organization_entries import OrganizationEntries +from edu_sharing_client.models.pageable import Pageable +from edu_sharing_client.models.pagination import Pagination +from edu_sharing_client.models.parameters import Parameters +from edu_sharing_client.models.parent_entries import ParentEntries +from edu_sharing_client.models.person import Person +from 
edu_sharing_client.models.person_delete_options import PersonDeleteOptions +from edu_sharing_client.models.person_delete_result import PersonDeleteResult +from edu_sharing_client.models.person_report import PersonReport +from edu_sharing_client.models.plugin_info import PluginInfo +from edu_sharing_client.models.plugin_status import PluginStatus +from edu_sharing_client.models.preferences import Preferences +from edu_sharing_client.models.preview import Preview +from edu_sharing_client.models.profile import Profile +from edu_sharing_client.models.profile_settings import ProfileSettings +from edu_sharing_client.models.propose_for_collection_event_dto import ProposeForCollectionEventDTO +from edu_sharing_client.models.provider import Provider +from edu_sharing_client.models.query import Query +from edu_sharing_client.models.rating_data import RatingData +from edu_sharing_client.models.rating_details import RatingDetails +from edu_sharing_client.models.rating_event_dto import RatingEventDTO +from edu_sharing_client.models.rating_history import RatingHistory +from edu_sharing_client.models.reference_entries import ReferenceEntries +from edu_sharing_client.models.register import Register +from edu_sharing_client.models.register_exists import RegisterExists +from edu_sharing_client.models.register_information import RegisterInformation +from edu_sharing_client.models.registration_url import RegistrationUrl +from edu_sharing_client.models.relation_data import RelationData +from edu_sharing_client.models.remote import Remote +from edu_sharing_client.models.remote_auth_description import RemoteAuthDescription +from edu_sharing_client.models.rendering import Rendering +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry +from edu_sharing_client.models.rendering_gdpr import RenderingGdpr +from edu_sharing_client.models.repo import Repo +from edu_sharing_client.models.repo_entries import RepoEntries +from edu_sharing_client.models.repository_config import RepositoryConfig +from edu_sharing_client.models.repository_version_info import RepositoryVersionInfo +from edu_sharing_client.models.restore_result import RestoreResult +from edu_sharing_client.models.restore_results import RestoreResults +from edu_sharing_client.models.search_parameters import SearchParameters +from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.models.search_result_elastic import SearchResultElastic +from edu_sharing_client.models.search_result_lrmi import SearchResultLrmi +from edu_sharing_client.models.search_result_node import SearchResultNode +from edu_sharing_client.models.search_v_card import SearchVCard +from edu_sharing_client.models.server_update_info import ServerUpdateInfo +from edu_sharing_client.models.service import Service +from edu_sharing_client.models.service_instance import ServiceInstance +from edu_sharing_client.models.service_version import ServiceVersion +from edu_sharing_client.models.services import Services +from edu_sharing_client.models.shared_folder_options import SharedFolderOptions +from edu_sharing_client.models.sharing_info import SharingInfo +from edu_sharing_client.models.simple_edit import SimpleEdit +from edu_sharing_client.models.simple_edit_global_groups import SimpleEditGlobalGroups +from edu_sharing_client.models.simple_edit_organization import SimpleEditOrganization +from edu_sharing_client.models.sort import Sort +from 
edu_sharing_client.models.statistic_entity import StatisticEntity +from edu_sharing_client.models.statistic_entry import StatisticEntry +from edu_sharing_client.models.statistics import Statistics +from edu_sharing_client.models.statistics_global import StatisticsGlobal +from edu_sharing_client.models.statistics_group import StatisticsGroup +from edu_sharing_client.models.statistics_key_group import StatisticsKeyGroup +from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup +from edu_sharing_client.models.statistics_user import StatisticsUser +from edu_sharing_client.models.stored_service import StoredService +from edu_sharing_client.models.stream import Stream +from edu_sharing_client.models.stream_entry import StreamEntry +from edu_sharing_client.models.stream_entry_input import StreamEntryInput +from edu_sharing_client.models.stream_list import StreamList +from edu_sharing_client.models.sub_group_item import SubGroupItem +from edu_sharing_client.models.suggest import Suggest +from edu_sharing_client.models.suggestion import Suggestion +from edu_sharing_client.models.suggestion_param import SuggestionParam +from edu_sharing_client.models.suggestions import Suggestions +from edu_sharing_client.models.tool import Tool +from edu_sharing_client.models.tools import Tools +from edu_sharing_client.models.tracking import Tracking +from edu_sharing_client.models.tracking_authority import TrackingAuthority +from edu_sharing_client.models.tracking_node import TrackingNode +from edu_sharing_client.models.upload_result import UploadResult +from edu_sharing_client.models.usage import Usage +from edu_sharing_client.models.usages import Usages +from edu_sharing_client.models.user import User +from edu_sharing_client.models.user_credential import UserCredential +from edu_sharing_client.models.user_data_dto import UserDataDTO +from edu_sharing_client.models.user_entries import UserEntries +from edu_sharing_client.models.user_entry import UserEntry +from edu_sharing_client.models.user_profile import UserProfile +from edu_sharing_client.models.user_profile_app_auth import UserProfileAppAuth +from edu_sharing_client.models.user_profile_edit import UserProfileEdit +from edu_sharing_client.models.user_quota import UserQuota +from edu_sharing_client.models.user_simple import UserSimple +from edu_sharing_client.models.user_stats import UserStats +from edu_sharing_client.models.user_status import UserStatus +from edu_sharing_client.models.value import Value +from edu_sharing_client.models.value_parameters import ValueParameters +from edu_sharing_client.models.values import Values +from edu_sharing_client.models.variables import Variables +from edu_sharing_client.models.version import Version +from edu_sharing_client.models.version_build import VersionBuild +from edu_sharing_client.models.version_git import VersionGit +from edu_sharing_client.models.version_git_commit import VersionGitCommit +from edu_sharing_client.models.version_maven import VersionMaven +from edu_sharing_client.models.version_project import VersionProject +from edu_sharing_client.models.version_timestamp import VersionTimestamp +from edu_sharing_client.models.website_information import WebsiteInformation +from edu_sharing_client.models.widget_data_dto import WidgetDataDTO +from edu_sharing_client.models.workflow_event_dto import WorkflowEventDTO +from edu_sharing_client.models.workflow_history import WorkflowHistory diff --git a/edu_sharing_openapi/edu_sharing_client/api/__init__.py 
b/edu_sharing_openapi/edu_sharing_client/api/__init__.py new file mode 100644 index 00000000..7f5b0978 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/__init__.py @@ -0,0 +1,36 @@ +# flake8: noqa + +# import apis into api package +from edu_sharing_client.api.about_api import ABOUTApi +from edu_sharing_client.api.adminv1_api import ADMINV1Api +from edu_sharing_client.api.archivev1_api import ARCHIVEV1Api +from edu_sharing_client.api.authenticationv1_api import AUTHENTICATIONV1Api +from edu_sharing_client.api.bulkv1_api import BULKV1Api +from edu_sharing_client.api.clientutilsv1_api import CLIENTUTILSV1Api +from edu_sharing_client.api.collectionv1_api import COLLECTIONV1Api +from edu_sharing_client.api.commentv1_api import COMMENTV1Api +from edu_sharing_client.api.configv1_api import CONFIGV1Api +from edu_sharing_client.api.connectorv1_api import CONNECTORV1Api +from edu_sharing_client.api.feedbackv1_api import FEEDBACKV1Api +from edu_sharing_client.api.iamv1_api import IAMV1Api +from edu_sharing_client.api.knowledgev1_api import KNOWLEDGEV1Api +from edu_sharing_client.api.lti_platform_v13_api import LTIPlatformV13Api +from edu_sharing_client.api.ltiv13_api import LTIV13Api +from edu_sharing_client.api.mdsv1_api import MDSV1Api +from edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api +from edu_sharing_client.api.networkv1_api import NETWORKV1Api +from edu_sharing_client.api.nodev1_api import NODEV1Api +from edu_sharing_client.api.notificationv1_api import NOTIFICATIONV1Api +from edu_sharing_client.api.organizationv1_api import ORGANIZATIONV1Api +from edu_sharing_client.api.ratingv1_api import RATINGV1Api +from edu_sharing_client.api.registerv1_api import REGISTERV1Api +from edu_sharing_client.api.relationv1_api import RELATIONV1Api +from edu_sharing_client.api.renderingv1_api import RENDERINGV1Api +from edu_sharing_client.api.searchv1_api import SEARCHV1Api +from edu_sharing_client.api.sharingv1_api import SHARINGV1Api +from edu_sharing_client.api.statisticv1_api import STATISTICV1Api +from edu_sharing_client.api.streamv1_api import STREAMV1Api +from edu_sharing_client.api.toolv1_api import TOOLV1Api +from edu_sharing_client.api.trackingv1_api import TRACKINGV1Api +from edu_sharing_client.api.usagev1_api import USAGEV1Api + diff --git a/edu_sharing_openapi/edu_sharing_client/api/about_api.py b/edu_sharing_openapi/edu_sharing_client/api/about_api.py new file mode 100644 index 00000000..1b5a3eac --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/about_api.py @@ -0,0 +1,824 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import StrictInt, StrictStr, field_validator +from typing import Optional +from edu_sharing_client.models.about import About +from edu_sharing_client.models.licenses import Licenses + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class ABOUTApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def about( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> About: + """Discover the API. + + Get all services provided by this API. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._about_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "About", + '401': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def about_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[About]: + """Discover the API. + + Get all services provided by this API. + + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._about_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "About", + '401': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def about_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Discover the API. + + Get all services provided by this API. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._about_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "About", + '401': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _about_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/_about', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def licenses( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Licenses: + """License information. + + Get information about used 3rd-party licenses. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._licenses_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Licenses", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def licenses_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Licenses]: + """License information. + + Get information about used 3rd-party licenses. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._licenses_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Licenses", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def licenses_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """License information. + + Get information about used 3rd-party licenses. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._licenses_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Licenses", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _licenses_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/_about/licenses', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def status( + self, + mode: StrictStr, + timeout_seconds: Optional[StrictInt] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """status of repo services + + returns http status 200 when ok + + :param mode: (required) + :type mode: str + :param timeout_seconds: + :type timeout_seconds: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._status_serialize( + mode=mode, + timeout_seconds=timeout_seconds, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def status_with_http_info( + self, + mode: StrictStr, + timeout_seconds: Optional[StrictInt] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """status of repo services + + returns http status 200 when ok + + :param mode: (required) + :type mode: str + :param timeout_seconds: + :type timeout_seconds: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._status_serialize( + mode=mode, + timeout_seconds=timeout_seconds, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def status_without_preload_content( + self, + mode: StrictStr, + timeout_seconds: Optional[StrictInt] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """status of repo services + + returns http status 200 when ok + + :param mode: (required) + :type mode: str + :param timeout_seconds: + :type timeout_seconds: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._status_serialize( + mode=mode, + timeout_seconds=timeout_seconds, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _status_serialize( + self, + mode, + timeout_seconds, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if mode is not None: + _path_params['mode'] = mode + # process the query parameters + if timeout_seconds is not None: + + _query_params.append(('timeoutSeconds', timeout_seconds)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/_about/status/{mode}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/adminv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/adminv1_api.py new file mode 100644 index 00000000..933f5bde --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/adminv1_api.py @@ -0,0 +1,15648 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr, field_validator +from typing import Any, Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.admin_statistics import AdminStatistics +from edu_sharing_client.models.cache_cluster import CacheCluster +from edu_sharing_client.models.cache_info import CacheInfo +from edu_sharing_client.models.collections_result import CollectionsResult +from edu_sharing_client.models.excel_result import ExcelResult +from edu_sharing_client.models.logger_config_result import LoggerConfigResult +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.person_delete_options import PersonDeleteOptions +from edu_sharing_client.models.person_report import PersonReport +from edu_sharing_client.models.repository_config import RepositoryConfig +from edu_sharing_client.models.repository_version_info import RepositoryVersionInfo +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.models.search_result_elastic import SearchResultElastic +from edu_sharing_client.models.upload_result import UploadResult + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class ADMINV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_application( + self, + xml: Annotated[Dict[str, Any], Field(description="XML file for app to register")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """register/add an application via xml file + + register the xml file provided. + + :param xml: XML file for app to register (required) + :type xml: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_application_serialize( + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_application_with_http_info( + self, + xml: Annotated[Dict[str, Any], Field(description="XML file for app to register")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """register/add an application via xml file + + register the xml file provided. + + :param xml: XML file for app to register (required) + :type xml: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_application_serialize( + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_application_without_preload_content( + self, + xml: Annotated[Dict[str, Any], Field(description="XML file for app to register")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """register/add an application via xml file + + register the xml file provided. + + :param xml: XML file for app to register (required) + :type xml: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_application_serialize( + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_application_serialize( + self, + xml, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + if xml is not None: + _form_params.append(('xml', xml)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/applications/xml', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def add_application1( + self, + url: Annotated[StrictStr, Field(description="Remote application metadata url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """register/add an application + + register the specified application. + + :param url: Remote application metadata url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
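For every endpoint the generator emits three call styles: the plain method returns the deserialized body, the *_with_http_info variant wraps it in an ApiResponse together with the HTTP metadata, and the *_without_preload_content variant returns the raw RESTResponseType with the body left unread. A minimal usage sketch, assuming the default ApiClient is already configured for a running repository; the metadata URL is illustrative:

    from edu_sharing_client.api.adminv1_api import ADMINV1Api

    admin = ADMINV1Api()  # falls back to ApiClient.get_default()

    # plain variant: deserialized body only (a str for this endpoint)
    result = admin.add_application1(url="https://repo.example.org/metadata")

    # *_with_http_info: ApiResponse wrapper; *_without_preload_content: raw response
    resp = admin.add_application1_with_http_info(url="https://repo.example.org/metadata")
    raw = admin.add_application1_without_preload_content(url="https://repo.example.org/metadata")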
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_application1_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_application1_with_http_info( + self, + url: Annotated[StrictStr, Field(description="Remote application metadata url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """register/add an application + + register the specified application. + + :param url: Remote application metadata url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_application1_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_application1_without_preload_content( + self, + url: Annotated[StrictStr, Field(description="Remote application metadata url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """register/add an application + + register the specified application. + + :param url: Remote application metadata url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_application1_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_application1_serialize( + self, + url, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if url is not None: + + _query_params.append(('url', url)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/applications', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def add_toolpermission( + self, + name: Annotated[StrictStr, Field(description="Name/ID of toolpermission")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Node: + """add a new toolpermissions + + + :param name: Name/ID of toolpermission (required) + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_toolpermission_serialize( + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_toolpermission_with_http_info( + self, + name: Annotated[StrictStr, Field(description="Name/ID of toolpermission")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Node]: + """add a new toolpermissions + + + :param name: Name/ID of toolpermission (required) + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_toolpermission_serialize( + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_toolpermission_without_preload_content( + self, + name: Annotated[StrictStr, Field(description="Name/ID of toolpermission")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """add a new toolpermissions + + + :param name: Name/ID of toolpermission (required) + :type name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_toolpermission_serialize( + name=name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_toolpermission_serialize( + self, + name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/toolpermissions/add/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def apply_template( + self, + template: Annotated[StrictStr, Field(description="Template Filename")], + group: Annotated[StrictStr, Field(description="Group name (authority name)")], + folder: Annotated[Optional[StrictStr], Field(description="Folder name")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """apply a folder template + + apply a folder template. + + :param template: Template Filename (required) + :type template: str + :param group: Group name (authority name) (required) + :type group: str + :param folder: Folder name + :type folder: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
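A sketch of add_toolpermission under the same assumptions (the toolpermission name is illustrative); the call maps to POST /admin/v1/toolpermissions/add/{name} and returns the backing Node:

    from edu_sharing_client.api.adminv1_api import ADMINV1Api

    admin = ADMINV1Api()
    node = admin.add_toolpermission(name="TOOLPERMISSION_EXAMPLE")  # illustrative name
    print(node)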
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._apply_template_serialize( + template=template, + group=group, + folder=folder, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def apply_template_with_http_info( + self, + template: Annotated[StrictStr, Field(description="Template Filename")], + group: Annotated[StrictStr, Field(description="Group name (authority name)")], + folder: Annotated[Optional[StrictStr], Field(description="Folder name")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """apply a folder template + + apply a folder template. + + :param template: Template Filename (required) + :type template: str + :param group: Group name (authority name) (required) + :type group: str + :param folder: Folder name + :type folder: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._apply_template_serialize( + template=template, + group=group, + folder=folder, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def apply_template_without_preload_content( + self, + template: Annotated[StrictStr, Field(description="Template Filename")], + group: Annotated[StrictStr, Field(description="Group name (authority name)")], + folder: Annotated[Optional[StrictStr], Field(description="Folder name")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """apply a folder template + + apply a folder template. + + :param template: Template Filename (required) + :type template: str + :param group: Group name (authority name) (required) + :type group: str + :param folder: Folder name + :type folder: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._apply_template_serialize( + template=template, + group=group, + folder=folder, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _apply_template_serialize( + self, + template, + group, + folder, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if template is not None: + + _query_params.append(('template', template)) + + if group is not None: + + _query_params.append(('group', group)) + + if folder is not None: + + _query_params.append(('folder', folder)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/applyTemplate', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def cancel_job( + self, + job: StrictStr, + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """cancel a running job + + + :param job: (required) + :type job: str + :param force: + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
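Applying a folder template for a group, again as an illustrative sketch (template and group values are placeholders); template, group and the optional folder travel as query parameters of POST /admin/v1/applyTemplate:

    from edu_sharing_client.api.adminv1_api import ADMINV1Api

    admin = ADMINV1Api()
    # 'folder' may be omitted; it defaults to None
    admin.apply_template(template="example_template.xml", group="GROUP_example")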
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._cancel_job_serialize( + job=job, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def cancel_job_with_http_info( + self, + job: StrictStr, + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """cancel a running job + + + :param job: (required) + :type job: str + :param force: + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._cancel_job_serialize( + job=job, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def cancel_job_without_preload_content( + self, + job: StrictStr, + force: Optional[StrictBool] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """cancel a running job + + + :param job: (required) + :type job: str + :param force: + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._cancel_job_serialize( + job=job, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _cancel_job_serialize( + self, + job, + force, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if job is not None: + _path_params['job'] = job + # process the query parameters + if force is not None: + + _query_params.append(('force', force)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/admin/v1/jobs/{job}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_logging( + self, + name: Annotated[StrictStr, Field(description="name")], + loglevel: Annotated[StrictStr, Field(description="loglevel")], + appender: Annotated[Optional[StrictStr], Field(description="appender")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Change the loglevel for classes at runtime. + + Root appenders are used. Check the appender treshold. + + :param name: name (required) + :type name: str + :param loglevel: loglevel (required) + :type loglevel: str + :param appender: appender + :type appender: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
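Cancelling a running job maps to DELETE /admin/v1/jobs/{job}; a sketch with an illustrative job name, 'force' being the optional query flag defined above:

    from edu_sharing_client.api.adminv1_api import ADMINV1Api

    admin = ADMINV1Api()
    admin.cancel_job(job="ExampleJob", force=True)  # job name is illustrative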
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_logging_serialize( + name=name, + loglevel=loglevel, + appender=appender, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_logging_with_http_info( + self, + name: Annotated[StrictStr, Field(description="name")], + loglevel: Annotated[StrictStr, Field(description="loglevel")], + appender: Annotated[Optional[StrictStr], Field(description="appender")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Change the loglevel for classes at runtime. + + Root appenders are used. Check the appender treshold. + + :param name: name (required) + :type name: str + :param loglevel: loglevel (required) + :type loglevel: str + :param appender: appender + :type appender: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_logging_serialize( + name=name, + loglevel=loglevel, + appender=appender, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_logging_without_preload_content( + self, + name: Annotated[StrictStr, Field(description="name")], + loglevel: Annotated[StrictStr, Field(description="loglevel")], + appender: Annotated[Optional[StrictStr], Field(description="appender")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Change the loglevel for classes at runtime. + + Root appenders are used. Check the appender treshold. + + :param name: name (required) + :type name: str + :param loglevel: loglevel (required) + :type loglevel: str + :param appender: appender + :type appender: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_logging_serialize( + name=name, + loglevel=loglevel, + appender=appender, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_logging_serialize( + self, + name, + loglevel, + appender, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if name is not None: + + _query_params.append(('name', name)) + + if loglevel is not None: + + _query_params.append(('loglevel', loglevel)) + + if appender is not None: + + _query_params.append(('appender', appender)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/log/config', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def clear_cache( + self, + bean: Annotated[Optional[StrictStr], Field(description="bean")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """clear cache + + clear cache + + :param bean: bean + :type bean: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
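Changing a log level at runtime posts name/loglevel (and optionally appender) to /admin/v1/log/config; the class name below is illustrative:

    from edu_sharing_client.api.adminv1_api import ADMINV1Api

    admin = ADMINV1Api()
    admin.change_logging(name="org.example.SomeClass", loglevel="DEBUG")  # illustrative logger name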
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._clear_cache_serialize( + bean=bean, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def clear_cache_with_http_info( + self, + bean: Annotated[Optional[StrictStr], Field(description="bean")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """clear cache + + clear cache + + :param bean: bean + :type bean: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._clear_cache_serialize( + bean=bean, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def clear_cache_without_preload_content( + self, + bean: Annotated[Optional[StrictStr], Field(description="bean")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """clear cache + + clear cache + + :param bean: bean + :type bean: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._clear_cache_serialize( + bean=bean, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _clear_cache_serialize( + self, + bean, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if bean is not None: + + _query_params.append(('bean', bean)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/cache/clearCache', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_preview( + self, + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """create preview. + + create preview. + + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
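Clearing a cache posts the optional 'bean' query parameter to /admin/v1/cache/clearCache; the bean name below is illustrative and may be omitted:

    from edu_sharing_client.api.adminv1_api import ADMINV1Api

    admin = ADMINV1Api()
    admin.clear_cache(bean="exampleCacheBean")  # or admin.clear_cache() to clear without a specific bean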
+ """ # noqa: E501 + + _param = self._create_preview_serialize( + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_preview_with_http_info( + self, + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """create preview. + + create preview. + + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_preview_serialize( + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_preview_without_preload_content( + self, + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create preview. + + create preview. 
+ + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_preview_serialize( + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_preview_serialize( + self, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/nodes/preview/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_person( + self, + username: Annotated[List[StrictStr], Field(description="names of the users to delete")], + person_delete_options: Annotated[Optional[PersonDeleteOptions], Field(description="options object what and how to delete user contents")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> PersonReport: + 
"""delete persons + + delete the given persons. Their status must be set to \"todelete\" + + :param username: names of the users to delete (required) + :type username: List[str] + :param person_delete_options: options object what and how to delete user contents + :type person_delete_options: PersonDeleteOptions + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_person_serialize( + username=username, + person_delete_options=person_delete_options, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "PersonReport", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_person_with_http_info( + self, + username: Annotated[List[StrictStr], Field(description="names of the users to delete")], + person_delete_options: Annotated[Optional[PersonDeleteOptions], Field(description="options object what and how to delete user contents")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[PersonReport]: + """delete persons + + delete the given persons. Their status must be set to \"todelete\" + + :param username: names of the users to delete (required) + :type username: List[str] + :param person_delete_options: options object what and how to delete user contents + :type person_delete_options: PersonDeleteOptions + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_person_serialize( + username=username, + person_delete_options=person_delete_options, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "PersonReport", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_person_without_preload_content( + self, + username: Annotated[List[StrictStr], Field(description="names of the users to delete")], + person_delete_options: Annotated[Optional[PersonDeleteOptions], Field(description="options object what and how to delete user contents")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete persons + + delete the given persons. Their status must be set to \"todelete\" + + :param username: names of the users to delete (required) + :type username: List[str] + :param person_delete_options: options object what and how to delete user contents + :type person_delete_options: PersonDeleteOptions + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
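+
+        Minimal usage sketch, assuming an instance of this generated class named
+        ``admin_api``; the user names are illustrative and must already carry the
+        "todelete" status::
+
+            # person_delete_options may be omitted, it is optional in this client
+            report = admin_api.delete_person(username=["old_user_1", "old_user_2"])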
+ """ # noqa: E501 + + _param = self._delete_person_serialize( + username=username, + person_delete_options=person_delete_options, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "PersonReport", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_person_serialize( + self, + username, + person_delete_options, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'username': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if username is not None: + + _query_params.append(('username', username)) + + # process the header parameters + # process the form parameters + # process the body parameter + if person_delete_options is not None: + _body_params = person_delete_options + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/deletePersons', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def export_by_lucene( + self, + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + properties: Annotated[Optional[List[StrictStr]], Field(description="properties to fetch, use parent:: to include parent property values")] = None, + store: Annotated[Optional[StrictStr], Field(description="store, workspace or archive")] = None, + authority_scope: Annotated[Optional[List[StrictStr]], Field(description="authority scope to search for")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Search for custom lucene query and choose specific properties to load + + e.g. @cm\\:name:\"*\" + + :param query: query + :type query: str + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param properties: properties to fetch, use parent:: to include parent property values + :type properties: List[str] + :param store: store, workspace or archive + :type store: str + :param authority_scope: authority scope to search for + :type authority_scope: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
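+
+        Minimal usage sketch, assuming an instance of this generated class named
+        ``admin_api``; the query follows the example above, the property names
+        are illustrative::
+
+            exported = admin_api.export_by_lucene(
+                query='@cm\\:name:"*"',
+                properties=["cm:name", "cm:modified"],
+                store="workspace",
+            )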
+ """ # noqa: E501 + + _param = self._export_by_lucene_serialize( + query=query, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + properties=properties, + store=store, + authority_scope=authority_scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def export_by_lucene_with_http_info( + self, + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + properties: Annotated[Optional[List[StrictStr]], Field(description="properties to fetch, use parent:: to include parent property values")] = None, + store: Annotated[Optional[StrictStr], Field(description="store, workspace or archive")] = None, + authority_scope: Annotated[Optional[List[StrictStr]], Field(description="authority scope to search for")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Search for custom lucene query and choose specific properties to load + + e.g. @cm\\:name:\"*\" + + :param query: query + :type query: str + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param properties: properties to fetch, use parent:: to include parent property values + :type properties: List[str] + :param store: store, workspace or archive + :type store: str + :param authority_scope: authority scope to search for + :type authority_scope: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._export_by_lucene_serialize( + query=query, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + properties=properties, + store=store, + authority_scope=authority_scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def export_by_lucene_without_preload_content( + self, + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + properties: Annotated[Optional[List[StrictStr]], Field(description="properties to fetch, use parent:: to include parent property values")] = None, + store: Annotated[Optional[StrictStr], Field(description="store, workspace or archive")] = None, + authority_scope: Annotated[Optional[List[StrictStr]], Field(description="authority scope to search for")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search for custom lucene query and choose specific properties to load + + e.g. @cm\\:name:\"*\" + + :param query: query + :type query: str + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param properties: properties to fetch, use parent:: to include parent property values + :type properties: List[str] + :param store: store, workspace or archive + :type store: str + :param authority_scope: authority scope to search for + :type authority_scope: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._export_by_lucene_serialize( + query=query, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + properties=properties, + store=store, + authority_scope=authority_scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _export_by_lucene_serialize( + self, + query, + sort_properties, + sort_ascending, + properties, + store, + authority_scope, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'properties': 'multi', + 'authorityScope': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if query is not None: + + _query_params.append(('query', query)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if properties is not None: + + _query_params.append(('properties', properties)) + + if store is not None: + + _query_params.append(('store', store)) + + if authority_scope is not None: + + _query_params.append(('authorityScope', authority_scope)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/lucene/export', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def export_lom( + self, + filter_query: Annotated[StrictStr, Field(description="filterQuery")], + target_dir: Annotated[StrictStr, Field(description="targetDir")], + sub_object_handler: Annotated[StrictBool, Field(description="subObjectHandler")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: 
Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Export Nodes with LOM Metadata Format + + Export Nodes with LOM Metadata Format. + + :param filter_query: filterQuery (required) + :type filter_query: str + :param target_dir: targetDir (required) + :type target_dir: str + :param sub_object_handler: subObjectHandler (required) + :type sub_object_handler: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._export_lom_serialize( + filter_query=filter_query, + target_dir=target_dir, + sub_object_handler=sub_object_handler, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def export_lom_with_http_info( + self, + filter_query: Annotated[StrictStr, Field(description="filterQuery")], + target_dir: Annotated[StrictStr, Field(description="targetDir")], + sub_object_handler: Annotated[StrictBool, Field(description="subObjectHandler")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Export Nodes with LOM Metadata Format + + Export Nodes with LOM Metadata Format. + + :param filter_query: filterQuery (required) + :type filter_query: str + :param target_dir: targetDir (required) + :type target_dir: str + :param sub_object_handler: subObjectHandler (required) + :type sub_object_handler: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._export_lom_serialize( + filter_query=filter_query, + target_dir=target_dir, + sub_object_handler=sub_object_handler, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def export_lom_without_preload_content( + self, + filter_query: Annotated[StrictStr, Field(description="filterQuery")], + target_dir: Annotated[StrictStr, Field(description="targetDir")], + sub_object_handler: Annotated[StrictBool, Field(description="subObjectHandler")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Export Nodes with LOM Metadata Format + + Export Nodes with LOM Metadata Format. + + :param filter_query: filterQuery (required) + :type filter_query: str + :param target_dir: targetDir (required) + :type target_dir: str + :param sub_object_handler: subObjectHandler (required) + :type sub_object_handler: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
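+
+        Minimal usage sketch, assuming an instance of this generated class named
+        ``admin_api``; the filter query and target directory are illustrative
+        (the directory is presumably resolved on the server side)::
+
+            admin_api.export_lom(
+                filter_query='@cm\\:name:"*"',
+                target_dir="/tmp/lom-export",
+                sub_object_handler=False,
+            )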
+ """ # noqa: E501 + + _param = self._export_lom_serialize( + filter_query=filter_query, + target_dir=target_dir, + sub_object_handler=sub_object_handler, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _export_lom_serialize( + self, + filter_query, + target_dir, + sub_object_handler, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if filter_query is not None: + + _query_params.append(('filterQuery', filter_query)) + + if target_dir is not None: + + _query_params.append(('targetDir', target_dir)) + + if sub_object_handler is not None: + + _query_params.append(('subObjectHandler', sub_object_handler)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/export/lom', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_all_jobs( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get all available jobs + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_all_jobs_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_all_jobs_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get all available jobs + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_all_jobs_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_all_jobs_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get all available jobs + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_all_jobs_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_all_jobs_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/jobs/all', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_all_toolpermissions( + self, + authority: Annotated[StrictStr, Field(description="Authority to load (user or group)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get all toolpermissions for an authority + + Returns explicit (rights set for this authority) + effective (resulting rights for this authority) toolpermission + + :param authority: Authority to load (user or group) (required) + :type authority: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_all_toolpermissions_serialize( + authority=authority, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_all_toolpermissions_with_http_info( + self, + authority: Annotated[StrictStr, Field(description="Authority to load (user or group)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get all toolpermissions for an authority + + Returns explicit (rights set for this authority) + effective (resulting rights for this authority) toolpermission + + :param authority: Authority to load (user or group) (required) + :type authority: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
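+
+        Minimal usage sketch, assuming an instance of this generated class named
+        ``admin_api``; the authority name is illustrative::
+
+            # the *_with_http_info variant wraps the payload together with status and headers
+            api_resp = admin_api.get_all_toolpermissions_with_http_info(authority="GROUP_ORG_ADMINISTRATORS")
+            print(api_resp.status_code, api_resp.data)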
+ """ # noqa: E501 + + _param = self._get_all_toolpermissions_serialize( + authority=authority, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_all_toolpermissions_without_preload_content( + self, + authority: Annotated[StrictStr, Field(description="Authority to load (user or group)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get all toolpermissions for an authority + + Returns explicit (rights set for this authority) + effective (resulting rights for this authority) toolpermission + + :param authority: Authority to load (user or group) (required) + :type authority: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_all_toolpermissions_serialize( + authority=authority, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_all_toolpermissions_serialize( + self, + authority, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if authority is not None: + _path_params['authority'] = authority + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/toolpermissions/{authority}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_application_xml( + self, + xml: Annotated[StrictStr, Field(description="Properties Filename (*.xml)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """list any xml properties (like from homeApplication.properties.xml) + + list any xml properties (like from homeApplication.properties.xml) + + :param xml: Properties Filename (*.xml) (required) + :type xml: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_application_xml_serialize( + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_application_xml_with_http_info( + self, + xml: Annotated[StrictStr, Field(description="Properties Filename (*.xml)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """list any xml properties (like from homeApplication.properties.xml) + + list any xml properties (like from homeApplication.properties.xml) + + :param xml: Properties Filename (*.xml) (required) + :type xml: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
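+
+        Minimal usage sketch, assuming an instance of this generated class named
+        ``admin_api``; the filename is the example given above::
+
+            xml_props = admin_api.get_application_xml(xml="homeApplication.properties.xml")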
+ """ # noqa: E501 + + _param = self._get_application_xml_serialize( + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_application_xml_without_preload_content( + self, + xml: Annotated[StrictStr, Field(description="Properties Filename (*.xml)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list any xml properties (like from homeApplication.properties.xml) + + list any xml properties (like from homeApplication.properties.xml) + + :param xml: Properties Filename (*.xml) (required) + :type xml: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
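+
+        Example (illustrative sketch only: the package, class and host names
+        below are assumptions for demonstration, not defined by this patch;
+        the three method flavours shown do come from the generated code)::
+
+            from edu_sharing_client import ApiClient, Configuration      # assumed package name
+            from edu_sharing_client.api.adminv1_api import ADMINV1Api    # assumed module path
+
+            config = Configuration(host="https://repo.example.org/edu-sharing/rest",
+                                   username="admin", password="secret")  # placeholder credentials
+            api = ADMINV1Api(ApiClient(config))
+
+            # 1) plain call: returns the deserialized payload (a str here)
+            xml_text = api.get_application_xml("homeApplication.properties.xml")
+
+            # 2) *_with_http_info: payload plus status code and headers
+            resp = api.get_application_xml_with_http_info("homeApplication.properties.xml")
+            print(resp.status_code, len(resp.data))
+
+            # 3) *_without_preload_content: raw response, body read by the caller
+            raw = api.get_application_xml_without_preload_content("homeApplication.properties.xml")
+            body = raw.read()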
+ """ # noqa: E501 + + _param = self._get_application_xml_serialize( + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_application_xml_serialize( + self, + xml, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if xml is not None: + _path_params['xml'] = xml + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/applications/{xml}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_applications( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """list applications + + List all registered applications. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
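+
+        Example (hedged sketch; ``api`` stands for an instance of this admin
+        API class wired to an authenticated ``ApiClient`` in the generator's
+        usual pattern, which this docstring does not prescribe)::
+
+            applications = api.get_applications()
+            # The spec types the payload as a plain string; treating it as JSON
+            # is an assumption about the server, not a guarantee of this client.
+            import json
+            try:
+                print(json.loads(applications))
+            except json.JSONDecodeError:
+                print(applications)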
+ """ # noqa: E501 + + _param = self._get_applications_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_applications_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """list applications + + List all registered applications. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_applications_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_applications_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list applications + + List all registered applications. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_applications_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_applications_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/applications', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_cache_entries( + self, + id: Annotated[StrictStr, Field(description="Id/bean name of the cache")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get entries of a cache + + Get entries of a cache. + + :param id: Id/bean name of the cache (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_cache_entries_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_cache_entries_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Id/bean name of the cache")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get entries of a cache + + Get entries of a cache. + + :param id: Id/bean name of the cache (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
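+
+        Example (sketch; the cache bean name is a placeholder and ``api`` is an
+        instance of this class created elsewhere)::
+
+            resp = api.get_cache_entries_with_http_info("someCacheBean")  # hypothetical id
+            print(resp.status_code)                  # HTTP status of the call
+            print(resp.headers.get("Content-Type"))  # response headers are preserved
+            print(resp.data)                         # serialized cache entries (str)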
+ """ # noqa: E501 + + _param = self._get_cache_entries_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_cache_entries_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Id/bean name of the cache")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get entries of a cache + + Get entries of a cache. + + :param id: Id/bean name of the cache (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_cache_entries_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_cache_entries_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/cache/cacheEntries/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_cache_info( + self, + id: Annotated[StrictStr, Field(description="Id/bean name of the cache")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CacheInfo: + """Get information about a cache + + Get information about a cache. + + :param id: Id/bean name of the cache (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
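+
+        Example (sketch; the cache id is a placeholder and ``api`` an instance
+        of this class)::
+
+            info = api.get_cache_info("someCacheBean")   # hypothetical bean name
+            # ``info`` is a CacheInfo model instance; to_dict() is the usual
+            # generated helper for inspecting it.
+            print(info.to_dict())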
+ """ # noqa: E501 + + _param = self._get_cache_info_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_cache_info_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Id/bean name of the cache")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CacheInfo]: + """Get information about a cache + + Get information about a cache. + + :param id: Id/bean name of the cache (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_cache_info_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_cache_info_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Id/bean name of the cache")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get information about a cache + + Get information about a cache. + + :param id: Id/bean name of the cache (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
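+
+        Example (sketch of the per-request override parameters shared by all
+        generated methods; every value below is a placeholder)::
+
+            raw = api.get_cache_info_without_preload_content(
+                "someCacheBean",                       # hypothetical cache id
+                _request_timeout=(3.05, 27),           # (connect, read) seconds
+                _headers={"X-Trace-Id": "debug-123"},  # extra header for this call only
+            )
+            # The raw response is not deserialized; reading the body and checking
+            # the status attribute are assumed to follow urllib3 conventions.
+            print(raw.status)
+            print(raw.read())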
+ """ # noqa: E501 + + _param = self._get_cache_info_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_cache_info_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/cache/cacheInfo/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_catalina_out( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get last info from catalina out + + Get catalina.out log. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
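+
+        Example (sketch; ``api`` is an instance of this class with admin
+        permissions)::
+
+            log_tail = api.get_catalina_out()
+            for line in log_tail.splitlines()[-20:]:
+                print(line)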
+ """ # noqa: E501 + + _param = self._get_catalina_out_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_catalina_out_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get last info from catalina out + + Get catalina.out log. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_catalina_out_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_catalina_out_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get last info from catalina out + + Get catalina.out log. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_catalina_out_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_catalina_out_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/catalina', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_cluster( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CacheCluster: + """Get information about the Cluster + + Get information the Cluster + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_cluster_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheCluster", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_cluster_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CacheCluster]: + """Get information about the Cluster + + Get information the Cluster + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
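+
+        Example (sketch; ``api`` is an instance of this class)::
+
+            resp = api.get_cluster_with_http_info()
+            cluster = resp.data            # CacheCluster model
+            print(resp.status_code, cluster)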
+ """ # noqa: E501 + + _param = self._get_cluster_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheCluster", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_cluster_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get information about the Cluster + + Get information the Cluster + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_cluster_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheCluster", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_cluster_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/clusterInfo', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_clusters( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CacheCluster: + """Get information about the Cluster + + Get information the Cluster + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
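+
+        Example (sketch; note that despite the plural method name the generated
+        return type is a single ``CacheCluster`` object)::
+
+            cluster_info = api.get_clusters()
+            print(cluster_info)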
+ """ # noqa: E501 + + _param = self._get_clusters_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheCluster", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_clusters_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CacheCluster]: + """Get information about the Cluster + + Get information the Cluster + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_clusters_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheCluster", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_clusters_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get information about the Cluster + + Get information the Cluster + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_clusters_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CacheCluster", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_clusters_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/clusterInfos', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_config( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RepositoryConfig: + """get the repository config object + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_config_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepositoryConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_config_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RepositoryConfig]: + """get the repository config object + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
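+
+        Example (sketch; ``api`` is an instance of this class)::
+
+            resp = api.get_config_with_http_info()
+            if resp.status_code == 200:
+                repo_config = resp.data    # RepositoryConfig model
+                print(repo_config)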
+ """ # noqa: E501 + + _param = self._get_config_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepositoryConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_config_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get the repository config object + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_config_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepositoryConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_config_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/repositoryConfig', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_config_file( + self, + filename: Annotated[StrictStr, Field(description="filename to fetch")], + path_prefix: Annotated[StrictStr, Field(description="path prefix this file belongs to")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get a base system config file (e.g. edu-sharing.conf) + + + :param filename: filename to fetch (required) + :type filename: str + :param path_prefix: path prefix this file belongs to (required) + :type path_prefix: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_config_file_serialize( + filename=filename, + path_prefix=path_prefix, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_config_file_with_http_info( + self, + filename: Annotated[StrictStr, Field(description="filename to fetch")], + path_prefix: Annotated[StrictStr, Field(description="path prefix this file belongs to")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get a base system config file (e.g. edu-sharing.conf) + + + :param filename: filename to fetch (required) + :type filename: str + :param path_prefix: path prefix this file belongs to (required) + :type path_prefix: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
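+
+ Example (illustrative; ``api`` is assumed to be a configured instance of this
+ API class and the argument values are placeholders)::
+
+ resp = api.get_config_file_with_http_info(filename="edu-sharing.conf", path_prefix="/etc/edu-sharing")
+ print(resp.data) # the deserialized file content as str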
+ """ # noqa: E501 + + _param = self._get_config_file_serialize( + filename=filename, + path_prefix=path_prefix, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_config_file_without_preload_content( + self, + filename: Annotated[StrictStr, Field(description="filename to fetch")], + path_prefix: Annotated[StrictStr, Field(description="path prefix this file belongs to")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get a base system config file (e.g. edu-sharing.conf) + + + :param filename: filename to fetch (required) + :type filename: str + :param path_prefix: path prefix this file belongs to (required) + :type path_prefix: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_config_file_serialize( + filename=filename, + path_prefix=path_prefix, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_config_file_serialize( + self, + filename, + path_prefix, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if filename is not None: + + _query_params.append(('filename', filename)) + + if path_prefix is not None: + + _query_params.append(('pathPrefix', path_prefix)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/configFile', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_enabled_plugins( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get enabled system plugins + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_enabled_plugins_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_enabled_plugins_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get enabled system plugins + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_enabled_plugins_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_enabled_plugins_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get enabled system plugins + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_enabled_plugins_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_enabled_plugins_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/plugins', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_global_groups( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get global groups + + Get global groups (groups across repositories). + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_global_groups_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_global_groups_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get global groups + + Get global groups (groups across repositories). + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
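+
+ Example (illustrative; ``api`` is assumed to be a configured instance of this
+ API class)::
+
+ resp = api.get_global_groups_with_http_info()
+ groups_json = resp.data # cross-repository groups, returned as str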
+ """ # noqa: E501 + + _param = self._get_global_groups_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_global_groups_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get global groups + + Get global groups (groups across repositories). + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_global_groups_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_global_groups_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/globalGroups', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_jobs( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get all running jobs + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_jobs_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_jobs_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get all running jobs + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_jobs_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_jobs_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get all running jobs + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_jobs_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_jobs_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/jobs', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_lightbend_config( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """get_lightbend_config + + Get the fully merged & parsed (lightbend) backend config + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_lightbend_config_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_lightbend_config_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """get_lightbend_config + + Get the fully merged & parsed (lightbend) backend config + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
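+
+ Example (illustrative; ``api`` is assumed to be a configured instance of this
+ API class)::
+
+ resp = api.get_lightbend_config_with_http_info()
+ merged_config = resp.data # merged & parsed backend config as a plain object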
+ """ # noqa: E501 + + _param = self._get_lightbend_config_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_lightbend_config_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_lightbend_config + + Get the fully merged & parsed (lightbend) backend config + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_lightbend_config_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_lightbend_config_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/config/merged', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_logging_runtime( + self, + filters: Annotated[Optional[List[StrictStr]], Field(description="filters")] = None, + only_config: Annotated[Optional[StrictBool], Field(description="onlyConfig if true only loggers defined in log4j.xml or at runtime are returned")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> LoggerConfigResult: + """get the logger config + + + :param filters: filters + :type filters: List[str] + :param only_config: onlyConfig if true only loggers defined in log4j.xml or at runtime are returned + :type only_config: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_logging_runtime_serialize( + filters=filters, + only_config=only_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LoggerConfigResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_logging_runtime_with_http_info( + self, + filters: Annotated[Optional[List[StrictStr]], Field(description="filters")] = None, + only_config: Annotated[Optional[StrictBool], Field(description="onlyConfig if true only loggers defined in log4j.xml or at runtime are returned")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[LoggerConfigResult]: + """get the logger config + + + :param filters: filters + :type filters: List[str] + :param only_config: onlyConfig if true only loggers defined in log4j.xml or at runtime are returned + :type only_config: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
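+
+ Example (illustrative; ``api`` is assumed to be a configured instance of this
+ API class and the filter value is a placeholder)::
+
+ resp = api.get_logging_runtime_with_http_info(filters=["org.edu_sharing"], only_config=True)
+ logger_config = resp.data # deserialized LoggerConfigResult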
+ """ # noqa: E501 + + _param = self._get_logging_runtime_serialize( + filters=filters, + only_config=only_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LoggerConfigResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_logging_runtime_without_preload_content( + self, + filters: Annotated[Optional[List[StrictStr]], Field(description="filters")] = None, + only_config: Annotated[Optional[StrictBool], Field(description="onlyConfig if true only loggers defined in log4j.xml or at runtime are returned")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get the logger config + + + :param filters: filters + :type filters: List[str] + :param only_config: onlyConfig if true only loggers defined in log4j.xml or at runtime are returned + :type only_config: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_logging_runtime_serialize( + filters=filters, + only_config=only_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "LoggerConfigResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_logging_runtime_serialize( + self, + filters, + only_config, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'filters': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if filters is not None: + + _query_params.append(('filters', filters)) + + if only_config is not None: + + _query_params.append(('onlyConfig', only_config)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/log/config', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_oai_classes( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get OAI class names + + Get available importer classes for OAI import. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_oai_classes_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_oai_classes_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get OAI class names + + Get available importer classes for OAI import. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_oai_classes_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_oai_classes_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get OAI class names + + Get available importer classes for OAI import. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_oai_classes_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_oai_classes_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/import/oai/classes', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_property_to_mds( + self, + properties: Annotated[List[StrictStr], Field(description="one or more properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get a Mds Valuespace for all values of the given properties + + Get a Mds Valuespace for all values of the given properties. + + :param properties: one or more properties (required) + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_property_to_mds_serialize( + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_property_to_mds_with_http_info( + self, + properties: Annotated[List[StrictStr], Field(description="one or more properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get a Mds Valuespace for all values of the given properties + + Get a Mds Valuespace for all values of the given properties. + + :param properties: one or more properties (required) + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
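+
+ Example (illustrative; ``api`` is assumed to be a configured instance of this
+ API class and the property name is a placeholder)::
+
+ resp = api.get_property_to_mds_with_http_info(properties=["ccm:educationallearningresourcetype"])
+ print(resp.data) # valuespace for the requested properties, returned as str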
+ """ # noqa: E501 + + _param = self._get_property_to_mds_serialize( + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_property_to_mds_without_preload_content( + self, + properties: Annotated[List[StrictStr], Field(description="one or more properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a Mds Valuespace for all values of the given properties + + Get a Mds Valuespace for all values of the given properties. + + :param properties: one or more properties (required) + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_property_to_mds_serialize( + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_property_to_mds_serialize( + self, + properties, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'properties': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if properties is not None: + + _query_params.append(('properties', properties)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/propertyToMds', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_statistics( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AdminStatistics: + """get statistics + + get statistics. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_statistics_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AdminStatistics", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_statistics_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AdminStatistics]: + """get statistics + + get statistics. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_statistics_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AdminStatistics", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_statistics_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get statistics + + get statistics. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_statistics_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AdminStatistics", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_statistics_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/statistics', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_version( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RepositoryVersionInfo: + """get detailed version information + + detailed information about the running system version + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_version_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepositoryVersionInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_version_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RepositoryVersionInfo]: + """get detailed version information + + detailed information about the running system version + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_version_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepositoryVersionInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_version_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get detailed version information + + detailed information about the running system version + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_version_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepositoryVersionInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_version_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/version', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_collections( + self, + xml: Annotated[Dict[str, Any], Field(description="XML file to parse (or zip file containing exactly 1 xml file to parse)")], + parent: Annotated[Optional[StrictStr], Field(description="Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CollectionsResult: + """import collections via a xml file + + xml file must be structured as defined by the xsd standard + + :param xml: XML file to parse (or zip file containing exactly 1 xml file to parse) (required) + :type xml: object + :param parent: Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._import_collections_serialize( + xml=xml, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionsResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_collections_with_http_info( + self, + xml: Annotated[Dict[str, Any], Field(description="XML file to parse (or zip file containing exactly 1 xml file to parse)")], + parent: Annotated[Optional[StrictStr], Field(description="Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CollectionsResult]: + """import collections via a xml file + + xml file must be structured as defined by the xsd standard + + :param xml: XML file to parse (or zip file containing exactly 1 xml file to parse) (required) + :type xml: object + :param parent: Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_collections_serialize( + xml=xml, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionsResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_collections_without_preload_content( + self, + xml: Annotated[Dict[str, Any], Field(description="XML file to parse (or zip file containing exactly 1 xml file to parse)")], + parent: Annotated[Optional[StrictStr], Field(description="Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """import collections via a xml file + + xml file must be structured as defined by the xsd standard + + :param xml: XML file to parse (or zip file containing exactly 1 xml file to parse) (required) + :type xml: object + :param parent: Id of the root to initialize the collection structure, or '-root-' to inflate them on the first level + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_collections_serialize( + xml=xml, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionsResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_collections_serialize( + self, + xml, + parent, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if parent is not None: + + _query_params.append(('parent', parent)) + + # process the header parameters + # process the form parameters + if xml is not None: + _form_params.append(('xml', xml)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/import/collections', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_excel( + self, + parent: Annotated[StrictStr, Field(description="parent")], + add_to_collection: Annotated[StrictBool, Field(description="addToCollection")], + excel: Annotated[Dict[str, Any], Field(description="Excel file to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ExcelResult: + """Import excel data + + Import excel data. + + :param parent: parent (required) + :type parent: str + :param add_to_collection: addToCollection (required) + :type add_to_collection: bool + :param excel: Excel file to import (required) + :type excel: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._import_excel_serialize( + parent=parent, + add_to_collection=add_to_collection, + excel=excel, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ExcelResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_excel_with_http_info( + self, + parent: Annotated[StrictStr, Field(description="parent")], + add_to_collection: Annotated[StrictBool, Field(description="addToCollection")], + excel: Annotated[Dict[str, Any], Field(description="Excel file to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ExcelResult]: + """Import excel data + + Import excel data. + + :param parent: parent (required) + :type parent: str + :param add_to_collection: addToCollection (required) + :type add_to_collection: bool + :param excel: Excel file to import (required) + :type excel: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_excel_serialize( + parent=parent, + add_to_collection=add_to_collection, + excel=excel, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ExcelResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_excel_without_preload_content( + self, + parent: Annotated[StrictStr, Field(description="parent")], + add_to_collection: Annotated[StrictBool, Field(description="addToCollection")], + excel: Annotated[Dict[str, Any], Field(description="Excel file to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Import excel data + + Import excel data. + + :param parent: parent (required) + :type parent: str + :param add_to_collection: addToCollection (required) + :type add_to_collection: bool + :param excel: Excel file to import (required) + :type excel: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_excel_serialize( + parent=parent, + add_to_collection=add_to_collection, + excel=excel, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ExcelResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_excel_serialize( + self, + parent, + add_to_collection, + excel, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if parent is not None: + + _query_params.append(('parent', parent)) + + if add_to_collection is not None: + + _query_params.append(('addToCollection', add_to_collection)) + + # process the header parameters + # process the form parameters + if excel is not None: + _form_params.append(('excel', excel)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/import/excel', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_oai( + self, + base_url: Annotated[StrictStr, Field(description="base url")], + set: Annotated[StrictStr, Field(description="set/catalog id")], + metadata_prefix: Annotated[StrictStr, Field(description="metadata prefix")], + class_name: Annotated[StrictStr, Field(description="importer job class name (call /classes to obtain a list)")], + metadataset: Annotated[Optional[StrictStr], Field(description="id metadataset")] = None, + importer_class_name: Annotated[Optional[StrictStr], Field(description="importer class name (call /classes to obtain a list)")] = None, + record_handler_class_name: Annotated[Optional[StrictStr], Field(description="RecordHandler class name")] = None, + binary_handler_class_name: Annotated[Optional[StrictStr], Field(description="BinaryHandler class name (may be empty for none)")] = None, + persistent_handler_class_name: Annotated[Optional[StrictStr], Field(description="PersistentHandlerClassName class name (may be empty for none)")] = None, + file_url: Annotated[Optional[StrictStr], Field(description="url to file")] = None, + oai_ids: 
Annotated[Optional[StrictStr], Field(description="OAI Ids to import, can be null than the whole set will be imported")] = None, + force_update: Annotated[Optional[StrictBool], Field(description="force Update of all entries")] = None, + var_from: Annotated[Optional[StrictStr], Field(description="from: datestring yyyy-MM-dd)")] = None, + until: Annotated[Optional[StrictStr], Field(description="until: datestring yyyy-MM-dd)")] = None, + period_in_days: Annotated[Optional[StrictStr], Field(description="periodInDays: internal sets from and until. only effective if from/until not set)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Import oai data + + Import oai data. + + :param base_url: base url (required) + :type base_url: str + :param set: set/catalog id (required) + :type set: str + :param metadata_prefix: metadata prefix (required) + :type metadata_prefix: str + :param class_name: importer job class name (call /classes to obtain a list) (required) + :type class_name: str + :param metadataset: id metadataset + :type metadataset: str + :param importer_class_name: importer class name (call /classes to obtain a list) + :type importer_class_name: str + :param record_handler_class_name: RecordHandler class name + :type record_handler_class_name: str + :param binary_handler_class_name: BinaryHandler class name (may be empty for none) + :type binary_handler_class_name: str + :param persistent_handler_class_name: PersistentHandlerClassName class name (may be empty for none) + :type persistent_handler_class_name: str + :param file_url: url to file + :type file_url: str + :param oai_ids: OAI Ids to import, can be null than the whole set will be imported + :type oai_ids: str + :param force_update: force Update of all entries + :type force_update: bool + :param var_from: from: datestring yyyy-MM-dd) + :type var_from: str + :param until: until: datestring yyyy-MM-dd) + :type until: str + :param period_in_days: periodInDays: internal sets from and until. only effective if from/until not set) + :type period_in_days: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_oai_serialize( + base_url=base_url, + set=set, + metadata_prefix=metadata_prefix, + class_name=class_name, + metadataset=metadataset, + importer_class_name=importer_class_name, + record_handler_class_name=record_handler_class_name, + binary_handler_class_name=binary_handler_class_name, + persistent_handler_class_name=persistent_handler_class_name, + file_url=file_url, + oai_ids=oai_ids, + force_update=force_update, + var_from=var_from, + until=until, + period_in_days=period_in_days, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_oai_with_http_info( + self, + base_url: Annotated[StrictStr, Field(description="base url")], + set: Annotated[StrictStr, Field(description="set/catalog id")], + metadata_prefix: Annotated[StrictStr, Field(description="metadata prefix")], + class_name: Annotated[StrictStr, Field(description="importer job class name (call /classes to obtain a list)")], + metadataset: Annotated[Optional[StrictStr], Field(description="id metadataset")] = None, + importer_class_name: Annotated[Optional[StrictStr], Field(description="importer class name (call /classes to obtain a list)")] = None, + record_handler_class_name: Annotated[Optional[StrictStr], Field(description="RecordHandler class name")] = None, + binary_handler_class_name: Annotated[Optional[StrictStr], Field(description="BinaryHandler class name (may be empty for none)")] = None, + persistent_handler_class_name: Annotated[Optional[StrictStr], Field(description="PersistentHandlerClassName class name (may be empty for none)")] = None, + file_url: Annotated[Optional[StrictStr], Field(description="url to file")] = None, + oai_ids: Annotated[Optional[StrictStr], Field(description="OAI Ids to import, can be null than the whole set will be imported")] = None, + force_update: Annotated[Optional[StrictBool], Field(description="force Update of all entries")] = None, + var_from: Annotated[Optional[StrictStr], Field(description="from: datestring yyyy-MM-dd)")] = None, + until: Annotated[Optional[StrictStr], Field(description="until: datestring yyyy-MM-dd)")] = None, + period_in_days: Annotated[Optional[StrictStr], Field(description="periodInDays: internal sets from and until. only effective if from/until not set)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Import oai data + + Import oai data. 
+ + :param base_url: base url (required) + :type base_url: str + :param set: set/catalog id (required) + :type set: str + :param metadata_prefix: metadata prefix (required) + :type metadata_prefix: str + :param class_name: importer job class name (call /classes to obtain a list) (required) + :type class_name: str + :param metadataset: id metadataset + :type metadataset: str + :param importer_class_name: importer class name (call /classes to obtain a list) + :type importer_class_name: str + :param record_handler_class_name: RecordHandler class name + :type record_handler_class_name: str + :param binary_handler_class_name: BinaryHandler class name (may be empty for none) + :type binary_handler_class_name: str + :param persistent_handler_class_name: PersistentHandlerClassName class name (may be empty for none) + :type persistent_handler_class_name: str + :param file_url: url to file + :type file_url: str + :param oai_ids: OAI Ids to import, can be null than the whole set will be imported + :type oai_ids: str + :param force_update: force Update of all entries + :type force_update: bool + :param var_from: from: datestring yyyy-MM-dd) + :type var_from: str + :param until: until: datestring yyyy-MM-dd) + :type until: str + :param period_in_days: periodInDays: internal sets from and until. only effective if from/until not set) + :type period_in_days: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_oai_serialize( + base_url=base_url, + set=set, + metadata_prefix=metadata_prefix, + class_name=class_name, + metadataset=metadataset, + importer_class_name=importer_class_name, + record_handler_class_name=record_handler_class_name, + binary_handler_class_name=binary_handler_class_name, + persistent_handler_class_name=persistent_handler_class_name, + file_url=file_url, + oai_ids=oai_ids, + force_update=force_update, + var_from=var_from, + until=until, + period_in_days=period_in_days, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_oai_without_preload_content( + self, + base_url: Annotated[StrictStr, Field(description="base url")], + set: Annotated[StrictStr, Field(description="set/catalog id")], + metadata_prefix: Annotated[StrictStr, Field(description="metadata prefix")], + class_name: Annotated[StrictStr, Field(description="importer job class name (call /classes to obtain a list)")], + metadataset: Annotated[Optional[StrictStr], Field(description="id metadataset")] = None, + importer_class_name: Annotated[Optional[StrictStr], Field(description="importer class name (call /classes to obtain a list)")] = None, + record_handler_class_name: Annotated[Optional[StrictStr], Field(description="RecordHandler class name")] = None, + binary_handler_class_name: Annotated[Optional[StrictStr], Field(description="BinaryHandler class name (may be empty for none)")] = None, + persistent_handler_class_name: Annotated[Optional[StrictStr], Field(description="PersistentHandlerClassName class name (may be empty for none)")] = None, + file_url: Annotated[Optional[StrictStr], Field(description="url to file")] = None, + oai_ids: Annotated[Optional[StrictStr], Field(description="OAI Ids to import, can be null than the whole set will be imported")] = None, + force_update: Annotated[Optional[StrictBool], Field(description="force Update of all entries")] = None, + var_from: Annotated[Optional[StrictStr], Field(description="from: datestring yyyy-MM-dd)")] = None, + until: Annotated[Optional[StrictStr], Field(description="until: datestring yyyy-MM-dd)")] = None, + period_in_days: Annotated[Optional[StrictStr], Field(description="periodInDays: internal sets from and until. only effective if from/until not set)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Import oai data + + Import oai data. 
+ + :param base_url: base url (required) + :type base_url: str + :param set: set/catalog id (required) + :type set: str + :param metadata_prefix: metadata prefix (required) + :type metadata_prefix: str + :param class_name: importer job class name (call /classes to obtain a list) (required) + :type class_name: str + :param metadataset: id metadataset + :type metadataset: str + :param importer_class_name: importer class name (call /classes to obtain a list) + :type importer_class_name: str + :param record_handler_class_name: RecordHandler class name + :type record_handler_class_name: str + :param binary_handler_class_name: BinaryHandler class name (may be empty for none) + :type binary_handler_class_name: str + :param persistent_handler_class_name: PersistentHandlerClassName class name (may be empty for none) + :type persistent_handler_class_name: str + :param file_url: url to file + :type file_url: str + :param oai_ids: OAI Ids to import, can be null than the whole set will be imported + :type oai_ids: str + :param force_update: force Update of all entries + :type force_update: bool + :param var_from: from: datestring yyyy-MM-dd) + :type var_from: str + :param until: until: datestring yyyy-MM-dd) + :type until: str + :param period_in_days: periodInDays: internal sets from and until. only effective if from/until not set) + :type period_in_days: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
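import_oai carries everything as query parameters, so the generated method can be called directly. Only the four required arguments plus force_update are shown; the package name, class name, host and the concrete importer values are illustrative assumptions (valid class names can be obtained from the /admin/v1/import/oai/classes endpoint shown further up).

import edu_sharing_client

config = edu_sharing_client.Configuration(host="https://repo.example.org/edu-sharing/rest")
with edu_sharing_client.ApiClient(config) as api_client:
    admin_api = edu_sharing_client.ADMINV1Api(api_client)
    # Values below are placeholders; the method itself returns None on success.
    admin_api.import_oai(
        base_url="https://oai.example.org/oai/provider",
        set="example-set",
        metadata_prefix="oai_dc",
        class_name="ExampleImporterJob",   # hypothetical; list real ones via get_oai_classes()
        force_update=False,
    )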
+ """ # noqa: E501 + + _param = self._import_oai_serialize( + base_url=base_url, + set=set, + metadata_prefix=metadata_prefix, + class_name=class_name, + metadataset=metadataset, + importer_class_name=importer_class_name, + record_handler_class_name=record_handler_class_name, + binary_handler_class_name=binary_handler_class_name, + persistent_handler_class_name=persistent_handler_class_name, + file_url=file_url, + oai_ids=oai_ids, + force_update=force_update, + var_from=var_from, + until=until, + period_in_days=period_in_days, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_oai_serialize( + self, + base_url, + set, + metadata_prefix, + class_name, + metadataset, + importer_class_name, + record_handler_class_name, + binary_handler_class_name, + persistent_handler_class_name, + file_url, + oai_ids, + force_update, + var_from, + until, + period_in_days, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if base_url is not None: + + _query_params.append(('baseUrl', base_url)) + + if set is not None: + + _query_params.append(('set', set)) + + if metadata_prefix is not None: + + _query_params.append(('metadataPrefix', metadata_prefix)) + + if metadataset is not None: + + _query_params.append(('metadataset', metadataset)) + + if class_name is not None: + + _query_params.append(('className', class_name)) + + if importer_class_name is not None: + + _query_params.append(('importerClassName', importer_class_name)) + + if record_handler_class_name is not None: + + _query_params.append(('recordHandlerClassName', record_handler_class_name)) + + if binary_handler_class_name is not None: + + _query_params.append(('binaryHandlerClassName', binary_handler_class_name)) + + if persistent_handler_class_name is not None: + + _query_params.append(('persistentHandlerClassName', persistent_handler_class_name)) + + if file_url is not None: + + _query_params.append(('fileUrl', file_url)) + + if oai_ids is not None: + + _query_params.append(('oaiIds', oai_ids)) + + if force_update is not None: + + _query_params.append(('forceUpdate', force_update)) + + if var_from is not None: + + _query_params.append(('from', var_from)) + + if until is not None: + + _query_params.append(('until', until)) + + if period_in_days is not None: + + _query_params.append(('periodInDays', period_in_days)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + 
resource_path='/admin/v1/import/oai', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_oai_xml( + self, + record_handler_class_name: Annotated[Optional[StrictStr], Field(description="RecordHandler class name")] = None, + binary_handler_class_name: Annotated[Optional[StrictStr], Field(description="BinaryHandler class name (may be empty for none)")] = None, + xml: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Node: + """Import single xml via oai (for testing) + + + :param record_handler_class_name: RecordHandler class name + :type record_handler_class_name: str + :param binary_handler_class_name: BinaryHandler class name (may be empty for none) + :type binary_handler_class_name: str + :param xml: + :type xml: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
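+
+        A minimal sketch, assuming an instance ``admin_api`` of this API class; the
+        shape of the ``xml`` form payload below is a placeholder, not taken from the
+        specification::
+
+            # single OAI record for testing; the expected field layout is an assumption
+            test_record = {"record": "<single OAI record as XML>"}
+            node = admin_api.import_oai_xml(
+                record_handler_class_name="<RecordHandler class from /classes>",
+                xml=test_record,
+            )
+            print(node)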
+ """ # noqa: E501 + + _param = self._import_oai_xml_serialize( + record_handler_class_name=record_handler_class_name, + binary_handler_class_name=binary_handler_class_name, + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_oai_xml_with_http_info( + self, + record_handler_class_name: Annotated[Optional[StrictStr], Field(description="RecordHandler class name")] = None, + binary_handler_class_name: Annotated[Optional[StrictStr], Field(description="BinaryHandler class name (may be empty for none)")] = None, + xml: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Node]: + """Import single xml via oai (for testing) + + + :param record_handler_class_name: RecordHandler class name + :type record_handler_class_name: str + :param binary_handler_class_name: BinaryHandler class name (may be empty for none) + :type binary_handler_class_name: str + :param xml: + :type xml: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_oai_xml_serialize( + record_handler_class_name=record_handler_class_name, + binary_handler_class_name=binary_handler_class_name, + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_oai_xml_without_preload_content( + self, + record_handler_class_name: Annotated[Optional[StrictStr], Field(description="RecordHandler class name")] = None, + binary_handler_class_name: Annotated[Optional[StrictStr], Field(description="BinaryHandler class name (may be empty for none)")] = None, + xml: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Import single xml via oai (for testing) + + + :param record_handler_class_name: RecordHandler class name + :type record_handler_class_name: str + :param binary_handler_class_name: BinaryHandler class name (may be empty for none) + :type binary_handler_class_name: str + :param xml: + :type xml: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_oai_xml_serialize( + record_handler_class_name=record_handler_class_name, + binary_handler_class_name=binary_handler_class_name, + xml=xml, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_oai_xml_serialize( + self, + record_handler_class_name, + binary_handler_class_name, + xml, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if record_handler_class_name is not None: + + _query_params.append(('recordHandlerClassName', record_handler_class_name)) + + if binary_handler_class_name is not None: + + _query_params.append(('binaryHandlerClassName', binary_handler_class_name)) + + # process the header parameters + # process the form parameters + if xml is not None: + _form_params.append(('xml', xml)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/import/oai/xml', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def refresh_app_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """refresh app info + + Refresh the application info. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._refresh_app_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def refresh_app_info_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """refresh app info + + Refresh the application info. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
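+
+        A short sketch (``admin_api`` stands for an assumed instance of this API
+        class); the ``_with_http_info`` variant returns an ``ApiResponse`` that is
+        expected to carry the HTTP status code next to the deserialized data::
+
+            api_response = admin_api.refresh_app_info_with_http_info()
+            print(api_response.status_code)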
+ """ # noqa: E501 + + _param = self._refresh_app_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def refresh_app_info_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """refresh app info + + Refresh the application info. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._refresh_app_info_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _refresh_app_info_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/refreshAppInfo', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def refresh_cache( + self, + folder: Annotated[StrictStr, Field(description="refresh cache root folder id")], + sticky: Annotated[StrictBool, Field(description="sticky")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Refresh cache + + Refresh importer cache. + + :param folder: refresh cache root folder id (required) + :type folder: str + :param sticky: sticky (required) + :type sticky: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._refresh_cache_serialize( + folder=folder, + sticky=sticky, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def refresh_cache_with_http_info( + self, + folder: Annotated[StrictStr, Field(description="refresh cache root folder id")], + sticky: Annotated[StrictBool, Field(description="sticky")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Refresh cache + + Refresh importer cache. + + :param folder: refresh cache root folder id (required) + :type folder: str + :param sticky: sticky (required) + :type sticky: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._refresh_cache_serialize( + folder=folder, + sticky=sticky, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def refresh_cache_without_preload_content( + self, + folder: Annotated[StrictStr, Field(description="refresh cache root folder id")], + sticky: Annotated[StrictBool, Field(description="sticky")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Refresh cache + + Refresh importer cache. + + :param folder: refresh cache root folder id (required) + :type folder: str + :param sticky: sticky (required) + :type sticky: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._refresh_cache_serialize( + folder=folder, + sticky=sticky, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _refresh_cache_serialize( + self, + folder, + sticky, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if folder is not None: + _path_params['folder'] = folder + # process the query parameters + if sticky is not None: + + _query_params.append(('sticky', sticky)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/import/refreshCache/{folder}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def refresh_edu_group_cache( + self, + keep_existing: Annotated[Optional[StrictBool], Field(description="keep existing")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Refresh the Edu Group Cache + + Refresh the Edu Group Cache. + + :param keep_existing: keep existing + :type keep_existing: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._refresh_edu_group_cache_serialize( + keep_existing=keep_existing, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def refresh_edu_group_cache_with_http_info( + self, + keep_existing: Annotated[Optional[StrictBool], Field(description="keep existing")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Refresh the Edu Group Cache + + Refresh the Edu Group Cache. + + :param keep_existing: keep existing + :type keep_existing: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
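+
+        A sketch assuming an ``admin_api`` instance; ``_request_timeout`` is passed
+        as the (connection, read) tuple described above::
+
+            api_response = admin_api.refresh_edu_group_cache_with_http_info(
+                keep_existing=True,
+                _request_timeout=(5.0, 60.0),
+            )
+            print(api_response.status_code)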
+ """ # noqa: E501 + + _param = self._refresh_edu_group_cache_serialize( + keep_existing=keep_existing, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def refresh_edu_group_cache_without_preload_content( + self, + keep_existing: Annotated[Optional[StrictBool], Field(description="keep existing")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Refresh the Edu Group Cache + + Refresh the Edu Group Cache. + + :param keep_existing: keep existing + :type keep_existing: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._refresh_edu_group_cache_serialize( + keep_existing=keep_existing, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _refresh_edu_group_cache_serialize( + self, + keep_existing, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if keep_existing is not None: + + _query_params.append(('keepExisting', keep_existing)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/cache/refreshEduGroupCache', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_application( + self, + id: Annotated[StrictStr, Field(description="Application id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """remove an application + + remove the specified application. + + :param id: Application id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_application_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_application_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Application id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """remove an application + + remove the specified application. + + :param id: Application id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_application_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_application_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Application id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """remove an application + + remove the specified application. 
+ + :param id: Application id (required) + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_application_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_application_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/admin/v1/applications/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_cache_entry( + self, + cache_index: Annotated[Optional[StrictInt], Field(description="cacheIndex")] = None, + bean: Annotated[Optional[StrictStr], Field(description="bean")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """remove cache entry + + remove cache entry + + :param cache_index: cacheIndex + :type 
cache_index: int + :param bean: bean + :type bean: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_cache_entry_serialize( + cache_index=cache_index, + bean=bean, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_cache_entry_with_http_info( + self, + cache_index: Annotated[Optional[StrictInt], Field(description="cacheIndex")] = None, + bean: Annotated[Optional[StrictStr], Field(description="bean")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """remove cache entry + + remove cache entry + + :param cache_index: cacheIndex + :type cache_index: int + :param bean: bean + :type bean: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
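+
+        A sketch with placeholder values, assuming an ``admin_api`` instance::
+
+            api_response = admin_api.remove_cache_entry_with_http_info(
+                cache_index=0,
+                bean="<cache bean name>",
+            )
+            print(api_response.status_code)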
+ """ # noqa: E501 + + _param = self._remove_cache_entry_serialize( + cache_index=cache_index, + bean=bean, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_cache_entry_without_preload_content( + self, + cache_index: Annotated[Optional[StrictInt], Field(description="cacheIndex")] = None, + bean: Annotated[Optional[StrictStr], Field(description="bean")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """remove cache entry + + remove cache entry + + :param cache_index: cacheIndex + :type cache_index: int + :param bean: bean + :type bean: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_cache_entry_serialize( + cache_index=cache_index, + bean=bean, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_cache_entry_serialize( + self, + cache_index, + bean, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if cache_index is not None: + + _query_params.append(('cacheIndex', cache_index)) + + if bean is not None: + + _query_params.append(('bean', bean)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/cache/removeCacheEntry', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_oai_imports( + self, + base_url: Annotated[StrictStr, Field(description="base url")], + set: Annotated[StrictStr, Field(description="set/catalog id")], + metadata_prefix: Annotated[StrictStr, Field(description="metadata prefix")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove deleted imports + + Remove deleted imports. + + :param base_url: base url (required) + :type base_url: str + :param set: set/catalog id (required) + :type set: str + :param metadata_prefix: metadata prefix (required) + :type metadata_prefix: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_oai_imports_serialize( + base_url=base_url, + set=set, + metadata_prefix=metadata_prefix, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_oai_imports_with_http_info( + self, + base_url: Annotated[StrictStr, Field(description="base url")], + set: Annotated[StrictStr, Field(description="set/catalog id")], + metadata_prefix: Annotated[StrictStr, Field(description="metadata prefix")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove deleted imports + + Remove deleted imports. + + :param base_url: base url (required) + :type base_url: str + :param set: set/catalog id (required) + :type set: str + :param metadata_prefix: metadata prefix (required) + :type metadata_prefix: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
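+
+        A sketch assuming an ``admin_api`` instance; the parameters mirror those of
+        the original OAI import and are placeholders here::
+
+            admin_api.remove_oai_imports_with_http_info(
+                base_url="https://repo.example.org/oai/provider",
+                set="example-set",
+                metadata_prefix="oai_dc",
+            )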
+ """ # noqa: E501 + + _param = self._remove_oai_imports_serialize( + base_url=base_url, + set=set, + metadata_prefix=metadata_prefix, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_oai_imports_without_preload_content( + self, + base_url: Annotated[StrictStr, Field(description="base url")], + set: Annotated[StrictStr, Field(description="set/catalog id")], + metadata_prefix: Annotated[StrictStr, Field(description="metadata prefix")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove deleted imports + + Remove deleted imports. + + :param base_url: base url (required) + :type base_url: str + :param set: set/catalog id (required) + :type set: str + :param metadata_prefix: metadata prefix (required) + :type metadata_prefix: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_oai_imports_serialize( + base_url=base_url, + set=set, + metadata_prefix=metadata_prefix, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_oai_imports_serialize( + self, + base_url, + set, + metadata_prefix, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if base_url is not None: + + _query_params.append(('baseUrl', base_url)) + + if set is not None: + + _query_params.append(('set', set)) + + if metadata_prefix is not None: + + _query_params.append(('metadataPrefix', metadata_prefix)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/admin/v1/import/oai', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_by_elastic_dsl( + self, + dsl: Annotated[Optional[StrictStr], Field(description="dsl query (json encoded)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultElastic: + """Search for custom elastic DSL query + + + :param dsl: dsl query (json encoded) + :type dsl: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_by_elastic_dsl_serialize( + dsl=dsl, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultElastic", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_by_elastic_dsl_with_http_info( + self, + dsl: Annotated[Optional[StrictStr], Field(description="dsl query (json encoded)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResultElastic]: + """Search for custom elastic DSL query + + + :param dsl: dsl query (json encoded) + :type dsl: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
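+
+        Example (sketch; ``api`` is an assumed client instance and the DSL
+        payload below is a placeholder)::
+
+            import json
+
+            # a minimal match-all query, json encoded as the endpoint expects
+            dsl = json.dumps({"query": {"match_all": {}}})
+            resp = api.search_by_elastic_dsl_with_http_info(dsl=dsl)
+            print(resp.data)  # SearchResultElastic on success
+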
+ """ # noqa: E501 + + _param = self._search_by_elastic_dsl_serialize( + dsl=dsl, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultElastic", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_by_elastic_dsl_without_preload_content( + self, + dsl: Annotated[Optional[StrictStr], Field(description="dsl query (json encoded)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search for custom elastic DSL query + + + :param dsl: dsl query (json encoded) + :type dsl: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_by_elastic_dsl_serialize( + dsl=dsl, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultElastic", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_by_elastic_dsl_serialize( + self, + dsl, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if dsl is not None: + + _query_params.append(('dsl', dsl)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/elastic', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_by_lucene( + self, + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + store: Annotated[Optional[StrictStr], Field(description="store, workspace or archive")] = None, + authority_scope: Annotated[Optional[List[StrictStr]], Field(description="authority scope to search for")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResult: + """Search for custom lucene query + + e.g. 
@cm\\:name:\"*\" + + :param query: query + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param store: store, workspace or archive + :type store: str + :param authority_scope: authority scope to search for + :type authority_scope: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_by_lucene_serialize( + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + store=store, + authority_scope=authority_scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_by_lucene_with_http_info( + self, + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + store: Annotated[Optional[StrictStr], Field(description="store, workspace or archive")] = None, + authority_scope: Annotated[Optional[List[StrictStr]], Field(description="authority scope to search for")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResult]: + """Search for custom lucene query + + e.g. @cm\\:name:\"*\" + + :param query: query + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param store: store, workspace or archive + :type store: str + :param authority_scope: authority scope to search for + :type authority_scope: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
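+
+        Example (sketch; ``api`` is an assumed client instance, the values are
+        placeholders)::
+
+            resp = api.search_by_lucene_with_http_info(
+                query='@cm\\:name:"*"',        # lucene query, as in the e.g. above
+                max_items=10,                  # page size
+                skip_count=0,                  # offset
+                property_filter=["-all-"]      # return all properties
+            )
+            result = resp.data                 # SearchResult on success
+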
+ """ # noqa: E501 + + _param = self._search_by_lucene_serialize( + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + store=store, + authority_scope=authority_scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_by_lucene_without_preload_content( + self, + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + store: Annotated[Optional[StrictStr], Field(description="store, workspace or archive")] = None, + authority_scope: Annotated[Optional[List[StrictStr]], Field(description="authority scope to search for")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search for custom lucene query + + e.g. @cm\\:name:\"*\" + + :param query: query + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param store: store, workspace or archive + :type store: str + :param authority_scope: authority scope to search for + :type authority_scope: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_by_lucene_serialize( + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + store=store, + authority_scope=authority_scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_by_lucene_serialize( + self, + query, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + store, + authority_scope, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + 'authorityScope': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if query is not None: + + _query_params.append(('query', query)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + if store is not None: + + _query_params.append(('store', store)) + + if authority_scope is not None: + + _query_params.append(('authorityScope', authority_scope)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/lucene', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def server_update_list( + self, + _request_timeout: Union[ + None, + 
Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """list available update tasks + + list available update tasks + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._server_update_list_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def server_update_list_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """list available update tasks + + list available update tasks + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
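+
+        Example (sketch; ``api`` is an assumed client instance, construction not
+        shown in this hunk)::
+
+            resp = api.server_update_list_with_http_info()
+            print(resp.data)   # string listing the available tasks (200 mapping above)
+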
+ """ # noqa: E501 + + _param = self._server_update_list_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def server_update_list_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list available update tasks + + list available update tasks + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._server_update_list_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _server_update_list_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/admin/v1/serverUpdate/list', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def server_update_list1( + self, + id: Annotated[StrictStr, Field(description="Id of the update task")], + execute: Annotated[StrictBool, Field(description="Actually execute (if false, just runs in test mode)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Run an update tasks + + Run a specific update task (test or full update). + + :param id: Id of the update task (required) + :type id: str + :param execute: Actually execute (if false, just runs in test mode) (required) + :type execute: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._server_update_list1_serialize( + id=id, + execute=execute, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def server_update_list1_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Id of the update task")], + execute: Annotated[StrictBool, Field(description="Actually execute (if false, just runs in test mode)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Run an update tasks + + Run a specific update task (test or full update). + + :param id: Id of the update task (required) + :type id: str + :param execute: Actually execute (if false, just runs in test mode) (required) + :type execute: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
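+
+        Example (sketch; ``api`` is an assumed client instance and the task id is
+        a placeholder)::
+
+            # dry run: execute=False only runs the task in test mode
+            resp = api.server_update_list1_with_http_info(
+                id="SomeUpdateTask",   # placeholder task id
+                execute=False
+            )
+            print(resp.data)
+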
+ """ # noqa: E501 + + _param = self._server_update_list1_serialize( + id=id, + execute=execute, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def server_update_list1_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Id of the update task")], + execute: Annotated[StrictBool, Field(description="Actually execute (if false, just runs in test mode)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Run an update tasks + + Run a specific update task (test or full update). + + :param id: Id of the update task (required) + :type id: str + :param execute: Actually execute (if false, just runs in test mode) (required) + :type execute: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._server_update_list1_serialize( + id=id, + execute=execute, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _server_update_list1_serialize( + self, + id, + execute, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + if execute is not None: + + _query_params.append(('execute', execute)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/serverUpdate/run/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_config( + self, + repository_config: Optional[RepositoryConfig] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """set/update the repository config object + + + :param repository_config: + :type repository_config: RepositoryConfig + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_config_serialize( + repository_config=repository_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_config_with_http_info( + self, + repository_config: Optional[RepositoryConfig] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """set/update the repository config object + + + :param repository_config: + :type repository_config: RepositoryConfig + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_config_serialize( + repository_config=repository_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_config_without_preload_content( + self, + repository_config: Optional[RepositoryConfig] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """set/update the repository config object + + + :param repository_config: + :type repository_config: RepositoryConfig + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_config_serialize( + repository_config=repository_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_config_serialize( + self, + repository_config, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if repository_config is not None: + _body_params = repository_config + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/repositoryConfig', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_toolpermissions( + self, + authority: Annotated[StrictStr, Field(description="Authority to set (user or group)")], + request_body: Optional[Dict[str, StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """set toolpermissions for an authority + + If a toolpermission has status UNDEFINED, it will remove explicit permissions for the authority + + :param authority: Authority to set (user or group) (required) + :type authority: str + :param request_body: + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_toolpermissions_serialize( + authority=authority, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_toolpermissions_with_http_info( + self, + authority: Annotated[StrictStr, Field(description="Authority to set (user or group)")], + request_body: Optional[Dict[str, StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """set toolpermissions for an authority + + If a toolpermission has status UNDEFINED, it will remove explicit permissions for the authority + + :param authority: Authority to set (user or group) (required) + :type authority: str + :param request_body: + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
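+
+        Example (sketch; ``api`` is an assumed client instance, the authority and
+        toolpermission name are placeholders)::
+
+            resp = api.set_toolpermissions_with_http_info(
+                authority="GROUP_example",   # placeholder group authority
+                request_body={
+                    "TOOLPERMISSION_EXAMPLE": "UNDEFINED"   # UNDEFINED removes the explicit permission
+                }
+            )
+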
+ """ # noqa: E501 + + _param = self._set_toolpermissions_serialize( + authority=authority, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_toolpermissions_without_preload_content( + self, + authority: Annotated[StrictStr, Field(description="Authority to set (user or group)")], + request_body: Optional[Dict[str, StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """set toolpermissions for an authority + + If a toolpermission has status UNDEFINED, it will remove explicit permissions for the authority + + :param authority: Authority to set (user or group) (required) + :type authority: str + :param request_body: + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_toolpermissions_serialize( + authority=authority, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_toolpermissions_serialize( + self, + authority, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if authority is not None: + _path_params['authority'] = authority + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/toolpermissions/{authority}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def start_job( + self, + job_class: Annotated[StrictStr, Field(description="jobClass")], + request_body: Annotated[Dict[str, Dict[str, Any]], Field(description="params")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Start a Job. + + Start a Job. + + :param job_class: jobClass (required) + :type job_class: str + :param request_body: params (required) + :type request_body: Dict[str, object] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._start_job_serialize( + job_class=job_class, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def start_job_with_http_info( + self, + job_class: Annotated[StrictStr, Field(description="jobClass")], + request_body: Annotated[Dict[str, Dict[str, Any]], Field(description="params")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Start a Job. + + Start a Job. + + :param job_class: jobClass (required) + :type job_class: str + :param request_body: params (required) + :type request_body: Dict[str, object] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
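+
+ Usage sketch (illustration only; ``api`` is an ``ADMINV1Api`` instance built as
+ in the ``set_toolpermissions`` sketch, and the job class name is a placeholder;
+ available job classes depend on the repository). ``start_job`` starts the job
+ without waiting for its result; see ``start_job_sync`` for the blocking variant::
+
+     api.start_job(
+         job_class="org.edu_sharing.repository.server.jobs.quartz.RefreshCacheJob",
+         request_body={},  # job parameters; left empty here
+     )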
+ """ # noqa: E501 + + _param = self._start_job_serialize( + job_class=job_class, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def start_job_without_preload_content( + self, + job_class: Annotated[StrictStr, Field(description="jobClass")], + request_body: Annotated[Dict[str, Dict[str, Any]], Field(description="params")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Start a Job. + + Start a Job. + + :param job_class: jobClass (required) + :type job_class: str + :param request_body: params (required) + :type request_body: Dict[str, object] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._start_job_serialize( + job_class=job_class, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _start_job_serialize( + self, + job_class, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if job_class is not None: + _path_params['jobClass'] = job_class + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/job/{jobClass}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def start_job_sync( + self, + job_class: Annotated[StrictStr, Field(description="jobClass")], + request_body: Annotated[Dict[str, Dict[str, Any]], Field(description="params")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Start a Job. + + Start a Job. Wait for the result synchronously + + :param job_class: jobClass (required) + :type job_class: str + :param request_body: params (required) + :type request_body: Dict[str, object] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._start_job_sync_serialize( + job_class=job_class, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def start_job_sync_with_http_info( + self, + job_class: Annotated[StrictStr, Field(description="jobClass")], + request_body: Annotated[Dict[str, Dict[str, Any]], Field(description="params")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """Start a Job. + + Start a Job. Wait for the result synchronously + + :param job_class: jobClass (required) + :type job_class: str + :param request_body: params (required) + :type request_body: Dict[str, object] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
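+
+ Usage sketch (illustration only; ``api`` is an ``ADMINV1Api`` instance as in the
+ ``set_toolpermissions`` sketch, and the job class name is a placeholder). Unlike
+ ``start_job``, this variant waits for the job to finish and returns its result::
+
+     result = api.start_job_sync(
+         job_class="org.edu_sharing.repository.server.jobs.quartz.RefreshCacheJob",
+         request_body={},
+     )
+     print(result)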
+ """ # noqa: E501 + + _param = self._start_job_sync_serialize( + job_class=job_class, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def start_job_sync_without_preload_content( + self, + job_class: Annotated[StrictStr, Field(description="jobClass")], + request_body: Annotated[Dict[str, Dict[str, Any]], Field(description="params")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Start a Job. + + Start a Job. Wait for the result synchronously + + :param job_class: jobClass (required) + :type job_class: str + :param request_body: params (required) + :type request_body: Dict[str, object] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._start_job_sync_serialize( + job_class=job_class, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _start_job_sync_serialize( + self, + job_class, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if job_class is not None: + _path_params['jobClass'] = job_class + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/job/{jobClass}/sync', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def switch_authority( + self, + authority_name: Annotated[StrictStr, Field(description="the authority to use (must be a person)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """switch the session to a known authority name + + + :param authority_name: the authority to use (must be a person) (required) + :type authority_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._switch_authority_serialize( + authority_name=authority_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def switch_authority_with_http_info( + self, + authority_name: Annotated[StrictStr, Field(description="the authority to use (must be a person)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """switch the session to a known authority name + + + :param authority_name: the authority to use (must be a person) (required) + :type authority_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
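+
+ Usage sketch (illustration only; ``api`` is an ``ADMINV1Api`` instance as in the
+ ``set_toolpermissions`` sketch, and ``jdoe`` is a placeholder person). The
+ ``*_with_http_info`` variants return an ``ApiResponse`` wrapper instead of only
+ the deserialized data::
+
+     resp = api.switch_authority_with_http_info(authority_name="jdoe")
+     print(resp.status_code)  # HTTP status of the authenticate call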
+ """ # noqa: E501 + + _param = self._switch_authority_serialize( + authority_name=authority_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def switch_authority_without_preload_content( + self, + authority_name: Annotated[StrictStr, Field(description="the authority to use (must be a person)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """switch the session to a known authority name + + + :param authority_name: the authority to use (must be a person) (required) + :type authority_name: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._switch_authority_serialize( + authority_name=authority_name, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _switch_authority_serialize( + self, + authority_name, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if authority_name is not None: + _path_params['authorityName'] = authority_name + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/authenticate/{authorityName}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def test_mail( + self, + receiver: StrictStr, + template: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Test a mail template + + Sends the given template as a test to the given receiver. + + :param receiver: (required) + :type receiver: str + :param template: (required) + :type template: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._test_mail_serialize( + receiver=receiver, + template=template, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def test_mail_with_http_info( + self, + receiver: StrictStr, + template: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Test a mail template + + Sends the given template as a test to the given receiver. + + :param receiver: (required) + :type receiver: str + :param template: (required) + :type template: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._test_mail_serialize( + receiver=receiver, + template=template, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def test_mail_without_preload_content( + self, + receiver: StrictStr, + template: StrictStr, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Test a mail template + + Sends the given template as a test to the given receiver. + + :param receiver: (required) + :type receiver: str + :param template: (required) + :type template: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._test_mail_serialize( + receiver=receiver, + template=template, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _test_mail_serialize( + self, + receiver, + template, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if receiver is not None: + _path_params['receiver'] = receiver + if template is not None: + _path_params['template'] = template + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/admin/v1/mail/{receiver}/{template}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_application_xml( + self, + xml: Annotated[StrictStr, Field(description="Properties Filename (*.xml)")], + request_body: Optional[Dict[str, StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """edit any properties xml (like homeApplication.properties.xml) + + if the key exists, it will be overwritten. Otherwise, it will be created. You only need to transfer keys you want to edit + + :param xml: Properties Filename (*.xml) (required) + :type xml: str + :param request_body: + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_application_xml_serialize( + xml=xml, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_application_xml_with_http_info( + self, + xml: Annotated[StrictStr, Field(description="Properties Filename (*.xml)")], + request_body: Optional[Dict[str, StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """edit any properties xml (like homeApplication.properties.xml) + + if the key exists, it will be overwritten. Otherwise, it will be created. You only need to transfer keys you want to edit + + :param xml: Properties Filename (*.xml) (required) + :type xml: str + :param request_body: + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
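+
+ Usage sketch (illustration only; ``api`` is an ``ADMINV1Api`` instance as in the
+ ``set_toolpermissions`` sketch; the property key and value are placeholders)::
+
+     # add or overwrite a single key in homeApplication.properties.xml
+     api.update_application_xml(
+         xml="homeApplication.properties.xml",
+         request_body={"example_property": "example value"},
+     )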
+ """ # noqa: E501 + + _param = self._update_application_xml_serialize( + xml=xml, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_application_xml_without_preload_content( + self, + xml: Annotated[StrictStr, Field(description="Properties Filename (*.xml)")], + request_body: Optional[Dict[str, StrictStr]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """edit any properties xml (like homeApplication.properties.xml) + + if the key exists, it will be overwritten. Otherwise, it will be created. You only need to transfer keys you want to edit + + :param xml: Properties Filename (*.xml) (required) + :type xml: str + :param request_body: + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_application_xml_serialize( + xml=xml, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_application_xml_serialize( + self, + xml, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if xml is not None: + _path_params['xml'] = xml + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/applications/{xml}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_config_file( + self, + filename: Annotated[StrictStr, Field(description="filename to fetch")], + path_prefix: Annotated[StrictStr, Field(description="path prefix this file belongs to")], + body: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """update a base system config file (e.g. edu-sharing.conf) + + + :param filename: filename to fetch (required) + :type filename: str + :param path_prefix: path prefix this file belongs to (required) + :type path_prefix: str + :param body: + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_config_file_serialize( + filename=filename, + path_prefix=path_prefix, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_config_file_with_http_info( + self, + filename: Annotated[StrictStr, Field(description="filename to fetch")], + path_prefix: Annotated[StrictStr, Field(description="path prefix this file belongs to")], + body: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """update a base system config file (e.g. edu-sharing.conf) + + + :param filename: filename to fetch (required) + :type filename: str + :param path_prefix: path prefix this file belongs to (required) + :type path_prefix: str + :param body: + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
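+
+ Usage sketch (illustration only; ``api`` is an ``ADMINV1Api`` instance as in the
+ ``set_toolpermissions`` sketch; the path prefix and file content are
+ placeholders, consult the repository layout for real values)::
+
+     api.update_config_file(
+         filename="edu-sharing.conf",
+         path_prefix="example/prefix",
+         body="# overridden settings\n",
+     )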
+ """ # noqa: E501 + + _param = self._update_config_file_serialize( + filename=filename, + path_prefix=path_prefix, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_config_file_without_preload_content( + self, + filename: Annotated[StrictStr, Field(description="filename to fetch")], + path_prefix: Annotated[StrictStr, Field(description="path prefix this file belongs to")], + body: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update a base system config file (e.g. edu-sharing.conf) + + + :param filename: filename to fetch (required) + :type filename: str + :param path_prefix: path prefix this file belongs to (required) + :type path_prefix: str + :param body: + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_config_file_serialize( + filename=filename, + path_prefix=path_prefix, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_config_file_serialize( + self, + filename, + path_prefix, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if filename is not None: + + _query_params.append(('filename', filename)) + + if path_prefix is not None: + + _query_params.append(('pathPrefix', path_prefix)) + + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/configFile', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def upload_temp( + self, + name: Annotated[StrictStr, Field(description="filename")], + file: Annotated[Dict[str, Any], Field(description="file to upload")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UploadResult: + """Upload a file + + Upload a file to tomcat temp directory, to use it on the server (e.g. an update) + + :param name: filename (required) + :type name: str + :param file: file to upload (required) + :type file: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._upload_temp_serialize( + name=name, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def upload_temp_with_http_info( + self, + name: Annotated[StrictStr, Field(description="filename")], + file: Annotated[Dict[str, Any], Field(description="file to upload")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UploadResult]: + """Upload a file + + Upload a file to tomcat temp directory, to use it on the server (e.g. an update) + + :param name: filename (required) + :type name: str + :param file: file to upload (required) + :type file: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._upload_temp_serialize( + name=name, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def upload_temp_without_preload_content( + self, + name: Annotated[StrictStr, Field(description="filename")], + file: Annotated[Dict[str, Any], Field(description="file to upload")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Upload a file + + Upload a file to tomcat temp directory, to use it on the server (e.g. an update) + + :param name: filename (required) + :type name: str + :param file: file to upload (required) + :type file: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._upload_temp_serialize( + name=name, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UploadResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _upload_temp_serialize( + self, + name, + file, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if name is not None: + _path_params['name'] = name + # process the query parameters + # process the header parameters + # process the form parameters + if file is not None: + _form_params.append(('file', file)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/admin/v1/upload/temp/{name}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/archivev1_api.py b/edu_sharing_openapi/edu_sharing_client/api/archivev1_api.py new file mode 100644 index 00000000..c7cf0242 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/archivev1_api.py @@ -0,0 +1,1406 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.restore_results import RestoreResults +from edu_sharing_client.models.search_result import SearchResult + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class ARCHIVEV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def purge( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + archived_node_ids: Annotated[List[StrictStr], Field(description="archived node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Searches for archive nodes. + + Searches for archive nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param archived_node_ids: archived node (required) + :type archived_node_ids: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._purge_serialize( + repository=repository, + archived_node_ids=archived_node_ids, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def purge_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + archived_node_ids: Annotated[List[StrictStr], Field(description="archived node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Searches for archive nodes. + + Searches for archive nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param archived_node_ids: archived node (required) + :type archived_node_ids: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
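+
+        Example (illustrative sketch, not generator output): the three generated
+        variants of each operation differ only in how much of the HTTP response
+        they expose. Assuming ``archive_api`` is the ``ARCHIVEV1Api`` instance
+        from the purge example above and the node id is a placeholder::
+
+            ids = ["dummy-node-id-1"]
+            data = archive_api.purge("-home-", ids)                         # deserialized body only
+            info = archive_api.purge_with_http_info("-home-", ids)          # ApiResponse: .status_code, .headers, .data
+            raw = archive_api.purge_without_preload_content("-home-", ids)  # low-level response, body not yet read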
+ """ # noqa: E501 + + _param = self._purge_serialize( + repository=repository, + archived_node_ids=archived_node_ids, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def purge_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + archived_node_ids: Annotated[List[StrictStr], Field(description="archived node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Searches for archive nodes. + + Searches for archive nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param archived_node_ids: archived node (required) + :type archived_node_ids: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._purge_serialize( + repository=repository, + archived_node_ids=archived_node_ids, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _purge_serialize( + self, + repository, + archived_node_ids, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'archivedNodeIds': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if archived_node_ids is not None: + + _query_params.append(('archivedNodeIds', archived_node_ids)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/archive/v1/purge/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def restore( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + archived_node_ids: Annotated[List[StrictStr], Field(description="archived nodes")], + target: Annotated[Optional[StrictStr], Field(description="to target")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RestoreResults: + """restore archived nodes. + + restores archived nodes. restoreStatus can have the following values: FALLBACK_PARENT_NOT_EXISTS, FALLBACK_PARENT_NO_PERMISSION, DUPLICATENAME, FINE + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param archived_node_ids: archived nodes (required) + :type archived_node_ids: List[str] + :param target: to target + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._restore_serialize( + repository=repository, + archived_node_ids=archived_node_ids, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RestoreResults", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def restore_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + archived_node_ids: Annotated[List[StrictStr], Field(description="archived nodes")], + target: Annotated[Optional[StrictStr], Field(description="to target")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RestoreResults]: + """restore archived nodes. + + restores archived nodes. restoreStatus can have the following values: FALLBACK_PARENT_NOT_EXISTS, FALLBACK_PARENT_NO_PERMISSION, DUPLICATENAME, FINE + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param archived_node_ids: archived nodes (required) + :type archived_node_ids: List[str] + :param target: to target + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._restore_serialize( + repository=repository, + archived_node_ids=archived_node_ids, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RestoreResults", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def restore_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + archived_node_ids: Annotated[List[StrictStr], Field(description="archived nodes")], + target: Annotated[Optional[StrictStr], Field(description="to target")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """restore archived nodes. + + restores archived nodes. restoreStatus can have the following values: FALLBACK_PARENT_NOT_EXISTS, FALLBACK_PARENT_NO_PERMISSION, DUPLICATENAME, FINE + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param archived_node_ids: archived nodes (required) + :type archived_node_ids: List[str] + :param target: to target + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
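+
+        Example (illustrative sketch, not generator output): restoring archived
+        nodes into a folder, assuming ``archive_api`` is an ``ARCHIVEV1Api``
+        instance built as in the purge example above; node and target ids are
+        placeholders::
+
+            restore_results = archive_api.restore(
+                repository="-home-",
+                archived_node_ids=["dummy-node-id-1", "dummy-node-id-2"],
+                # target is optional; presumably the original location is used when omitted
+                target="dummy-target-folder-id",
+            )
+            print(restore_results)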
+ """ # noqa: E501 + + _param = self._restore_serialize( + repository=repository, + archived_node_ids=archived_node_ids, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RestoreResults", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _restore_serialize( + self, + repository, + archived_node_ids, + target, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'archivedNodeIds': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if archived_node_ids is not None: + + _query_params.append(('archivedNodeIds', archived_node_ids)) + + if target is not None: + + _query_params.append(('target', target)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/archive/v1/restore/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_archive( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="search pattern")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResult: + """Searches for archive nodes. + + Searches for archive nodes. 
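+
+        Example (illustrative sketch, not generator output): a paged search over
+        the archive, assuming ``archive_api`` is an ``ARCHIVEV1Api`` instance
+        built as in the purge example above; the search pattern is a
+        placeholder::
+
+            search_result = archive_api.search_archive(
+                repository="-home-",
+                pattern="*",
+                max_items=10,
+                skip_count=0,
+                property_filter=["-all-"],
+            )
+            print(search_result)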
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: search pattern (required) + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_archive_serialize( + repository=repository, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_archive_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="search pattern")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, 
+ ) -> ApiResponse[SearchResult]: + """Searches for archive nodes. + + Searches for archive nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: search pattern (required) + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_archive_serialize( + repository=repository, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_archive_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="search pattern")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + 
_headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Searches for archive nodes. + + Searches for archive nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: search pattern (required) + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_archive_serialize( + repository=repository, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_archive_serialize( + self, + repository, + pattern, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if pattern is not None: + _path_params['pattern'] = pattern + # process the query parameters + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', 
sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/archive/v1/search/{repository}/{pattern}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_archive_person( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="search pattern")], + person: Annotated[StrictStr, Field(description="person")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResult: + """Searches for archive nodes. + + Searches for archive nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: search pattern (required) + :type pattern: str + :param person: person (required) + :type person: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_archive_person_serialize( + repository=repository, + pattern=pattern, + person=person, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_archive_person_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="search pattern")], + person: Annotated[StrictStr, Field(description="person")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResult]: + """Searches for archive nodes. + + Searches for archive nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: search pattern (required) + :type pattern: str + :param person: person (required) + :type person: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_archive_person_serialize( + repository=repository, + pattern=pattern, + person=person, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_archive_person_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="search pattern")], + person: Annotated[StrictStr, Field(description="person")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Searches for archive nodes. + + Searches for archive nodes. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: search pattern (required) + :type pattern: str + :param person: person (required) + :type person: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_archive_person_serialize( + repository=repository, + pattern=pattern, + person=person, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_archive_person_serialize( + self, + repository, + pattern, + person, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if pattern is not None: + _path_params['pattern'] = pattern + if person is not None: + _path_params['person'] = person + # process the query parameters + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not 
None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/archive/v1/search/{repository}/{pattern}/{person}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/authenticationv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/authenticationv1_api.py new file mode 100644 index 00000000..4c03b41d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/authenticationv1_api.py @@ -0,0 +1,1360 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import Optional +from typing_extensions import Annotated +from edu_sharing_client.models.authentication_token import AuthenticationToken +from edu_sharing_client.models.login import Login +from edu_sharing_client.models.login_credentials import LoginCredentials +from edu_sharing_client.models.user_profile_app_auth import UserProfileAppAuth + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class AUTHENTICATIONV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def authenticate( + self, + user_id: Annotated[StrictStr, Field(description="User Id")], + user_profile_app_auth: Annotated[Optional[UserProfileAppAuth], Field(description="User Profile")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AuthenticationToken: + """authenticate user of an registered application. 
+ + headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + + :param user_id: User Id (required) + :type user_id: str + :param user_profile_app_auth: User Profile + :type user_profile_app_auth: UserProfileAppAuth + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._authenticate_serialize( + user_id=user_id, + user_profile_app_auth=user_profile_app_auth, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthenticationToken", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def authenticate_with_http_info( + self, + user_id: Annotated[StrictStr, Field(description="User Id")], + user_profile_app_auth: Annotated[Optional[UserProfileAppAuth], Field(description="User Profile")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AuthenticationToken]: + """authenticate user of an registered application. + + headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + + :param user_id: User Id (required) + :type user_id: str + :param user_profile_app_auth: User Profile + :type user_profile_app_auth: UserProfileAppAuth + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._authenticate_serialize( + user_id=user_id, + user_profile_app_auth=user_profile_app_auth, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthenticationToken", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def authenticate_without_preload_content( + self, + user_id: Annotated[StrictStr, Field(description="User Id")], + user_profile_app_auth: Annotated[Optional[UserProfileAppAuth], Field(description="User Profile")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """authenticate user of an registered application. + + headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + + :param user_id: User Id (required) + :type user_id: str + :param user_profile_app_auth: User Profile + :type user_profile_app_auth: UserProfileAppAuth + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
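+
+        Example (illustrative sketch, not generator output): authenticating a
+        user on behalf of a registered application. This assumes a configured
+        ``ApiClient`` named ``client`` as in the earlier examples; the header
+        names come from the description above, while the user id and all header
+        values are placeholders::
+
+            from edu_sharing_client.api.authenticationv1_api import AUTHENTICATIONV1Api
+
+            auth_api = AUTHENTICATIONV1Api(client)
+            token = auth_api.authenticate(
+                user_id="dummy-user@example.org",
+                _headers={
+                    "X-Edu-App-Id": "dummy-app-id",
+                    "X-Edu-App-Ts": "1700000000",
+                    "X-Edu-App-Signed": "dummy-signed-data",
+                    "X-Edu-App-Sig": "dummy-signature",
+                },
+            )
+            print(token)  # AuthenticationToken on success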
+ """ # noqa: E501 + + _param = self._authenticate_serialize( + user_id=user_id, + user_profile_app_auth=user_profile_app_auth, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthenticationToken", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _authenticate_serialize( + self, + user_id, + user_profile_app_auth, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if user_id is not None: + _path_params['userId'] = user_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if user_profile_app_auth is not None: + _body_params = user_profile_app_auth + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/authentication/v1/appauth/{userId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def has_access_to_scope( + self, + scope: Annotated[StrictStr, Field(description="scope")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Returns true if the current user has access to the given scope + + + :param scope: scope (required) + :type scope: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._has_access_to_scope_serialize( + scope=scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def has_access_to_scope_with_http_info( + self, + scope: Annotated[StrictStr, Field(description="scope")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Returns true if the current user has access to the given scope + + + :param scope: scope (required) + :type scope: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._has_access_to_scope_serialize( + scope=scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def has_access_to_scope_without_preload_content( + self, + scope: Annotated[StrictStr, Field(description="scope")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Returns true if the current user has access to the given scope + + + :param scope: scope (required) + :type scope: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._has_access_to_scope_serialize( + scope=scope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _has_access_to_scope_serialize( + self, + scope, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if scope is not None: + + _query_params.append(('scope', scope)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/authentication/v1/hasAccessToScope', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def login( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Login: + """Validates the Basic Auth Credentials and check if the session is a logged in user + + Use the Basic auth header field to transfer the credentials + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._login_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Login", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def login_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Login]: + """Validates the Basic Auth Credentials and check if the session is a logged in user + + Use the Basic auth header field to transfer the credentials + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._login_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Login", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def login_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Validates the Basic Auth Credentials and check if the session is a logged in user + + Use the Basic auth header field to transfer the credentials + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._login_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Login", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _login_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/authentication/v1/validateSession', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def login_to_scope( + self, + login_credentials: Annotated[LoginCredentials, Field(description="credentials, example: test,test")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Login: + """Validates the Basic Auth Credentials and check if the session is a logged in user + + Use the Basic auth header field to transfer the credentials + + :param login_credentials: credentials, example: test,test (required) + :type login_credentials: LoginCredentials + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._login_to_scope_serialize( + login_credentials=login_credentials, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Login", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def login_to_scope_with_http_info( + self, + login_credentials: Annotated[LoginCredentials, Field(description="credentials, example: test,test")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Login]: + """Validates the Basic Auth Credentials and check if the session is a logged in user + + Use the Basic auth header field to transfer the credentials + + :param login_credentials: credentials, example: test,test (required) + :type login_credentials: LoginCredentials + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._login_to_scope_serialize( + login_credentials=login_credentials, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Login", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def login_to_scope_without_preload_content( + self, + login_credentials: Annotated[LoginCredentials, Field(description="credentials, example: test,test")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Validates the Basic Auth Credentials and check if the session is a logged in user + + Use the Basic auth header field to transfer the credentials + + :param login_credentials: credentials, example: test,test (required) + :type login_credentials: LoginCredentials + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._login_to_scope_serialize( + login_credentials=login_credentials, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Login", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _login_to_scope_serialize( + self, + login_credentials, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if login_credentials is not None: + _body_params = login_credentials + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/authentication/v1/loginToScope', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def logout( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Destroys the current session and logout the user + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._logout_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def logout_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Destroys the current session and logout the user + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._logout_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def logout_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Destroys the current session and logout the user + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._logout_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _logout_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/authentication/v1/destroySession', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/bulkv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/bulkv1_api.py new file mode 100644 index 00000000..59a8345e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/bulkv1_api.py @@ -0,0 +1,748 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictStr +from typing import Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.node_entry import NodeEntry + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class BULKV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def find( + self, + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties that must match (with \"AND\" concatenated)")], + resolve_node: Annotated[Optional[StrictBool], Field(description="Return the full node. If you don't need the data, set to false to only return the id (will improve performance)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """gets a given node + + Get a given node based on the posted, multiple criteria. Make sure that they'll provide an unique result + + :param request_body: properties that must match (with \"AND\" concatenated) (required) + :type request_body: Dict[str, List[str]] + :param resolve_node: Return the full node. If you don't need the data, set to false to only return the id (will improve performance) + :type resolve_node: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._find_serialize( + request_body=request_body, + resolve_node=resolve_node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def find_with_http_info( + self, + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties that must match (with \"AND\" concatenated)")], + resolve_node: Annotated[Optional[StrictBool], Field(description="Return the full node. 
If you don't need the data, set to false to only return the id (will improve performance)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """gets a given node + + Get a given node based on the posted, multiple criteria. Make sure that they'll provide an unique result + + :param request_body: properties that must match (with \"AND\" concatenated) (required) + :type request_body: Dict[str, List[str]] + :param resolve_node: Return the full node. If you don't need the data, set to false to only return the id (will improve performance) + :type resolve_node: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._find_serialize( + request_body=request_body, + resolve_node=resolve_node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def find_without_preload_content( + self, + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties that must match (with \"AND\" concatenated)")], + resolve_node: Annotated[Optional[StrictBool], Field(description="Return the full node. If you don't need the data, set to false to only return the id (will improve performance)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """gets a given node + + Get a given node based on the posted, multiple criteria. 
Make sure that they'll provide an unique result + + :param request_body: properties that must match (with \"AND\" concatenated) (required) + :type request_body: Dict[str, List[str]] + :param resolve_node: Return the full node. If you don't need the data, set to false to only return the id (will improve performance) + :type resolve_node: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._find_serialize( + request_body=request_body, + resolve_node=resolve_node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _find_serialize( + self, + request_body, + resolve_node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if resolve_node is not None: + + _query_params.append(('resolveNode', resolve_node)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/bulk/v1/find', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + 
_host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def sync( + self, + group: Annotated[StrictStr, Field(description="The group to which this node belongs to. Used for internal structuring. Please use simple names only")], + match: Annotated[List[StrictStr], Field(description="The properties that must match to identify if this node exists. Multiple properties will be and combined and compared")], + type: Annotated[StrictStr, Field(description="type of node. If the node already exists, this will not change the type afterwards")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, they'll not get filtered via mds, so be careful what you add here")], + group_by: Annotated[Optional[List[StrictStr]], Field(description="The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created)")] = None, + aspects: Annotated[Optional[List[StrictStr]], Field(description="aspects of node")] = None, + resolve_node: Annotated[Optional[StrictBool], Field(description="Return the generated or updated node. If you don't need the data, set to false to only return the id (will improve performance)")] = None, + reset_version: Annotated[Optional[StrictBool], Field(description="reset all versions (like a complete reimport), all data inside edu-sharing will be lost")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create or update a given node + + Depending on the given \"match\" properties either a new node will be created or the existing one will be updated + + :param group: The group to which this node belongs to. Used for internal structuring. Please use simple names only (required) + :type group: str + :param match: The properties that must match to identify if this node exists. Multiple properties will be and combined and compared (required) + :type match: List[str] + :param type: type of node. If the node already exists, this will not change the type afterwards (required) + :type type: str + :param request_body: properties, they'll not get filtered via mds, so be careful what you add here (required) + :type request_body: Dict[str, List[str]] + :param group_by: The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) + :type group_by: List[str] + :param aspects: aspects of node + :type aspects: List[str] + :param resolve_node: Return the generated or updated node. If you don't need the data, set to false to only return the id (will improve performance) + :type resolve_node: bool + :param reset_version: reset all versions (like a complete reimport), all data inside edu-sharing will be lost + :type reset_version: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._sync_serialize( + group=group, + match=match, + type=type, + request_body=request_body, + group_by=group_by, + aspects=aspects, + resolve_node=resolve_node, + reset_version=reset_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def sync_with_http_info( + self, + group: Annotated[StrictStr, Field(description="The group to which this node belongs to. Used for internal structuring. Please use simple names only")], + match: Annotated[List[StrictStr], Field(description="The properties that must match to identify if this node exists. Multiple properties will be and combined and compared")], + type: Annotated[StrictStr, Field(description="type of node. If the node already exists, this will not change the type afterwards")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, they'll not get filtered via mds, so be careful what you add here")], + group_by: Annotated[Optional[List[StrictStr]], Field(description="The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created)")] = None, + aspects: Annotated[Optional[List[StrictStr]], Field(description="aspects of node")] = None, + resolve_node: Annotated[Optional[StrictBool], Field(description="Return the generated or updated node. If you don't need the data, set to false to only return the id (will improve performance)")] = None, + reset_version: Annotated[Optional[StrictBool], Field(description="reset all versions (like a complete reimport), all data inside edu-sharing will be lost")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create or update a given node + + Depending on the given \"match\" properties either a new node will be created or the existing one will be updated + + :param group: The group to which this node belongs to. Used for internal structuring. Please use simple names only (required) + :type group: str + :param match: The properties that must match to identify if this node exists. 
Multiple properties will be and combined and compared (required) + :type match: List[str] + :param type: type of node. If the node already exists, this will not change the type afterwards (required) + :type type: str + :param request_body: properties, they'll not get filtered via mds, so be careful what you add here (required) + :type request_body: Dict[str, List[str]] + :param group_by: The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) + :type group_by: List[str] + :param aspects: aspects of node + :type aspects: List[str] + :param resolve_node: Return the generated or updated node. If you don't need the data, set to false to only return the id (will improve performance) + :type resolve_node: bool + :param reset_version: reset all versions (like a complete reimport), all data inside edu-sharing will be lost + :type reset_version: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._sync_serialize( + group=group, + match=match, + type=type, + request_body=request_body, + group_by=group_by, + aspects=aspects, + resolve_node=resolve_node, + reset_version=reset_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def sync_without_preload_content( + self, + group: Annotated[StrictStr, Field(description="The group to which this node belongs to. Used for internal structuring. Please use simple names only")], + match: Annotated[List[StrictStr], Field(description="The properties that must match to identify if this node exists. Multiple properties will be and combined and compared")], + type: Annotated[StrictStr, Field(description="type of node. 
If the node already exists, this will not change the type afterwards")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, they'll not get filtered via mds, so be careful what you add here")], + group_by: Annotated[Optional[List[StrictStr]], Field(description="The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created)")] = None, + aspects: Annotated[Optional[List[StrictStr]], Field(description="aspects of node")] = None, + resolve_node: Annotated[Optional[StrictBool], Field(description="Return the generated or updated node. If you don't need the data, set to false to only return the id (will improve performance)")] = None, + reset_version: Annotated[Optional[StrictBool], Field(description="reset all versions (like a complete reimport), all data inside edu-sharing will be lost")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create or update a given node + + Depending on the given \"match\" properties either a new node will be created or the existing one will be updated + + :param group: The group to which this node belongs to. Used for internal structuring. Please use simple names only (required) + :type group: str + :param match: The properties that must match to identify if this node exists. Multiple properties will be and combined and compared (required) + :type match: List[str] + :param type: type of node. If the node already exists, this will not change the type afterwards (required) + :type type: str + :param request_body: properties, they'll not get filtered via mds, so be careful what you add here (required) + :type request_body: Dict[str, List[str]] + :param group_by: The properties on which the imported nodes should be grouped (for each value, a folder with the corresponding data is created) + :type group_by: List[str] + :param aspects: aspects of node + :type aspects: List[str] + :param resolve_node: Return the generated or updated node. If you don't need the data, set to false to only return the id (will improve performance) + :type resolve_node: bool + :param reset_version: reset all versions (like a complete reimport), all data inside edu-sharing will be lost + :type reset_version: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._sync_serialize( + group=group, + match=match, + type=type, + request_body=request_body, + group_by=group_by, + aspects=aspects, + resolve_node=resolve_node, + reset_version=reset_version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _sync_serialize( + self, + group, + match, + type, + request_body, + group_by, + aspects, + resolve_node, + reset_version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'match': 'multi', + 'groupBy': 'multi', + 'aspects': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if group is not None: + _path_params['group'] = group + # process the query parameters + if match is not None: + + _query_params.append(('match', match)) + + if group_by is not None: + + _query_params.append(('groupBy', group_by)) + + if type is not None: + + _query_params.append(('type', type)) + + if aspects is not None: + + _query_params.append(('aspects', aspects)) + + if resolve_node is not None: + + _query_params.append(('resolveNode', resolve_node)) + + if reset_version is not None: + + _query_params.append(('resetVersion', reset_version)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/bulk/v1/sync/{group}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/clientutilsv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/clientutilsv1_api.py new file mode 100644 index 00000000..3203c18e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/clientutilsv1_api.py @@ -0,0 +1,311 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import Optional +from typing_extensions import Annotated +from edu_sharing_client.models.website_information import WebsiteInformation + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class CLIENTUTILSV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get_website_information( + self, + url: Annotated[Optional[StrictStr], Field(description="full url with http or https")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> WebsiteInformation: + """Read generic information about a webpage + + + :param url: full url with http or https + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_website_information_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "WebsiteInformation", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_website_information_with_http_info( + self, + url: Annotated[Optional[StrictStr], Field(description="full url with http or https")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[WebsiteInformation]: + """Read generic information about a webpage + + + :param url: full url with http or https + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_website_information_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "WebsiteInformation", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_website_information_without_preload_content( + self, + url: Annotated[Optional[StrictStr], Field(description="full url with http or https")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Read generic information about a webpage + + + :param url: full url with http or https + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_website_information_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "WebsiteInformation", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_website_information_serialize( + self, + url, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if url is not None: + + _query_params.append(('url', url)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/clientUtils/v1/getWebsiteInformation', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/collectionv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/collectionv1_api.py new file mode 100644 index 00000000..f4ba5b02 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/collectionv1_api.py @@ -0,0 +1,4989 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr, field_validator +from typing import Any, Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.abstract_entries import AbstractEntries +from edu_sharing_client.models.collection_entries import CollectionEntries +from edu_sharing_client.models.collection_entry import CollectionEntry +from edu_sharing_client.models.collection_proposal_entries import CollectionProposalEntries +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.models.reference_entries import ReferenceEntries + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class COLLECTIONV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_to_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[StrictStr, Field(description="ID of node")], + source_repo: Annotated[Optional[StrictStr], Field(description="ID of source repository")] = None, + allow_duplicate: Annotated[Optional[StrictBool], Field(description="Allow that a node that already is inside the collection can be added again")] = None, + as_proposal: Annotated[Optional[StrictBool], Field(description="Mark this node only as a proposal (not really adding but just marking it). This can also be used for collections where you don't have permissions")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Add a node to a collection. + + Add a node to a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: ID of node (required) + :type node: str + :param source_repo: ID of source repository + :type source_repo: str + :param allow_duplicate: Allow that a node that already is inside the collection can be added again + :type allow_duplicate: bool + :param as_proposal: Mark this node only as a proposal (not really adding but just marking it). This can also be used for collections where you don't have permissions + :type as_proposal: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_to_collection_serialize( + repository=repository, + collection=collection, + node=node, + source_repo=source_repo, + allow_duplicate=allow_duplicate, + as_proposal=as_proposal, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_to_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[StrictStr, Field(description="ID of node")], + source_repo: Annotated[Optional[StrictStr], Field(description="ID of source repository")] = None, + allow_duplicate: Annotated[Optional[StrictBool], Field(description="Allow that a node that already is inside the collection can be added again")] = None, + as_proposal: Annotated[Optional[StrictBool], Field(description="Mark this node only as a proposal (not really adding but just marking it). This can also be used for collections where you don't have permissions")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Add a node to a collection. + + Add a node to a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: ID of node (required) + :type node: str + :param source_repo: ID of source repository + :type source_repo: str + :param allow_duplicate: Allow that a node that already is inside the collection can be added again + :type allow_duplicate: bool + :param as_proposal: Mark this node only as a proposal (not really adding but just marking it). This can also be used for collections where you don't have permissions + :type as_proposal: bool + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_to_collection_serialize( + repository=repository, + collection=collection, + node=node, + source_repo=source_repo, + allow_duplicate=allow_duplicate, + as_proposal=as_proposal, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_to_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[StrictStr, Field(description="ID of node")], + source_repo: Annotated[Optional[StrictStr], Field(description="ID of source repository")] = None, + allow_duplicate: Annotated[Optional[StrictBool], Field(description="Allow that a node that already is inside the collection can be added again")] = None, + as_proposal: Annotated[Optional[StrictBool], Field(description="Mark this node only as a proposal (not really adding but just marking it). This can also be used for collections where you don't have permissions")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add a node to a collection. + + Add a node to a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: ID of node (required) + :type node: str + :param source_repo: ID of source repository + :type source_repo: str + :param allow_duplicate: Allow that a node that already is inside the collection can be added again + :type allow_duplicate: bool + :param as_proposal: Mark this node only as a proposal (not really adding but just marking it). 
This can also be used for collections where you don't have permissions + :type as_proposal: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_to_collection_serialize( + repository=repository, + collection=collection, + node=node, + source_repo=source_repo, + allow_duplicate=allow_duplicate, + as_proposal=as_proposal, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_to_collection_serialize( + self, + repository, + collection, + node, + source_repo, + allow_duplicate, + as_proposal, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + if node is not None: + _path_params['node'] = node + # process the query parameters + if source_repo is not None: + + _query_params.append(('sourceRepo', source_repo)) + + if allow_duplicate is not None: + + _query_params.append(('allowDuplicate', allow_duplicate)) + + if as_proposal is not None: + + _query_params.append(('asProposal', as_proposal)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/collection/v1/collections/{repository}/{collection}/references/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + 
_host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_icon_of_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + file: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CollectionEntry: + """Writes Preview Image of a collection. + + Writes Preview Image of a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param file: + :type file: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_icon_of_collection_serialize( + repository=repository, + collection=collection, + mimetype=mimetype, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_icon_of_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + file: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CollectionEntry]: + """Writes Preview Image of a collection. + + Writes Preview Image of a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param file: + :type file: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_icon_of_collection_serialize( + repository=repository, + collection=collection, + mimetype=mimetype, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_icon_of_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + file: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Writes Preview Image of a collection. + + Writes Preview Image of a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param file: + :type file: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_icon_of_collection_serialize( + repository=repository, + collection=collection, + mimetype=mimetype, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_icon_of_collection_serialize( + self, + repository, + collection, + mimetype, + file, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + if mimetype is not None: + + _query_params.append(('mimetype', mimetype)) + + # process the header parameters + # process the form parameters + if file is not None: + _form_params.append(('file', file)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/collection/v1/collections/{repository}/{collection}/icon', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection (or \"-root-\" for level0 collections)")], + node: Annotated[Node, Field(description="collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CollectionEntry: + """Create a new collection. + + Create a new collection. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (or \"-root-\" for level0 collections) (required) + :type collection: str + :param node: collection (required) + :type node: Node + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection (or \"-root-\" for level0 collections)")], + node: Annotated[Node, Field(description="collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CollectionEntry]: + """Create a new collection. + + Create a new collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (or \"-root-\" for level0 collections) (required) + :type collection: str + :param node: collection (required) + :type node: Node + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection (or \"-root-\" for level0 collections)")], + node: Annotated[Node, Field(description="collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new collection. + + Create a new collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (or \"-root-\" for level0 collections) (required) + :type collection: str + :param node: collection (required) + :type node: Node + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
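# --- Usage sketch (illustrative, not part of the generated client) ---
# Creating a collection: the endpoint takes a repository id (or "-home-"), the id
# of the parent collection (or "-root-" for a top-level collection) and a Node
# body. The Node model is generated elsewhere in this patch; the field used below
# ("name") is an assumption for illustration only.

from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.collectionv1_api import COLLECTIONV1Api
from edu_sharing_client.models.node import Node

collections_api = COLLECTIONV1Api(ApiClient.get_default())

new_collection = collections_api.create_collection(
    repository="-home-",
    collection="-root-",                        # create it at the top level
    node=Node(name="Crawler test collection"),  # field name assumed, see note above
)
print(new_collection)  # CollectionEntry (see the _response_types_map above)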
+ """ # noqa: E501 + + _param = self._create_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_collection_serialize( + self, + repository, + collection, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if node is not None: + _body_params = node + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/collection/v1/collections/{repository}/{collection}/children', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a collection. + + Delete a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_collection_serialize( + repository=repository, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete a collection. + + Delete a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_collection_serialize( + repository=repository, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a collection. + + Delete a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_collection_serialize( + repository=repository, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_collection_serialize( + self, + repository, + collection, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/collection/v1/collections/{repository}/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_from_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a node from a collection. + + Delete a node from a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_from_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_from_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete a node from a collection. + + Delete a node from a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_from_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_from_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a node from a collection. + + Delete a node from a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_from_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_from_collection_serialize( + self, + repository, + collection, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/collection/v1/collections/{repository}/{collection}/references/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection_id: Annotated[StrictStr, Field(description="ID of collection")], + track: Annotated[Optional[StrictBool], Field(description="track this as a view of the collection (default: true)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CollectionEntry: + """Get a collection. + + Get a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection_id: ID of collection (required) + :type collection_id: str + :param track: track this as a view of the collection (default: true) + :type track: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_collection_serialize( + repository=repository, + collection_id=collection_id, + track=track, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection_id: Annotated[StrictStr, Field(description="ID of collection")], + track: Annotated[Optional[StrictBool], Field(description="track this as a view of the collection (default: true)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CollectionEntry]: + """Get a collection. + + Get a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection_id: ID of collection (required) + :type collection_id: str + :param track: track this as a view of the collection (default: true) + :type track: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collection_serialize( + repository=repository, + collection_id=collection_id, + track=track, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection_id: Annotated[StrictStr, Field(description="ID of collection")], + track: Annotated[Optional[StrictBool], Field(description="track this as a view of the collection (default: true)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a collection. + + Get a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection_id: ID of collection (required) + :type collection_id: str + :param track: track this as a view of the collection (default: true) + :type track: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collection_serialize( + repository=repository, + collection_id=collection_id, + track=track, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_collection_serialize( + self, + repository, + collection_id, + track, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection_id is not None: + _path_params['collectionId'] = collection_id + # process the query parameters + if track is not None: + + _query_params.append(('track', track)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/collection/v1/collections/{repository}/{collectionId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_collections_containing_proposals( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + status: Annotated[Optional[StrictStr], Field(description="status of the proposals to search for")] = None, + fetch_counts: Annotated[Optional[StrictBool], Field(description="fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CollectionProposalEntries: + """Get all collections containing proposals with a given state (via search index) + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param status: status of the proposals to search for + :type status: str + :param fetch_counts: fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data + :type fetch_counts: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collections_containing_proposals_serialize( + repository=repository, + status=status, + fetch_counts=fetch_counts, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionProposalEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_collections_containing_proposals_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + status: Annotated[Optional[StrictStr], Field(description="status of the proposals to search for")] = None, + fetch_counts: Annotated[Optional[StrictBool], Field(description="fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CollectionProposalEntries]: + """Get all collections containing proposals with a given state (via search index) + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param status: status of the proposals to search for + :type status: str + :param fetch_counts: fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data + :type fetch_counts: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_collections_containing_proposals_serialize( + repository=repository, + status=status, + fetch_counts=fetch_counts, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionProposalEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_collections_containing_proposals_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + status: Annotated[Optional[StrictStr], Field(description="status of the proposals to search for")] = None, + fetch_counts: Annotated[Optional[StrictBool], Field(description="fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all collections containing proposals with a given state (via search index) + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param status: status of the proposals to search for + :type status: str + :param fetch_counts: fetch counts of collections (materials and subcollections). 
This parameter will decrease performance so only enable if if you need this data + :type fetch_counts: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_collections_containing_proposals_serialize( + repository=repository, + status=status, + fetch_counts=fetch_counts, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionProposalEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_collections_containing_proposals_serialize( + self, + repository, + status, + fetch_counts, + max_items, + skip_count, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if status is not None: + + _query_params.append(('status', status)) + + if fetch_counts is not None: + + _query_params.append(('fetchCounts', fetch_counts)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form 
parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/collection/v1/collections/{repository}/children/proposals/collections', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_collections_proposals( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection")], + status: Annotated[StrictStr, Field(description="Only show elements with given status")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AbstractEntries: + """Get proposed objects for collection (requires edit permissions on collection). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (required) + :type collection: str + :param status: Only show elements with given status (required) + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collections_proposals_serialize( + repository=repository, + collection=collection, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AbstractEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_collections_proposals_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection")], + status: Annotated[StrictStr, Field(description="Only show elements with given status")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AbstractEntries]: + """Get proposed objects for collection (requires edit permissions on collection). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (required) + :type collection: str + :param status: Only show elements with given status (required) + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collections_proposals_serialize( + repository=repository, + collection=collection, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AbstractEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_collections_proposals_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection")], + status: Annotated[StrictStr, Field(description="Only show elements with given status")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get proposed objects for collection (requires edit permissions on collection). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (required) + :type collection: str + :param status: Only show elements with given status (required) + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collections_proposals_serialize( + repository=repository, + collection=collection, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AbstractEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_collections_proposals_serialize( + self, + repository, + collection, + status, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + if status is not None: + + _query_params.append(('status', status)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/collection/v1/collections/{repository}/{collection}/children/proposals', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_collections_references( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ReferenceEntries: + """Get references objects for collection. 
+ + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (required) + :type collection: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_collections_references_serialize( + repository=repository, + collection=collection, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReferenceEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_collections_references_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ReferenceEntries]: + """Get references objects for collection. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (required) + :type collection: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collections_references_serialize( + repository=repository, + collection=collection, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReferenceEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_collections_references_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get references objects for collection. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (required) + :type collection: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_collections_references_serialize( + repository=repository, + collection=collection, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ReferenceEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_collections_references_serialize( + self, + repository, + collection, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/collection/v1/collections/{repository}/{collection}/children/references', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_collections_subcollections( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection (or \"-root-\" 
for level0 collections)")], + scope: Annotated[StrictStr, Field(description="scope (only relevant if parent == -root-)")], + fetch_counts: Annotated[Optional[StrictBool], Field(description="fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CollectionEntries: + """Get child collections for collection (or root). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (or \"-root-\" for level0 collections) (required) + :type collection: str + :param scope: scope (only relevant if parent == -root-) (required) + :type scope: str + :param fetch_counts: fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data + :type fetch_counts: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_collections_subcollections_serialize( + repository=repository, + collection=collection, + scope=scope, + fetch_counts=fetch_counts, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_collections_subcollections_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection (or \"-root-\" for level0 collections)")], + scope: Annotated[StrictStr, Field(description="scope (only relevant if parent == -root-)")], + fetch_counts: Annotated[Optional[StrictBool], Field(description="fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CollectionEntries]: + """Get child collections for collection (or root). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (or \"-root-\" for level0 collections) (required) + :type collection: str + :param scope: scope (only relevant if parent == -root-) (required) + :type scope: str + :param fetch_counts: fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data + :type fetch_counts: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_collections_subcollections_serialize( + repository=repository, + collection=collection, + scope=scope, + fetch_counts=fetch_counts, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_collections_subcollections_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of parent collection (or \"-root-\" for level0 collections)")], + scope: Annotated[StrictStr, Field(description="scope (only relevant if parent == -root-)")], + fetch_counts: Annotated[Optional[StrictBool], Field(description="fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get child collections for collection (or root). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of parent collection (or \"-root-\" for level0 collections) (required) + :type collection: str + :param scope: scope (only relevant if parent == -root-) (required) + :type scope: str + :param fetch_counts: fetch counts of collections (materials and subcollections). This parameter will decrease performance so only enable if if you need this data + :type fetch_counts: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
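+
+        Example (illustrative sketch; ``api`` is assumed to be a configured
+        instance of this class as shown for ``get_collections_subcollections``,
+        and the raw return value is assumed to behave like a urllib3 response)::
+
+            raw = api.get_collections_subcollections_without_preload_content(
+                repository="-home-",
+                collection="-root-",
+                scope="MY",  # placeholder scope value
+            )
+            if raw.status == 200:
+                body = raw.read()  # raw JSON bytes, deserialize yourself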
+ """ # noqa: E501 + + _param = self._get_collections_subcollections_serialize( + repository=repository, + collection=collection, + scope=scope, + fetch_counts=fetch_counts, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_collections_subcollections_serialize( + self, + repository, + collection, + scope, + fetch_counts, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + if scope is not None: + + _query_params.append(('scope', scope)) + + if fetch_counts is not None: + + _query_params.append(('fetchCounts', fetch_counts)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/collection/v1/collections/{repository}/{collection}/children/collections', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_icon_of_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = 
None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Deletes Preview Image of a collection. + + Deletes Preview Image of a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_icon_of_collection_serialize( + repository=repository, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_icon_of_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Deletes Preview Image of a collection. + + Deletes Preview Image of a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_icon_of_collection_serialize( + repository=repository, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_icon_of_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Deletes Preview Image of a collection. + + Deletes Preview Image of a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
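+
+        Example (illustrative sketch; ``api`` is assumed to be a configured
+        instance of this class and the collection ID is a placeholder)::
+
+            raw = api.remove_icon_of_collection_without_preload_content(
+                repository="-home-",
+                collection="<collection-id>",
+            )
+            assert raw.status == 200  # the endpoint returns no body on success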
+ """ # noqa: E501 + + _param = self._remove_icon_of_collection_serialize( + repository=repository, + collection=collection, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_icon_of_collection_serialize( + self, + repository, + collection, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/collection/v1/collections/{repository}/{collection}/icon', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_collections( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + query: Annotated[StrictStr, Field(description="query string")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> CollectionEntries: + """(Deprecated) Search collections. + + Search collections. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param query: query string (required) + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + warnings.warn("GET /collection/v1/collections/{repository}/search is deprecated.", DeprecationWarning) + + _param = self._search_collections_serialize( + repository=repository, + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_collections_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + query: Annotated[StrictStr, Field(description="query string")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[CollectionEntries]: + """(Deprecated) Search collections. + + Search collections. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param query: query string (required) + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
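+
+        Example (illustrative sketch; ``api`` is assumed to be a configured
+        instance of this class, and the query is a placeholder -- note that the
+        endpoint is deprecated and calling it emits a ``DeprecationWarning``)::
+
+            result = api.search_collections_with_http_info(
+                repository="-home-",
+                query="physik",  # placeholder query string
+                max_items=10,
+            )
+            print(result.status_code)  # HTTP status of the response
+            entries = result.data      # deserialized CollectionEntries model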
+ """ # noqa: E501 + warnings.warn("GET /collection/v1/collections/{repository}/search is deprecated.", DeprecationWarning) + + _param = self._search_collections_serialize( + repository=repository, + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_collections_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + query: Annotated[StrictStr, Field(description="query string")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """(Deprecated) Search collections. + + Search collections. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param query: query string (required) + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + warnings.warn("GET /collection/v1/collections/{repository}/search is deprecated.", DeprecationWarning) + + _param = self._search_collections_serialize( + repository=repository, + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "CollectionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_collections_serialize( + self, + repository, + query, + max_items, + skip_count, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if query is not None: + + _query_params.append(('query', query)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/collection/v1/collections/{repository}/search', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_collection_order( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + request_body: Annotated[Optional[List[StrictStr]], Field(description="List of nodes in the order to be saved. 
If empty, custom order of the collection will be disabled")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection + + Current order will be overriden. Requires full permissions for the parent collection + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param request_body: List of nodes in the order to be saved. If empty, custom order of the collection will be disabled + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_collection_order_serialize( + repository=repository, + collection=collection, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_collection_order_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + request_body: Annotated[Optional[List[StrictStr]], Field(description="List of nodes in the order to be saved. 
If empty, custom order of the collection will be disabled")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection + + Current order will be overriden. Requires full permissions for the parent collection + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param request_body: List of nodes in the order to be saved. If empty, custom order of the collection will be disabled + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_collection_order_serialize( + repository=repository, + collection=collection, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_collection_order_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + request_body: Annotated[Optional[List[StrictStr]], Field(description="List of nodes in the order to be saved. 
If empty, custom order of the collection will be disabled")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection + + Current order will be overriden. Requires full permissions for the parent collection + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param request_body: List of nodes in the order to be saved. If empty, custom order of the collection will be disabled + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
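+
+        Example (illustrative sketch; ``api`` is assumed to be a configured
+        instance of this class and the IDs are placeholders -- pass every node
+        of the collection in the desired order, or an empty list to disable the
+        custom order)::
+
+            raw = api.set_collection_order_without_preload_content(
+                repository="-home-",
+                collection="<collection-id>",
+                request_body=["<node-id-1>", "<node-id-2>", "<node-id-3>"],
+            )
+            assert raw.status == 200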
+ """ # noqa: E501 + + _param = self._set_collection_order_serialize( + repository=repository, + collection=collection, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_collection_order_serialize( + self, + repository, + collection, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'request_body': '', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/collection/v1/collections/{repository}/{collection}/order', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_pinned_collections( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + request_body: Annotated[List[StrictStr], Field(description="List of collections that should be pinned")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set pinned collections. + + Remove all currently pinned collections and set them in the order send. 
Requires TOOLPERMISSION_COLLECTION_PINNING + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param request_body: List of collections that should be pinned (required) + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_pinned_collections_serialize( + repository=repository, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_pinned_collections_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + request_body: Annotated[List[StrictStr], Field(description="List of collections that should be pinned")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set pinned collections. + + Remove all currently pinned collections and set them in the order send. Requires TOOLPERMISSION_COLLECTION_PINNING + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param request_body: List of collections that should be pinned (required) + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_pinned_collections_serialize( + repository=repository, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_pinned_collections_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + request_body: Annotated[List[StrictStr], Field(description="List of collections that should be pinned")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set pinned collections. + + Remove all currently pinned collections and set them in the order send. Requires TOOLPERMISSION_COLLECTION_PINNING + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param request_body: List of collections that should be pinned (required) + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
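+
+        Example (illustrative sketch; ``api`` is assumed to be a configured
+        instance of this class, the IDs are placeholders, and the session must
+        hold TOOLPERMISSION_COLLECTION_PINNING)::
+
+            raw = api.set_pinned_collections_without_preload_content(
+                repository="-home-",
+                request_body=["<collection-id-1>", "<collection-id-2>"],
+            )
+            assert raw.status == 200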
+ """ # noqa: E501 + + _param = self._set_pinned_collections_serialize( + repository=repository, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_pinned_collections_serialize( + self, + repository, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'request_body': '', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/collection/v1/collections/{repository}/pinning', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_collection( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[Node, Field(description="collection node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Update a collection. + + Update a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: collection node (required) + :type node: Node + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_collection_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[Node, Field(description="collection node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Update a collection. + + Update a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: collection node (required) + :type node: Node + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
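+
+        Example (illustrative sketch; ``api`` is assumed to be a configured
+        instance of this class, ``node`` a previously fetched ``Node`` for this
+        collection, and the ``title`` attribute an assumption about the Node
+        model)::
+
+            node.title = "Renamed collection"  # change the fields to update
+            result = api.update_collection_with_http_info(
+                repository="-home-",
+                collection="<collection-id>",
+                node=node,
+            )
+            assert result.status_code == 200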
+ """ # noqa: E501 + + _param = self._update_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_collection_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + collection: Annotated[StrictStr, Field(description="ID of collection")], + node: Annotated[Node, Field(description="collection node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update a collection. + + Update a collection. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param collection: ID of collection (required) + :type collection: str + :param node: collection node (required) + :type node: Node + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_collection_serialize( + repository=repository, + collection=collection, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_collection_serialize( + self, + repository, + collection, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if collection is not None: + _path_params['collection'] = collection + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if node is not None: + _body_params = node + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/collection/v1/collections/{repository}/{collection}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/commentv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/commentv1_api.py new file mode 100644 index 00000000..a5fa3ef3 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/commentv1_api.py @@ -0,0 +1,1264 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import Optional +from typing_extensions import Annotated +from edu_sharing_client.models.comments import Comments + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class COMMENTV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_comment( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + body: Annotated[StrictStr, Field(description="Text content of comment")], + comment_reference: Annotated[Optional[StrictStr], Field(description="In reply to an other comment, can be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """create a new comment + + Adds a comment to the given node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param body: Text content of comment (required) + :type body: str + :param comment_reference: In reply to an other comment, can be null + :type comment_reference: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_comment_serialize( + repository=repository, + node=node, + body=body, + comment_reference=comment_reference, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_comment_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + body: Annotated[StrictStr, Field(description="Text content of comment")], + comment_reference: Annotated[Optional[StrictStr], Field(description="In reply to an other comment, can be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """create a new comment + + Adds a comment to the given node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param body: Text content of comment (required) + :type body: str + :param comment_reference: In reply to an other comment, can be null + :type comment_reference: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_comment_serialize( + repository=repository, + node=node, + body=body, + comment_reference=comment_reference, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_comment_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + body: Annotated[StrictStr, Field(description="Text content of comment")], + comment_reference: Annotated[Optional[StrictStr], Field(description="In reply to an other comment, can be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create a new comment + + Adds a comment to the given node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param body: Text content of comment (required) + :type body: str + :param comment_reference: In reply to an other comment, can be null + :type comment_reference: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_comment_serialize( + repository=repository, + node=node, + body=body, + comment_reference=comment_reference, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_comment_serialize( + self, + repository, + node, + body, + comment_reference, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if comment_reference is not None: + + _query_params.append(('commentReference', comment_reference)) + + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/comment/v1/comments/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_comment( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + comment: Annotated[StrictStr, Field(description="id of the comment to delete")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """delete a comment + + Delete the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param comment: id of the comment to delete (required) + :type comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_comment_serialize( + repository=repository, + comment=comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_comment_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + comment: Annotated[StrictStr, Field(description="id of the comment to delete")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """delete a comment + + Delete the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param comment: id of the comment to delete (required) + :type comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
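+
+        Illustrative, hand-written usage sketch (not generator output); the
+        comment id is a placeholder and the call returns the ``ApiResponse``
+        wrapper rather than just the payload::
+
+            >>> from edu_sharing_client.api.commentv1_api import COMMENTV1Api
+            >>> api = COMMENTV1Api()  # falls back to ApiClient.get_default()
+            >>> response = api.delete_comment_with_http_info(
+            ...     repository="-home-",
+            ...     comment="<comment-id>",
+            ... )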
+ """ # noqa: E501 + + _param = self._delete_comment_serialize( + repository=repository, + comment=comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_comment_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + comment: Annotated[StrictStr, Field(description="id of the comment to delete")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete a comment + + Delete the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param comment: id of the comment to delete (required) + :type comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_comment_serialize( + repository=repository, + comment=comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_comment_serialize( + self, + repository, + comment, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if comment is not None: + _path_params['comment'] = comment + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/comment/v1/comments/{repository}/{comment}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def edit_comment( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + comment: Annotated[StrictStr, Field(description="id of the comment to edit")], + body: Annotated[StrictStr, Field(description="Text content of comment")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """edit a comment + + Edit the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param comment: id of the comment to edit (required) + :type comment: str + :param body: Text content of comment (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._edit_comment_serialize( + repository=repository, + comment=comment, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def edit_comment_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + comment: Annotated[StrictStr, Field(description="id of the comment to edit")], + body: Annotated[StrictStr, Field(description="Text content of comment")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """edit a comment + + Edit the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param comment: id of the comment to edit (required) + :type comment: str + :param body: Text content of comment (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
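+
+        Illustrative, hand-written usage sketch (not generator output); the
+        comment id and the replacement text are placeholders::
+
+            >>> from edu_sharing_client.api.commentv1_api import COMMENTV1Api
+            >>> COMMENTV1Api().edit_comment_with_http_info(
+            ...     repository="-home-",
+            ...     comment="<comment-id>",
+            ...     body="Corrected wording of my earlier comment.",
+            ... )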
+ """ # noqa: E501 + + _param = self._edit_comment_serialize( + repository=repository, + comment=comment, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def edit_comment_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + comment: Annotated[StrictStr, Field(description="id of the comment to edit")], + body: Annotated[StrictStr, Field(description="Text content of comment")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """edit a comment + + Edit the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param comment: id of the comment to edit (required) + :type comment: str + :param body: Text content of comment (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._edit_comment_serialize( + repository=repository, + comment=comment, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _edit_comment_serialize( + self, + repository, + comment, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if comment is not None: + _path_params['comment'] = comment + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/comment/v1/comments/{repository}/{comment}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_comments( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Comments: + """list comments + + List all comments + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_comments_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Comments", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_comments_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Comments]: + """list comments + + List all comments + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
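+
+        Illustrative, hand-written usage sketch (not generator output); the
+        node id is a placeholder and ``.data`` holds the deserialized
+        ``Comments`` model::
+
+            >>> from edu_sharing_client.api.commentv1_api import COMMENTV1Api
+            >>> response = COMMENTV1Api().get_comments_with_http_info(
+            ...     repository="-home-",
+            ...     node="<node-id>",
+            ... )
+            >>> comments = response.data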
+ """ # noqa: E501 + + _param = self._get_comments_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Comments", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_comments_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list comments + + List all comments + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_comments_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Comments", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_comments_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/comment/v1/comments/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/configv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/configv1_api.py new file mode 100644 index 00000000..ef9f036a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/configv1_api.py @@ -0,0 +1,1658 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictStr +from typing_extensions import Annotated +from edu_sharing_client.models.config import Config +from edu_sharing_client.models.dynamic_config import DynamicConfig +from edu_sharing_client.models.language import Language +from edu_sharing_client.models.variables import Variables + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class CONFIGV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get_config1( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Config: + """get repository config values + + Current is the actual (context-based) active config. Global is the default global config if no context is active (may be identical to the current) + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_config1_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Config", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_config1_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Config]: + """get repository config values + + Current is the actual (context-based) active config. Global is the default global config if no context is active (may be identical to the current) + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_config1_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Config", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_config1_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get repository config values + + Current is the actual (context-based) active config. Global is the default global config if no context is active (may be identical to the current) + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
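+
+        Illustrative, hand-written usage sketch (not generator output); the
+        returned object is the raw HTTP response whose body has not been
+        read yet::
+
+            >>> from edu_sharing_client.api.configv1_api import CONFIGV1Api
+            >>> raw = CONFIGV1Api().get_config1_without_preload_content()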
+ """ # noqa: E501 + + _param = self._get_config1_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Config", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_config1_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/config/v1/values', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dynamic_value( + self, + key: Annotated[StrictStr, Field(description="Key of the config value that should be fetched")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DynamicConfig: + """Get a config entry (appropriate rights for the entry are required) + + + :param key: Key of the config value that should be fetched (required) + :type key: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
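+
+        Illustrative, hand-written usage sketch (not generator output); the
+        key is a placeholder and appropriate rights for the entry are
+        required::
+
+            >>> from edu_sharing_client.api.configv1_api import CONFIGV1Api
+            >>> entry = CONFIGV1Api().get_dynamic_value(key="<config-key>")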
+ """ # noqa: E501 + + _param = self._get_dynamic_value_serialize( + key=key, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dynamic_value_with_http_info( + self, + key: Annotated[StrictStr, Field(description="Key of the config value that should be fetched")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DynamicConfig]: + """Get a config entry (appropriate rights for the entry are required) + + + :param key: Key of the config value that should be fetched (required) + :type key: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_dynamic_value_serialize( + key=key, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dynamic_value_without_preload_content( + self, + key: Annotated[StrictStr, Field(description="Key of the config value that should be fetched")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a config entry (appropriate rights for the entry are required) + + + :param key: Key of the config value that should be fetched (required) + :type key: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_dynamic_value_serialize( + key=key, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dynamic_value_serialize( + self, + key, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if key is not None: + _path_params['key'] = key + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/config/v1/dynamic/{key}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_language( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Language: + """get override strings for the current language + + Language strings + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_language_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Language", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_language_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Language]: + """get override strings for the current language + + Language strings + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_language_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Language", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_language_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get override strings for the current language + + Language strings + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_language_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Language", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_language_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/config/v1/language', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_language_defaults( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get all inital language strings for angular + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_language_defaults_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_language_defaults_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get all inital language strings for angular + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_language_defaults_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_language_defaults_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get all inital language strings for angular + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_language_defaults_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_language_defaults_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/config/v1/language/defaults', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_variables( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Variables: + """get global config variables + + global config variables + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_variables_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Variables", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_variables_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Variables]: + """get global config variables + + global config variables + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_variables_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Variables", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_variables_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get global config variables + + global config variables + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_variables_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Variables", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_variables_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/config/v1/variables', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_dynamic_value( + self, + key: Annotated[StrictStr, Field(description="Key of the config value that should be fetched")], + public: Annotated[StrictBool, Field(description="Is everyone allowed to read the value")], + body: Annotated[StrictStr, Field(description="Must be a json-encapsulated string")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DynamicConfig: + """Set a config entry (admin rights required) + + the body must be a json encapsulated string + + :param key: Key of the config value that should be fetched (required) + :type key: str + :param public: Is everyone allowed to read the value (required) + :type public: bool + :param body: 
Must be a json-encapsulated string (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_dynamic_value_serialize( + key=key, + public=public, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_dynamic_value_with_http_info( + self, + key: Annotated[StrictStr, Field(description="Key of the config value that should be fetched")], + public: Annotated[StrictBool, Field(description="Is everyone allowed to read the value")], + body: Annotated[StrictStr, Field(description="Must be a json-encapsulated string")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DynamicConfig]: + """Set a config entry (admin rights required) + + the body must be a json encapsulated string + + :param key: Key of the config value that should be fetched (required) + :type key: str + :param public: Is everyone allowed to read the value (required) + :type public: bool + :param body: Must be a json-encapsulated string (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_dynamic_value_serialize( + key=key, + public=public, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_dynamic_value_without_preload_content( + self, + key: Annotated[StrictStr, Field(description="Key of the config value that should be fetched")], + public: Annotated[StrictBool, Field(description="Is everyone allowed to read the value")], + body: Annotated[StrictStr, Field(description="Must be a json-encapsulated string")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set a config entry (admin rights required) + + the body must be a json encapsulated string + + :param key: Key of the config value that should be fetched (required) + :type key: str + :param public: Is everyone allowed to read the value (required) + :type public: bool + :param body: Must be a json-encapsulated string (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
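[Editorial usage sketch, not part of the generated file: set_dynamic_value posts to /config/v1/dynamic/{key}; the body must be a JSON-encapsulated string, the public flag decides whether everyone may read the entry, and admin rights are required. The key and value below are invented and the CONFIGV1Api name/import path is assumed.]

# Sketch only: store a value under a made-up key, readable by everyone.
import json

from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.configv1_api import CONFIGV1Api  # assumed import path

api = CONFIGV1Api(ApiClient.get_default())
stored = api.set_dynamic_value(
    key="banner.text",               # hypothetical key
    public=True,                     # everyone may read the value
    body=json.dumps("Welcome!"),     # body must be a JSON-encapsulated string
)
print(stored)  # DynamicConfig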
+ """ # noqa: E501 + + _param = self._set_dynamic_value_serialize( + key=key, + public=public, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicConfig", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_dynamic_value_serialize( + self, + key, + public, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if key is not None: + _path_params['key'] = key + # process the query parameters + if public is not None: + + _query_params.append(('public', public)) + + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/config/v1/dynamic/{key}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/connectorv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/connectorv1_api.py new file mode 100644 index 00000000..caf6bb83 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/connectorv1_api.py @@ -0,0 +1,308 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from edu_sharing_client.models.connector_list import ConnectorList + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class CONNECTORV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def list_connectors( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ConnectorList: + """List all available connectors + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_connectors_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ConnectorList", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def list_connectors_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ConnectorList]: + """List all available connectors + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_connectors_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ConnectorList", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def list_connectors_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List all available connectors + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._list_connectors_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ConnectorList", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _list_connectors_serialize( + self, + repository, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/connector/v1/connectors/{repository}/list', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/feedbackv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/feedbackv1_api.py new file mode 100644 index 00000000..c052b51a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/feedbackv1_api.py @@ -0,0 +1,644 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import Dict, List +from typing_extensions import Annotated +from edu_sharing_client.models.feedback_data import FeedbackData +from edu_sharing_client.models.feedback_result import FeedbackResult + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class FEEDBACKV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_feedback( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="feedback data, key/value pairs")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> FeedbackResult: + """Give feedback on a node + + Adds feedback to the given node. Depending on the internal config, the current user will be obscured to prevent back-tracing to the original id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: feedback data, key/value pairs (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_feedback_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "FeedbackResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_feedback_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="feedback data, key/value pairs")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[FeedbackResult]: + """Give feedback on a node + + Adds feedback to the given node. Depending on the internal config, the current user will be obscured to prevent back-tracing to the original id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: feedback data, key/value pairs (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_feedback_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "FeedbackResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_feedback_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="feedback data, key/value pairs")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Give feedback on a node + + Adds feedback to the given node. Depending on the internal config, the current user will be obscured to prevent back-tracing to the original id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: feedback data, key/value pairs (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_feedback_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "FeedbackResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_feedback_serialize( + self, + repository, + node, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/feedback/v1/feedback/{repository}/{node}/add', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_feedbacks( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> List[FeedbackData]: + """Get given feedback on a node + + Get all given feedback for a node. Requires Coordinator permissions on node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_feedbacks_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[FeedbackData]", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_feedbacks_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[List[FeedbackData]]: + """Get given feedback on a node + + Get all given feedback for a node. Requires Coordinator permissions on node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_feedbacks_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[FeedbackData]", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_feedbacks_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get given feedback on a node + + Get all given feedback for a node. Requires Coordinator permissions on node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
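+
+        Example (illustrative sketch; ``feedback_api`` is assumed to be an
+        instance of this API class and the node id is a placeholder; the raw
+        response is left for the caller to read and deserialize):
+
+            raw_response = feedback_api.get_feedbacks_without_preload_content(
+                repository="-home-",
+                node="<node-id>",
+            )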
+ """ # noqa: E501 + + _param = self._get_feedbacks_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "List[FeedbackData]", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_feedbacks_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/feedback/v1/feedback/{repository}/{node}/list', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/iamv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/iamv1_api.py new file mode 100644 index 00000000..9aa95e7e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/iamv1_api.py @@ -0,0 +1,10890 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr, field_validator +from typing import Any, Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.authority_entries import AuthorityEntries +from edu_sharing_client.models.group import Group +from edu_sharing_client.models.group_entries import GroupEntries +from edu_sharing_client.models.group_entry import GroupEntry +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.models.group_signup_details import GroupSignupDetails +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.models.preferences import Preferences +from edu_sharing_client.models.profile_settings import ProfileSettings +from edu_sharing_client.models.user import User +from edu_sharing_client.models.user_credential import UserCredential +from edu_sharing_client.models.user_entries import UserEntries +from edu_sharing_client.models.user_entry import UserEntry +from edu_sharing_client.models.user_profile_edit import UserProfileEdit +from edu_sharing_client.models.user_stats import UserStats + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class IAMV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_membership( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Add member to the group. + + Add member to the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_membership_serialize( + repository=repository, + group=group, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_membership_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Add member to the group. + + Add member to the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_membership_serialize( + repository=repository, + group=group, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_membership_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add member to the group. + + Add member to the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
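+
+        Example (illustrative sketch; ``iam_api`` is assumed to be an
+        ``IAMV1Api`` instance, and the group and member names are placeholders):
+
+            iam_api.add_membership_without_preload_content(
+                repository="-home-",
+                group="GROUP_Example",
+                member="some.user",
+            )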
+ """ # noqa: E501 + + _param = self._add_membership_serialize( + repository=repository, + group=group, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_membership_serialize( + self, + repository, + group, + member, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + if member is not None: + _path_params['member'] = member + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/groups/{repository}/{group}/members/{member}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def add_node_list( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name. If this list does not exist, it will be created")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Add a node to node a list of a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name. If this list does not exist, it will be created (required) + :type list: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_node_list_serialize( + repository=repository, + person=person, + list=list, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_node_list_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name. If this list does not exist, it will be created")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Add a node to node a list of a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name. If this list does not exist, it will be created (required) + :type list: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_node_list_serialize( + repository=repository, + person=person, + list=list, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_node_list_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name. If this list does not exist, it will be created")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add a node to node a list of a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name. If this list does not exist, it will be created (required) + :type list: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
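+
+        Example (illustrative sketch; ``iam_api`` is assumed to be an
+        ``IAMV1Api`` instance, "favorites" is an arbitrary list name and the
+        node id is a placeholder):
+
+            iam_api.add_node_list_without_preload_content(
+                repository="-home-",
+                person="-me-",
+                list="favorites",
+                node="<node-id>",
+            )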
+ """ # noqa: E501 + + _param = self._add_node_list_serialize( + repository=repository, + person=person, + list=list, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_node_list_serialize( + self, + repository, + person, + list, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + if list is not None: + _path_params['list'] = list + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/people/{repository}/{person}/nodeList/{list}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_group_profile( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + group_profile: Annotated[GroupProfile, Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set profile of the group. + + Set profile of the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param group_profile: properties (required) + :type group_profile: GroupProfile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_group_profile_serialize( + repository=repository, + group=group, + group_profile=group_profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_group_profile_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + group_profile: Annotated[GroupProfile, Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set profile of the group. + + Set profile of the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param group_profile: properties (required) + :type group_profile: GroupProfile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_group_profile_serialize( + repository=repository, + group=group, + group_profile=group_profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_group_profile_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + group_profile: Annotated[GroupProfile, Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set profile of the group. + + Set profile of the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param group_profile: properties (required) + :type group_profile: GroupProfile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
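+
+        Example (illustrative sketch; ``iam_api`` is assumed to be an
+        ``IAMV1Api`` instance, ``profile`` an already populated ``GroupProfile``
+        object, and the group name a placeholder):
+
+            iam_api.change_group_profile_without_preload_content(
+                repository="-home-",
+                group="GROUP_Example",
+                group_profile=profile,
+            )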
+ """ # noqa: E501 + + _param = self._change_group_profile_serialize( + repository=repository, + group=group, + group_profile=group_profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_group_profile_serialize( + self, + repository, + group, + group_profile, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if group_profile is not None: + _body_params = group_profile + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/groups/{repository}/{group}/profile', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_user_avatar( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + avatar: Annotated[Dict[str, Any], Field(description="avatar image")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set avatar of the user. + + Set avatar of the user. (To set foreign avatars, admin rights are required.) 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param avatar: avatar image (required) + :type avatar: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_user_avatar_serialize( + repository=repository, + person=person, + avatar=avatar, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_user_avatar_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + avatar: Annotated[Dict[str, Any], Field(description="avatar image")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set avatar of the user. + + Set avatar of the user. (To set foreign avatars, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param avatar: avatar image (required) + :type avatar: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_user_avatar_serialize( + repository=repository, + person=person, + avatar=avatar, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_user_avatar_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + avatar: Annotated[Dict[str, Any], Field(description="avatar image")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set avatar of the user. + + Set avatar of the user. (To set foreign avatars, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param avatar: avatar image (required) + :type avatar: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
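+
+        Example (illustrative sketch; ``iam_api`` is assumed to be an
+        ``IAMV1Api`` instance and ``avatar_payload`` the image data in whatever
+        form this multipart field expects in the target deployment):
+
+            iam_api.change_user_avatar_without_preload_content(
+                repository="-home-",
+                person="-me-",
+                avatar=avatar_payload,
+            )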
+ """ # noqa: E501 + + _param = self._change_user_avatar_serialize( + repository=repository, + person=person, + avatar=avatar, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_user_avatar_serialize( + self, + repository, + person, + avatar, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + if avatar is not None: + _form_params.append(('avatar', avatar)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/people/{repository}/{person}/avatar', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_user_password( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + user_credential: Annotated[UserCredential, Field(description="credential")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Change/Set password of the user. + + Change/Set password of the user. (To change foreign passwords or set passwords, admin rights are required.) 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param user_credential: credential (required) + :type user_credential: UserCredential + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_user_password_serialize( + repository=repository, + person=person, + user_credential=user_credential, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_user_password_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + user_credential: Annotated[UserCredential, Field(description="credential")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Change/Set password of the user. + + Change/Set password of the user. (To change foreign passwords or set passwords, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param user_credential: credential (required) + :type user_credential: UserCredential + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_user_password_serialize( + repository=repository, + person=person, + user_credential=user_credential, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_user_password_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + user_credential: Annotated[UserCredential, Field(description="credential")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Change/Set password of the user. + + Change/Set password of the user. (To change foreign passwords or set passwords, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param user_credential: credential (required) + :type user_credential: UserCredential + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_user_password_serialize( + repository=repository, + person=person, + user_credential=user_credential, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_user_password_serialize( + self, + repository, + person, + user_credential, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if user_credential is not None: + _body_params = user_credential + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/people/{repository}/{person}/credential', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_user_profile( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + user_profile_edit: Annotated[UserProfileEdit, Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set profile of the user. + + Set profile of the user. (To set foreign profiles, admin rights are required.) 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param user_profile_edit: properties (required) + :type user_profile_edit: UserProfileEdit + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_user_profile_serialize( + repository=repository, + person=person, + user_profile_edit=user_profile_edit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_user_profile_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + user_profile_edit: Annotated[UserProfileEdit, Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set profile of the user. + + Set profile of the user. (To set foreign profiles, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param user_profile_edit: properties (required) + :type user_profile_edit: UserProfileEdit + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_user_profile_serialize( + repository=repository, + person=person, + user_profile_edit=user_profile_edit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_user_profile_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + user_profile_edit: Annotated[UserProfileEdit, Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set profile of the user. + + Set profile of the user. (To set foreign profiles, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param user_profile_edit: properties (required) + :type user_profile_edit: UserProfileEdit + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
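+
+        Example: a minimal usage sketch, not part of the generated client.
+        The package name (edu_sharing_client), the API class name (IAMV1Api),
+        the ApiClient/Configuration helpers, the host URL and the
+        UserProfileEdit field names are assumptions about the surrounding
+        generated package, not guaranteed by this method::
+
+            from edu_sharing_client import ApiClient, Configuration, IAMV1Api  # assumed package layout
+            from edu_sharing_client.models import UserProfileEdit              # assumed module path
+
+            config = Configuration(host="https://repo.example.org/edu-sharing/rest")  # placeholder host
+            client = ApiClient(config)
+            api = IAMV1Api(client)  # IAMV1Api: assumed name of this API class
+            result = api.change_user_profile_with_http_info(
+                repository="-home-",
+                person="-me-",
+                user_profile_edit=UserProfileEdit(first_name="Jane", last_name="Doe"),  # field names assumed
+            )
+            print(result.status_code)  # ApiResponse carries status code, headers and data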
+ """ # noqa: E501 + + _param = self._change_user_profile_serialize( + repository=repository, + person=person, + user_profile_edit=user_profile_edit, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_user_profile_serialize( + self, + repository, + person, + user_profile_edit, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if user_profile_edit is not None: + _body_params = user_profile_edit + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/people/{repository}/{person}/profile', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def confirm_signup( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + user: Annotated[StrictStr, Field(description="ID of user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """put the pending user into the group + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param user: ID of user (required) + :type user: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._confirm_signup_serialize( + repository=repository, + group=group, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def confirm_signup_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + user: Annotated[StrictStr, Field(description="ID of user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """put the pending user into the group + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param user: ID of user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._confirm_signup_serialize( + repository=repository, + group=group, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def confirm_signup_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + user: Annotated[StrictStr, Field(description="ID of user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """put the pending user into the group + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param user: ID of user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
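+
+        Example: an illustrative sketch only. The api instance construction,
+        the IAMV1Api class name and the group/user values are assumptions;
+        only the method call itself is defined here::
+
+            api = IAMV1Api(api_client)  # api_client: an already configured ApiClient (assumed)
+            raw = api.confirm_signup_without_preload_content(
+                repository="-home-",
+                group="GROUP_example",   # placeholder group id
+                user="example_user",     # placeholder user id
+            )
+            raw.read()                   # the body is not pre-loaded by this variant
+            print(raw.status, raw.data)  # attributes of the generator's RESTResponse wrapper (assumed)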
+ """ # noqa: E501 + + _param = self._confirm_signup_serialize( + repository=repository, + group=group, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _confirm_signup_serialize( + self, + repository, + group, + user, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + if user is not None: + _path_params['user'] = user + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/groups/{repository}/{group}/signup/list/{user}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_group( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + group_profile: Annotated[GroupProfile, Field(description="properties")], + parent: Annotated[Optional[StrictStr], Field(description="parent (will be added to this parent, also for name hashing), may be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Group: + """Create a new group. + + Create a new group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param group_profile: properties (required) + :type group_profile: GroupProfile + :param parent: parent (will be added to this parent, also for name hashing), may be null + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_group_serialize( + repository=repository, + group=group, + group_profile=group_profile, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Group", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_group_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + group_profile: Annotated[GroupProfile, Field(description="properties")], + parent: Annotated[Optional[StrictStr], Field(description="parent (will be added to this parent, also for name hashing), may be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Group]: + """Create a new group. + + Create a new group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param group_profile: properties (required) + :type group_profile: GroupProfile + :param parent: parent (will be added to this parent, also for name hashing), may be null + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_group_serialize( + repository=repository, + group=group, + group_profile=group_profile, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Group", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_group_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + group_profile: Annotated[GroupProfile, Field(description="properties")], + parent: Annotated[Optional[StrictStr], Field(description="parent (will be added to this parent, also for name hashing), may be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new group. + + Create a new group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param group_profile: properties (required) + :type group_profile: GroupProfile + :param parent: parent (will be added to this parent, also for name hashing), may be null + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
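+
+        Example: sketch only. The IAMV1Api class name, the client setup and
+        the GroupProfile field name display_name are assumptions; the plain
+        create_group() variant would additionally deserialize the body into a
+        Group model, while this variant returns the unread HTTP response::
+
+            from edu_sharing_client.models import GroupProfile  # assumed module path
+
+            api = IAMV1Api(api_client)  # assumed names, see note above
+            raw = api.create_group_without_preload_content(
+                repository="-home-",
+                group="etl_editors",                                     # placeholder group name
+                group_profile=GroupProfile(display_name="ETL editors"),  # field name assumed
+                parent=None,                                             # optional parent group
+            )
+            raw.read()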
+ """ # noqa: E501 + + _param = self._create_group_serialize( + repository=repository, + group=group, + group_profile=group_profile, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Group", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_group_serialize( + self, + repository, + group, + group_profile, + parent, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + if parent is not None: + + _query_params.append(('parent', parent)) + + # process the header parameters + # process the form parameters + # process the body parameter + if group_profile is not None: + _body_params = group_profile + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/iam/v1/groups/{repository}/{group}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_user( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + user_profile_edit: Annotated[UserProfileEdit, Field(description="profile")], + password: Annotated[Optional[StrictStr], Field(description="Password, leave empty if you don't want to set any")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> User: + """Create a new user. + + Create a new user. (admin rights are required.) 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param user_profile_edit: profile (required) + :type user_profile_edit: UserProfileEdit + :param password: Password, leave empty if you don't want to set any + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_user_serialize( + repository=repository, + person=person, + user_profile_edit=user_profile_edit, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "User", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_user_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + user_profile_edit: Annotated[UserProfileEdit, Field(description="profile")], + password: Annotated[Optional[StrictStr], Field(description="Password, leave empty if you don't want to set any")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[User]: + """Create a new user. + + Create a new user. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param user_profile_edit: profile (required) + :type user_profile_edit: UserProfileEdit + :param password: Password, leave empty if you don't want to set any + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_user_serialize( + repository=repository, + person=person, + user_profile_edit=user_profile_edit, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "User", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_user_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + user_profile_edit: Annotated[UserProfileEdit, Field(description="profile")], + password: Annotated[Optional[StrictStr], Field(description="Password, leave empty if you don't want to set any")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new user. + + Create a new user. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param user_profile_edit: profile (required) + :type user_profile_edit: UserProfileEdit + :param password: Password, leave empty if you don't want to set any + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_user_serialize( + repository=repository, + person=person, + user_profile_edit=user_profile_edit, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "User", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_user_serialize( + self, + repository, + person, + user_profile_edit, + password, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + if password is not None: + + _query_params.append(('password', password)) + + # process the header parameters + # process the form parameters + # process the body parameter + if user_profile_edit is not None: + _body_params = user_profile_edit + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/iam/v1/people/{repository}/{person}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_group( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete the group. + + Delete the group. (admin rights are required.) 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_group_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_group_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete the group. + + Delete the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_group_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_group_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete the group. + + Delete the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
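+
+        Example: sketch only; the IAMV1Api name and the client setup are
+        assumptions, and the group id is a placeholder::
+
+            api = IAMV1Api(api_client)  # assumed names
+            raw = api.delete_group_without_preload_content(
+                repository="-home-",
+                group="etl_editors",  # placeholder group name
+            )
+            raw.read()
+            print(raw.status)  # 200 with an empty body indicates success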
+ """ # noqa: E501 + + _param = self._delete_group_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_group_serialize( + self, + repository, + group, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/iam/v1/groups/{repository}/{group}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_membership( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete member from the group. + + Delete member from the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_membership_serialize( + repository=repository, + group=group, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_membership_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete member from the group. + + Delete member from the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
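+
+        Example: sketch only; IAMV1Api and api_client are assumed names for
+        this API class and a configured client::
+
+            api = IAMV1Api(api_client)  # assumed names
+            resp = api.delete_membership_with_http_info(
+                repository="-home-",
+                group="etl_editors",    # placeholder group name
+                member="example_user",  # placeholder authorityName
+            )
+            # the endpoint returns no payload, so only the response metadata matters here
+            print(resp.status_code)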
+ """ # noqa: E501 + + _param = self._delete_membership_serialize( + repository=repository, + group=group, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_membership_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete member from the group. + + Delete member from the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_membership_serialize( + repository=repository, + group=group, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_membership_serialize( + self, + repository, + group, + member, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + if member is not None: + _path_params['member'] = member + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/iam/v1/groups/{repository}/{group}/members/{member}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_user( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + force: Annotated[Optional[StrictBool], Field(description="force the deletion (if false then only persons which are previously marked for deletion are getting deleted)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete the user. + + Delete the user. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param force: force the deletion (if false then only persons which are previously marked for deletion are getting deleted) + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_user_serialize( + repository=repository, + person=person, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_user_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + force: Annotated[Optional[StrictBool], Field(description="force the deletion (if false then only persons which are previously marked for deletion are getting deleted)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete the user. + + Delete the user. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param force: force the deletion (if false then only persons which are previously marked for deletion are getting deleted) + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_user_serialize( + repository=repository, + person=person, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_user_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + force: Annotated[Optional[StrictBool], Field(description="force the deletion (if false then only persons which are previously marked for deletion are getting deleted)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete the user. + + Delete the user. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param force: force the deletion (if false then only persons which are previously marked for deletion are getting deleted) + :type force: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
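+
+ Example (illustrative sketch; ``iam_api`` stands for an instance of this
+ class built on an authenticated ``ApiClient`` with admin rights, and the
+ username below is a placeholder)::
+
+     # with force left at False, only persons previously marked for
+     # deletion are deleted; the response body is not deserialized
+     raw = iam_api.delete_user_without_preload_content(
+         repository="-home-",
+         person="example_user",
+         force=False,
+     )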
+ """ # noqa: E501 + + _param = self._delete_user_serialize( + repository=repository, + person=person, + force=force, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_user_serialize( + self, + repository, + person, + force, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + if force is not None: + + _query_params.append(('force', force)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/iam/v1/people/{repository}/{person}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_group( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GroupEntry: + """Get the group. + + Get the group. (To get foreign profiles, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_group_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_group_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GroupEntry]: + """Get the group. + + Get the group. (To get foreign profiles, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
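+
+ Example (illustrative sketch; ``iam_api`` stands for an instance of this
+ class built on an authenticated ``ApiClient``, and the group name below is
+ a placeholder)::
+
+     result = iam_api.get_group_with_http_info(
+         repository="-home-",
+         group="example_group",
+     )
+     group_entry = result.data  # parsed GroupEntry model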
+ """ # noqa: E501 + + _param = self._get_group_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_group_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get the group. + + Get the group. (To get foreign profiles, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: groupname (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_group_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_group_serialize( + self, + repository, + group, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/groups/{repository}/{group}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_membership( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="authority name (begins with GROUP_)")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + authority_type: Annotated[Optional[StrictStr], Field(description="authorityType either GROUP or USER, empty to show all")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AuthorityEntries: + """Get all members of the group. + + Get all members of the group. (admin rights are required.) 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: authority name (begins with GROUP_) (required) + :type group: str + :param pattern: pattern + :type pattern: str + :param authority_type: authorityType either GROUP or USER, empty to show all + :type authority_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_membership_serialize( + repository=repository, + group=group, + pattern=pattern, + authority_type=authority_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_membership_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="authority name (begins with GROUP_)")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + authority_type: Annotated[Optional[StrictStr], Field(description="authorityType either GROUP or USER, empty to show all")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AuthorityEntries]: + """Get all members of the group. + + Get all members of the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: authority name (begins with GROUP_) (required) + :type group: str + :param pattern: pattern + :type pattern: str + :param authority_type: authorityType either GROUP or USER, empty to show all + :type authority_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
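+
+ Example (illustrative sketch; ``iam_api`` stands for an instance of this
+ class built on an authenticated ``ApiClient`` with admin rights; the group
+ name below is a placeholder, using the ``GROUP_`` prefix described in the
+ parameter documentation above)::
+
+     # list the first 25 USER members of a group
+     members = iam_api.get_membership_with_http_info(
+         repository="-home-",
+         group="GROUP_example",
+         authority_type="USER",
+         max_items=25,
+         skip_count=0,
+     ).data  # parsed AuthorityEntries model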
+ """ # noqa: E501 + + _param = self._get_membership_serialize( + repository=repository, + group=group, + pattern=pattern, + authority_type=authority_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_membership_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="authority name (begins with GROUP_)")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + authority_type: Annotated[Optional[StrictStr], Field(description="authorityType either GROUP or USER, empty to show all")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all members of the group. + + Get all members of the group. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: authority name (begins with GROUP_) (required) + :type group: str + :param pattern: pattern + :type pattern: str + :param authority_type: authorityType either GROUP or USER, empty to show all + :type authority_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_membership_serialize( + repository=repository, + group=group, + pattern=pattern, + authority_type=authority_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_membership_serialize( + self, + repository, + group, + pattern, + authority_type, + max_items, + skip_count, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + if pattern is not None: + + _query_params.append(('pattern', pattern)) + + if authority_type is not None: + + _query_params.append(('authorityType', authority_type)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/groups/{repository}/{group}/members', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_node_list( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: 
Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntries: + """Get a specific node list for a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name (required) + :type list: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
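+
+ Example (illustrative sketch; ``iam_api`` stands for an instance of this
+ class built on an authenticated ``ApiClient``, and ``EXAMPLE_LIST`` is a
+ placeholder list name)::
+
+     # fetch one of the current user's node lists with all properties
+     nodes = iam_api.get_node_list(
+         repository="-home-",
+         person="-me-",
+         list="EXAMPLE_LIST",
+         property_filter=["-all-"],
+     )
+     # ``nodes`` is a parsed NodeEntries model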
+ """ # noqa: E501 + + _param = self._get_node_list_serialize( + repository=repository, + person=person, + list=list, + property_filter=property_filter, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_node_list_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntries]: + """Get a specific node list for a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name (required) + :type list: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_node_list_serialize( + repository=repository, + person=person, + list=list, + property_filter=property_filter, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_node_list_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a specific node list for a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name (required) + :type list: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_node_list_serialize( + repository=repository, + person=person, + list=list, + property_filter=property_filter, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_node_list_serialize( + self, + repository, + person, + list, + property_filter, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'propertyFilter': 'multi', + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + if list is not None: + _path_params['list'] = list + # process the query parameters + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/people/{repository}/{person}/nodeList/{list}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_preferences( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: 
Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Preferences: + """Get preferences stored for user + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_preferences_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Preferences", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_preferences_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Preferences]: + """Get preferences stored for user + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_preferences_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Preferences", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_preferences_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get preferences stored for user + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
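+
+ Example (illustrative sketch; ``iam_api`` stands for an instance of this
+ class built on an authenticated, non-guest ``ApiClient``)::
+
+     # fetch the stored preferences of the current user without
+     # deserializing the body into the Preferences model
+     raw = iam_api.get_preferences_without_preload_content(
+         repository="-home-",
+         person="-me-",
+     )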
+ """ # noqa: E501 + + _param = self._get_preferences_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Preferences", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_preferences_serialize( + self, + repository, + person, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/people/{repository}/{person}/preferences', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_profile_settings( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ProfileSettings: + """Get profileSettings configuration + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_profile_settings_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ProfileSettings", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_profile_settings_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ProfileSettings]: + """Get profileSettings configuration + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_profile_settings_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ProfileSettings", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_profile_settings_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get profileSettings configuration + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_profile_settings_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ProfileSettings", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_profile_settings_serialize( + self, + repository, + person, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/people/{repository}/{person}/profileSettings', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_recently_invited( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AuthorityEntries: + """Get recently invited authorities. + + Get the authorities the current user has recently invited. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_recently_invited_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_recently_invited_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AuthorityEntries]: + """Get recently invited authorities. + + Get the authorities the current user has recently invited. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_recently_invited_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_recently_invited_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get recently invited authorities. + + Get the authorities the current user has recently invited. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_recently_invited_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_recently_invited_serialize( + self, + repository, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/authorities/{repository}/recent', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_subgroup_by_type( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="authority name of the parent/primary group (begins with GROUP_)")], + type: Annotated[StrictStr, Field(description="group type to filter for, e.g. ORG_ADMINISTRATORS")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AuthorityEntries: + """Get a subgroup by the specified type + + Get a subgroup by the specified type + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: authority name of the parent/primary group (begins with GROUP_) (required) + :type group: str + :param type: group type to filter for, e.g. ORG_ADMINISTRATORS (required) + :type type: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_subgroup_by_type_serialize( + repository=repository, + group=group, + type=type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_subgroup_by_type_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="authority name of the parent/primary group (begins with GROUP_)")], + type: Annotated[StrictStr, Field(description="group type to filter for, e.g. ORG_ADMINISTRATORS")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AuthorityEntries]: + """Get a subgroup by the specified type + + Get a subgroup by the specified type + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: authority name of the parent/primary group (begins with GROUP_) (required) + :type group: str + :param type: group type to filter for, e.g. ORG_ADMINISTRATORS (required) + :type type: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_subgroup_by_type_serialize( + repository=repository, + group=group, + type=type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_subgroup_by_type_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="authority name of the parent/primary group (begins with GROUP_)")], + type: Annotated[StrictStr, Field(description="group type to filter for, e.g. ORG_ADMINISTRATORS")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a subgroup by the specified type + + Get a subgroup by the specified type + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: authority name of the parent/primary group (begins with GROUP_) (required) + :type group: str + :param type: group type to filter for, e.g. ORG_ADMINISTRATORS (required) + :type type: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_subgroup_by_type_serialize( + repository=repository, + group=group, + type=type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_subgroup_by_type_serialize( + self, + repository, + group, + type, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + if type is not None: + _path_params['type'] = type + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/groups/{repository}/{group}/type/{type}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_user( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UserEntry: + """Get the user. + + Get the user. (Not all information are feteched for foreign profiles if current user is not an admin) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_user_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UserEntry]: + """Get the user. + + Get the user. (Not all information are feteched for foreign profiles if current user is not an admin) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_user_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_user_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get the user. + + Get the user. (Not all information are feteched for foreign profiles if current user is not an admin) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_user_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_user_serialize( + self, + repository, + person, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/people/{repository}/{person}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_user_groups( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="authority name")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GroupEntries: + """Get all groups the given user is member of. 
+ + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: authority name (required) + :type person: str + :param pattern: pattern + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_groups_serialize( + repository=repository, + person=person, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_user_groups_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="authority name")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GroupEntries]: + """Get all groups the given user is member of. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: authority name (required) + :type person: str + :param pattern: pattern + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
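+
+ Illustrative usage (a sketch; ``iam_api`` and the authority name ``someuser``
+ are hypothetical placeholders)::
+
+     api_response = iam_api.get_user_groups_with_http_info(
+         repository="-home-",
+         person="someuser",
+         max_items=25,      # page size
+         skip_count=0,      # offset for paging
+     )
+     # api_response wraps the deserialized GroupEntries payload together with
+     # the HTTP status code and headers (exact field names depend on ApiResponse)
+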
+ """ # noqa: E501 + + _param = self._get_user_groups_serialize( + repository=repository, + person=person, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_user_groups_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="authority name")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all groups the given user is member of. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: authority name (required) + :type person: str + :param pattern: pattern + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_groups_serialize( + repository=repository, + person=person, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_user_groups_serialize( + self, + repository, + person, + pattern, + max_items, + skip_count, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + if pattern is not None: + + _query_params.append(('pattern', pattern)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/people/{repository}/{person}/memberships', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_user_stats( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: 
Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UserStats: + """Get the user stats. + + Get the user stats (e.g. publicly created material count) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_stats_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserStats", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_user_stats_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UserStats]: + """Get the user stats. + + Get the user stats (e.g. publicly created material count) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_user_stats_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserStats", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_user_stats_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get the user stats. + + Get the user stats (e.g. publicly created material count) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
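+
+ Illustrative usage (a sketch, assuming an ``iam_api`` instance on a configured
+ ``ApiClient``; ``iam_api`` is a placeholder name)::
+
+     # get_user_stats() above returns a UserStats model (e.g. publicly created
+     # material count); this variant returns the raw response instead
+     raw_response = iam_api.get_user_stats_without_preload_content(
+         repository="-home-", person="-me-"
+     )
+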
+ """ # noqa: E501 + + _param = self._get_user_stats_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserStats", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_user_stats_serialize( + self, + repository, + person, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/people/{repository}/{person}/stats', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def reject_signup( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + user: Annotated[StrictStr, Field(description="ID of user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """reject the pending user + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param user: ID of user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._reject_signup_serialize( + repository=repository, + group=group, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def reject_signup_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + user: Annotated[StrictStr, Field(description="ID of user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """reject the pending user + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param user: ID of user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
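+
+ Example (an illustrative sketch only; ``iam_api`` and the group/user IDs are
+ placeholders, not values taken from this file)::
+
+     # Reject a pending signup and unwrap the deserialized result from the
+     # ApiResponse wrapper returned by this *_with_http_info variant.
+     api_response = iam_api.reject_signup_with_http_info(
+         repository="-home-",
+         group="GROUP_ID_PLACEHOLDER",
+         user="USER_ID_PLACEHOLDER",
+     )
+     result = api_response.data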
+ """ # noqa: E501 + + _param = self._reject_signup_serialize( + repository=repository, + group=group, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def reject_signup_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + user: Annotated[StrictStr, Field(description="ID of user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """reject the pending user + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param user: ID of user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._reject_signup_serialize( + repository=repository, + group=group, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _reject_signup_serialize( + self, + repository, + group, + user, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + if user is not None: + _path_params['user'] = user + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/iam/v1/groups/{repository}/{group}/signup/list/{user}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_node_list( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete a node of a node list of a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name (required) + :type list: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_node_list_serialize( + repository=repository, + person=person, + list=list, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_node_list_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete a node of a node list of a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name (required) + :type list: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_node_list_serialize( + repository=repository, + person=person, + list=list, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_node_list_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + list: Annotated[StrictStr, Field(description="list name")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete a node of a node list of a user + + For guest users, the list will be temporary stored in the current session + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param list: list name (required) + :type list: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
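+
+ Example (an illustrative sketch only; ``iam_api``, the list name and the node
+ ID are placeholders)::
+
+     # Remove a single node from a named node list of the current user and
+     # keep only the raw response, since nothing is preloaded or deserialized.
+     raw_response = iam_api.remove_node_list_without_preload_content(
+         repository="-home-",
+         person="-me-",
+         list="LIST_NAME_PLACEHOLDER",
+         node="NODE_ID_PLACEHOLDER",
+     )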
+ """ # noqa: E501 + + _param = self._remove_node_list_serialize( + repository=repository, + person=person, + list=list, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_node_list_serialize( + self, + repository, + person, + list, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + if list is not None: + _path_params['list'] = list + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/iam/v1/people/{repository}/{person}/nodeList/{list}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_user_avatar( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove avatar of the user. + + Remove avatar of the user. (To Remove foreign avatars, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_user_avatar_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_user_avatar_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove avatar of the user. + + Remove avatar of the user. (To Remove foreign avatars, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
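+
+ Example (an illustrative sketch only; ``iam_api`` is a placeholder for an
+ instance of this generated API class)::
+
+     # Remove the avatar of the current user; the deserialized payload is None
+     # for this endpoint, so only the ApiResponse wrapper is of interest.
+     api_response = iam_api.remove_user_avatar_with_http_info(
+         repository="-home-",
+         person="-me-",
+     )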
+ """ # noqa: E501 + + _param = self._remove_user_avatar_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_user_avatar_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove avatar of the user. + + Remove avatar of the user. (To Remove foreign avatars, admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._remove_user_avatar_serialize( + repository=repository, + person=person, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_user_avatar_serialize( + self, + repository, + person, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/iam/v1/people/{repository}/{person}/avatar', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_authorities( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for users within the organizations")] = None, + group_type: Annotated[Optional[StrictStr], Field(description="find a specific groupType (does nothing for persons)")] = None, + signup_method: Annotated[Optional[StrictStr], Field(description="find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> AuthorityEntries: + """Search authorities. + + Search authorities. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param var_global: global search context, defaults to true, otherwise just searches for users within the organizations + :type var_global: bool + :param group_type: find a specific groupType (does nothing for persons) + :type group_type: str + :param signup_method: find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons) + :type signup_method: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_authorities_serialize( + repository=repository, + pattern=pattern, + var_global=var_global, + group_type=group_type, + signup_method=signup_method, + max_items=max_items, + skip_count=skip_count, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_authorities_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for users within the organizations")] = None, + group_type: Annotated[Optional[StrictStr], Field(description="find a specific groupType (does nothing for persons)")] = None, + signup_method: Annotated[Optional[StrictStr], Field(description="find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] 
+ ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[AuthorityEntries]: + """Search authorities. + + Search authorities. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param var_global: global search context, defaults to true, otherwise just searches for users within the organizations + :type var_global: bool + :param group_type: find a specific groupType (does nothing for persons) + :type group_type: str + :param signup_method: find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons) + :type signup_method: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
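+
+ Example (an illustrative sketch only; ``iam_api`` and the search pattern are
+ placeholders)::
+
+     # Search authorities in the home repository, restricted to the caller's
+     # organizations (var_global=False), and unwrap the AuthorityEntries payload.
+     api_response = iam_api.search_authorities_with_http_info(
+         repository="-home-",
+         pattern="PATTERN_PLACEHOLDER",
+         var_global=False,
+         max_items=10,
+     )
+     authorities = api_response.data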
+ """ # noqa: E501 + + _param = self._search_authorities_serialize( + repository=repository, + pattern=pattern, + var_global=var_global, + group_type=group_type, + signup_method=signup_method, + max_items=max_items, + skip_count=skip_count, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_authorities_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for users within the organizations")] = None, + group_type: Annotated[Optional[StrictStr], Field(description="find a specific groupType (does nothing for persons)")] = None, + signup_method: Annotated[Optional[StrictStr], Field(description="find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search authorities. + + Search authorities. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param var_global: global search context, defaults to true, otherwise just searches for users within the organizations + :type var_global: bool + :param group_type: find a specific groupType (does nothing for persons) + :type group_type: str + :param signup_method: find a specific signupMethod for groups (or asterisk for all including one) (does nothing for persons) + :type signup_method: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_authorities_serialize( + repository=repository, + pattern=pattern, + var_global=var_global, + group_type=group_type, + signup_method=signup_method, + max_items=max_items, + skip_count=skip_count, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "AuthorityEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_authorities_serialize( + self, + repository, + pattern, + var_global, + group_type, + signup_method, + max_items, + skip_count, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if pattern is not None: + + _query_params.append(('pattern', pattern)) + + if var_global is not None: + + _query_params.append(('global', var_global)) + + if group_type is not None: + + _query_params.append(('groupType', group_type)) + + if signup_method is not None: + + _query_params.append(('signupMethod', signup_method)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/authorities/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_groups( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + group_type: Annotated[Optional[StrictStr], Field(description="find a specific groupType")] = None, + signup_method: Annotated[Optional[StrictStr], Field(description="find a specific signupMethod for groups (or asterisk for all including one)")] = None, + 
var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for groups within the organizations")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GroupEntries: + """Search groups. + + Search groups. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param group_type: find a specific groupType + :type group_type: str + :param signup_method: find a specific signupMethod for groups (or asterisk for all including one) + :type signup_method: str + :param var_global: global search context, defaults to true, otherwise just searches for groups within the organizations + :type var_global: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
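+
+ Example (an illustrative sketch only; ``iam_api`` and the search pattern are
+ placeholders; admin rights are required, as noted above)::
+
+     # Fetch the first page of 25 matching groups from the home repository;
+     # this variant returns the deserialized GroupEntries directly.
+     groups = iam_api.search_groups(
+         repository="-home-",
+         pattern="PATTERN_PLACEHOLDER",
+         max_items=25,
+         skip_count=0,
+     )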
+ """ # noqa: E501 + + _param = self._search_groups_serialize( + repository=repository, + pattern=pattern, + group_type=group_type, + signup_method=signup_method, + var_global=var_global, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_groups_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + group_type: Annotated[Optional[StrictStr], Field(description="find a specific groupType")] = None, + signup_method: Annotated[Optional[StrictStr], Field(description="find a specific signupMethod for groups (or asterisk for all including one)")] = None, + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for groups within the organizations")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GroupEntries]: + """Search groups. + + Search groups. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param group_type: find a specific groupType + :type group_type: str + :param signup_method: find a specific signupMethod for groups (or asterisk for all including one) + :type signup_method: str + :param var_global: global search context, defaults to true, otherwise just searches for groups within the organizations + :type var_global: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_groups_serialize( + repository=repository, + pattern=pattern, + group_type=group_type, + signup_method=signup_method, + var_global=var_global, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_groups_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + group_type: Annotated[Optional[StrictStr], Field(description="find a specific groupType")] = None, + signup_method: Annotated[Optional[StrictStr], Field(description="find a specific signupMethod for groups (or asterisk for all including one)")] = None, + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for groups within the organizations")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search groups. + + Search groups. (admin rights are required.) 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param group_type: find a specific groupType + :type group_type: str + :param signup_method: find a specific signupMethod for groups (or asterisk for all including one) + :type signup_method: str + :param var_global: global search context, defaults to true, otherwise just searches for groups within the organizations + :type var_global: bool + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_groups_serialize( + repository=repository, + pattern=pattern, + group_type=group_type, + signup_method=signup_method, + var_global=var_global, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GroupEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_groups_serialize( + self, + repository, + pattern, + group_type, + signup_method, + var_global, + max_items, + skip_count, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if pattern is not None: + + _query_params.append(('pattern', pattern)) + + if group_type is not None: + + _query_params.append(('groupType', group_type)) + + if signup_method is not None: + + _query_params.append(('signupMethod', signup_method)) + + if var_global is not None: + + _query_params.append(('global', var_global)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/groups/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_user( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for users within the organizations")] = None, + status: Annotated[Optional[StrictStr], Field(description="the user status (e.g. 
active), if not set, all users are returned")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> UserEntries: + """Search users. + + Search users. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param var_global: global search context, defaults to true, otherwise just searches for users within the organizations + :type var_global: bool + :param status: the user status (e.g. active), if not set, all users are returned + :type status: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
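+
+        Usage sketch (hypothetical; assumes an ``IAMV1Api`` instance ``iam`` built as in
+        the search_groups example above, the parameters are the ones documented here)::
+
+            # admin rights are required; returns a deserialized UserEntries object
+            users = iam.search_user("-home-", pattern="max", status="active", max_items=25)
+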
+ """ # noqa: E501 + + _param = self._search_user_serialize( + repository=repository, + pattern=pattern, + var_global=var_global, + status=status, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_user_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for users within the organizations")] = None, + status: Annotated[Optional[StrictStr], Field(description="the user status (e.g. active), if not set, all users are returned")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[UserEntries]: + """Search users. + + Search users. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param var_global: global search context, defaults to true, otherwise just searches for users within the organizations + :type var_global: bool + :param status: the user status (e.g. active), if not set, all users are returned + :type status: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_user_serialize( + repository=repository, + pattern=pattern, + var_global=var_global, + status=status, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_user_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[StrictStr, Field(description="pattern")], + var_global: Annotated[Optional[StrictBool], Field(description="global search context, defaults to true, otherwise just searches for users within the organizations")] = None, + status: Annotated[Optional[StrictStr], Field(description="the user status (e.g. active), if not set, all users are returned")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search users. + + Search users. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern (required) + :type pattern: str + :param var_global: global search context, defaults to true, otherwise just searches for users within the organizations + :type var_global: bool + :param status: the user status (e.g. 
active), if not set, all users are returned + :type status: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_user_serialize( + repository=repository, + pattern=pattern, + var_global=var_global, + status=status, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "UserEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_user_serialize( + self, + repository, + pattern, + var_global, + status, + max_items, + skip_count, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if pattern is not None: + + _query_params.append(('pattern', pattern)) + + if var_global is not None: + + _query_params.append(('global', var_global)) + + if status is not None: + + _query_params.append(('status', status)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process 
the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/people/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_preferences( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + body: Annotated[StrictStr, Field(description="preferences (json string)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set preferences for user + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param body: preferences (json string) (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
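+
+        Usage sketch (hypothetical; assumes an ``IAMV1Api`` instance ``iam`` as in the
+        search examples above, the preference key is a placeholder): the body is an
+        opaque JSON string, so it is serialized with ``json.dumps`` before the call::
+
+            import json
+
+            # store arbitrary per-user preferences for the current user
+            iam.set_preferences("-home-", "-me-", json.dumps({"examplePreference": "value"}))
+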
+ """ # noqa: E501 + + _param = self._set_preferences_serialize( + repository=repository, + person=person, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_preferences_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + body: Annotated[StrictStr, Field(description="preferences (json string)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set preferences for user + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param body: preferences (json string) (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_preferences_serialize( + repository=repository, + person=person, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_preferences_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + body: Annotated[StrictStr, Field(description="preferences (json string)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set preferences for user + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param body: preferences (json string) (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_preferences_serialize( + repository=repository, + person=person, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_preferences_serialize( + self, + repository, + person, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/people/{repository}/{person}/preferences', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_profile_settings( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + profile_settings: Annotated[ProfileSettings, Field(description="ProfileSetting Object")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set profileSettings Configuration + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param profile_settings: ProfileSetting Object (required) + :type profile_settings: ProfileSettings + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_profile_settings_serialize( + repository=repository, + person=person, + profile_settings=profile_settings, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_profile_settings_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + profile_settings: Annotated[ProfileSettings, Field(description="ProfileSetting Object")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set profileSettings Configuration + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param profile_settings: ProfileSetting Object (required) + :type profile_settings: ProfileSettings + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_profile_settings_serialize( + repository=repository, + person=person, + profile_settings=profile_settings, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_profile_settings_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username (or \"-me-\" for current user)")], + profile_settings: Annotated[ProfileSettings, Field(description="ProfileSetting Object")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set profileSettings Configuration + + Will fail for guest + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (or \"-me-\" for current user) (required) + :type person: str + :param profile_settings: ProfileSetting Object (required) + :type profile_settings: ProfileSettings + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
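+
+        Usage sketch (hypothetical; assumes an ``IAMV1Api`` instance ``iam`` as in the
+        earlier examples; ``ProfileSettings`` is the generated model named above and its
+        concrete fields follow the model definition, which is not spelled out here)::
+
+            from edu_sharing_client import ProfileSettings
+
+            settings = ProfileSettings()  # populate fields as defined by the model
+            # this variant returns the raw HTTP response without deserialization
+            raw = iam.set_profile_settings_without_preload_content("-home-", "-me-", settings)
+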
+ """ # noqa: E501 + + _param = self._set_profile_settings_serialize( + repository=repository, + person=person, + profile_settings=profile_settings, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_profile_settings_serialize( + self, + repository, + person, + profile_settings, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if profile_settings is not None: + _body_params = profile_settings + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/people/{repository}/{person}/profileSettings', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def signup_group( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + password: Annotated[Optional[StrictStr], Field(description="Password for signup (only required if signupMethod == password)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """let the current user signup to the given group + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param password: Password for signup (only required if signupMethod == password) + :type password: str + :param 
_request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._signup_group_serialize( + repository=repository, + group=group, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def signup_group_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + password: Annotated[Optional[StrictStr], Field(description="Password for signup (only required if signupMethod == password)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """let the current user signup to the given group + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param password: Password for signup (only required if signupMethod == password) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._signup_group_serialize( + repository=repository, + group=group, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def signup_group_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + password: Annotated[Optional[StrictStr], Field(description="Password for signup (only required if signupMethod == password)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """let the current user signup to the given group + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param password: Password for signup (only required if signupMethod == password) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
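+
+        Usage sketch (hypothetical; assumes an ``IAMV1Api`` instance ``iam`` as in the
+        earlier examples, the group id is a placeholder; the password is only needed
+        when the group's signupMethod is ``password``)::
+
+            # let the current user sign up to the given group; raw HTTP response is returned
+            raw = iam.signup_group_without_preload_content("-home-", "GROUP_example", password="secret")
+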
+ """ # noqa: E501 + + _param = self._signup_group_serialize( + repository=repository, + group=group, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _signup_group_serialize( + self, + repository, + group, + password, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + if password is not None: + + _query_params.append(('password', password)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/iam/v1/groups/{repository}/{group}/signup', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def signup_group_details( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + group_signup_details: Annotated[GroupSignupDetails, Field(description="Details to edit")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """ requires admin rights + + set group signup options + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param group_signup_details: Details to edit (required) + :type group_signup_details: GroupSignupDetails + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._signup_group_details_serialize( + repository=repository, + group=group, + group_signup_details=group_signup_details, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def signup_group_details_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + group_signup_details: Annotated[GroupSignupDetails, Field(description="Details to edit")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """ requires admin rights + + set group signup options + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param group_signup_details: Details to edit (required) + :type group_signup_details: GroupSignupDetails + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
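+
+        Usage sketch (hypothetical; assumes an ``IAMV1Api`` instance ``iam`` as in the
+        earlier examples; ``GroupSignupDetails`` is the generated model named above and
+        its fields, e.g. the signup method, follow the model definition)::
+
+            from edu_sharing_client import GroupSignupDetails
+
+            details = GroupSignupDetails()  # fill in the signup options to set
+            # the with_http_info variant returns the full ApiResponse (status, headers, data)
+            api_resp = iam.signup_group_details_with_http_info("-home-", "GROUP_example", details)
+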
+ """ # noqa: E501 + + _param = self._signup_group_details_serialize( + repository=repository, + group=group, + group_signup_details=group_signup_details, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def signup_group_details_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + group_signup_details: Annotated[GroupSignupDetails, Field(description="Details to edit")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """ requires admin rights + + set group signup options + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param group_signup_details: Details to edit (required) + :type group_signup_details: GroupSignupDetails + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._signup_group_details_serialize( + repository=repository, + group=group, + group_signup_details=group_signup_details, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _signup_group_details_serialize( + self, + repository, + group, + group_signup_details, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if group_signup_details is not None: + _body_params = group_signup_details + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/iam/v1/groups/{repository}/{group}/signup/config', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def signup_group_list( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """list pending users that want to join this group + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._signup_group_list_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def signup_group_list_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """list pending users that want to join this group + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
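+
+        Usage sketch (hypothetical; assumes an ``IAMV1Api`` instance ``iam`` as in the
+        earlier examples, the group id is a placeholder; requires admin rights or org
+        administrator rights on the group)::
+
+            api_resp = iam.signup_group_list_with_http_info("-home-", "GROUP_example")
+            pending = api_resp.data  # pending signups as returned by the endpoint
+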
+ """ # noqa: E501 + + _param = self._signup_group_list_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def signup_group_list_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + group: Annotated[StrictStr, Field(description="ID of group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """list pending users that want to join this group + + Requires admin rights or org administrator on this group + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param group: ID of group (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._signup_group_list_serialize( + repository=repository, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _signup_group_list_serialize( + self, + repository, + group, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if group is not None: + _path_params['group'] = group + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/iam/v1/groups/{repository}/{group}/signup/list', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_user_status( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + status: Annotated[StrictStr, Field(description="the new status to set")], + notify: Annotated[StrictBool, Field(description="notify the user via mail")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """update the user status. + + update the user status. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param status: the new status to set (required) + :type status: str + :param notify: notify the user via mail (required) + :type notify: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_user_status_serialize( + repository=repository, + person=person, + status=status, + notify=notify, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_user_status_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + status: Annotated[StrictStr, Field(description="the new status to set")], + notify: Annotated[StrictBool, Field(description="notify the user via mail")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """update the user status. + + update the user status. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param status: the new status to set (required) + :type status: str + :param notify: notify the user via mail (required) + :type notify: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_user_status_serialize( + repository=repository, + person=person, + status=status, + notify=notify, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_user_status_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + person: Annotated[StrictStr, Field(description="username")], + status: Annotated[StrictStr, Field(description="the new status to set")], + notify: Annotated[StrictBool, Field(description="notify the user via mail")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update the user status. + + update the user status. (admin rights are required.) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param person: username (required) + :type person: str + :param status: the new status to set (required) + :type status: str + :param notify: notify the user via mail (required) + :type notify: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
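+
+        Example (editor's sketch): this *_without_preload_content variant
+        returns the raw response without deserializing it; the class name
+        IAMV1Api is assumed (inferred from the /iam/v1/... paths) and the
+        username/status values are placeholders::
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.iamv1_api import IAMV1Api
+
+            api = IAMV1Api(ApiClient.get_default())
+            raw = api.update_user_status_without_preload_content(
+                repository="-home-",
+                person="jdoe",
+                status="active",
+                notify=True,
+            )
+            print(raw.status)   # HTTP status of the raw response (attribute assumed)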
+ """ # noqa: E501 + + _param = self._update_user_status_serialize( + repository=repository, + person=person, + status=status, + notify=notify, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_user_status_serialize( + self, + repository, + person, + status, + notify, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if person is not None: + _path_params['person'] = person + if status is not None: + _path_params['status'] = status + # process the query parameters + if notify is not None: + + _query_params.append(('notify', notify)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/iam/v1/people/{repository}/{person}/status/{status}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/knowledgev1_api.py b/edu_sharing_openapi/edu_sharing_client/api/knowledgev1_api.py new file mode 100644 index 00000000..ba229e61 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/knowledgev1_api.py @@ -0,0 +1,591 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing_extensions import Annotated +from edu_sharing_client.models.job_entry import JobEntry + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class KNOWLEDGEV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get_analyzing_job_status( + self, + job: Annotated[StrictStr, Field(description="ID of job ticket")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> JobEntry: + """Get analyzing job status. + + Get analyzing job status. + + :param job: ID of job ticket (required) + :type job: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_analyzing_job_status_serialize( + job=job, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "JobEntry", + '401': None, + '403': None, + '404': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_analyzing_job_status_with_http_info( + self, + job: Annotated[StrictStr, Field(description="ID of job ticket")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[JobEntry]: + """Get analyzing job status. + + Get analyzing job status. + + :param job: ID of job ticket (required) + :type job: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_analyzing_job_status_serialize( + job=job, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "JobEntry", + '401': None, + '403': None, + '404': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_analyzing_job_status_without_preload_content( + self, + job: Annotated[StrictStr, Field(description="ID of job ticket")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get analyzing job status. + + Get analyzing job status. + + :param job: ID of job ticket (required) + :type job: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
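+
+        Example (editor's sketch): of the three generated flavours of this
+        call, this one hands back the raw response so the caller controls
+        reading and deserialization; the job-ticket id is a placeholder and
+        read() on the returned object is assumed from the urllib3 response
+        it wraps::
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.knowledgev1_api import KNOWLEDGEV1Api
+
+            api = KNOWLEDGEV1Api(ApiClient.get_default())
+            raw = api.get_analyzing_job_status_without_preload_content(job="job-ticket-id")
+            body = raw.read()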
+ """ # noqa: E501 + + _param = self._get_analyzing_job_status_serialize( + job=job, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "JobEntry", + '401': None, + '403': None, + '404': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_analyzing_job_status_serialize( + self, + job, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if job is not None: + _path_params['job'] = job + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/knowledge/v1/analyze/jobs/{job}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def run_analyzing_job( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> JobEntry: + """Run analyzing job. + + Run analyzing job for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._run_analyzing_job_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '202': "JobEntry", + '401': None, + '403': None, + '404': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def run_analyzing_job_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[JobEntry]: + """Run analyzing job. + + Run analyzing job for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._run_analyzing_job_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '202': "JobEntry", + '401': None, + '403': None, + '404': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def run_analyzing_job_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Run analyzing job. + + Run analyzing job for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._run_analyzing_job_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '202': "JobEntry", + '401': None, + '403': None, + '404': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _run_analyzing_job_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if repository is not None: + + _query_params.append(('repository', repository)) + + if node is not None: + + _query_params.append(('node', node)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/knowledge/v1/analyze/jobs', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/lti_platform_v13_api.py b/edu_sharing_openapi/edu_sharing_client/api/lti_platform_v13_api.py new file mode 100644 index 00000000..048c90d2 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/lti_platform_v13_api.py @@ -0,0 +1,4504 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictBytes, StrictStr +from typing import Dict, Optional, Union +from typing_extensions import Annotated +from edu_sharing_client.models.manual_registration_data import ManualRegistrationData +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.models.open_id_configuration import OpenIdConfiguration +from edu_sharing_client.models.open_id_registration_result import OpenIdRegistrationResult +from edu_sharing_client.models.tools import Tools + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class LTIPlatformV13Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def auth( + self, + scope: Annotated[StrictStr, Field(description="scope")], + response_type: Annotated[StrictStr, Field(description="response_type")], + login_hint: Annotated[StrictStr, Field(description="login_hint")], + state: Annotated[StrictStr, Field(description="state")], + response_mode: Annotated[StrictStr, Field(description="response_mode")], + nonce: Annotated[StrictStr, Field(description="nonce")], + prompt: Annotated[StrictStr, Field(description="prompt")], + redirect_uri: Annotated[StrictStr, Field(description="redirect_uri")], + client_id: Annotated[Optional[StrictStr], Field(description="optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """LTI Platform oidc endpoint. responds to a login authentication request + + + :param scope: scope (required) + :type scope: str + :param response_type: response_type (required) + :type response_type: str + :param login_hint: login_hint (required) + :type login_hint: str + :param state: state (required) + :type state: str + :param response_mode: response_mode (required) + :type response_mode: str + :param nonce: nonce (required) + :type nonce: str + :param prompt: prompt (required) + :type prompt: str + :param redirect_uri: redirect_uri (required) + :type redirect_uri: str + :param client_id: optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request + :type client_id: str + :param lti_message_hint: Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered + :type lti_message_hint: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._auth_serialize( + scope=scope, + response_type=response_type, + login_hint=login_hint, + state=state, + response_mode=response_mode, + nonce=nonce, + prompt=prompt, + redirect_uri=redirect_uri, + client_id=client_id, + lti_message_hint=lti_message_hint, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def auth_with_http_info( + self, + scope: Annotated[StrictStr, Field(description="scope")], + response_type: Annotated[StrictStr, Field(description="response_type")], + login_hint: Annotated[StrictStr, Field(description="login_hint")], + state: Annotated[StrictStr, Field(description="state")], + response_mode: Annotated[StrictStr, Field(description="response_mode")], + nonce: Annotated[StrictStr, Field(description="nonce")], + prompt: Annotated[StrictStr, Field(description="prompt")], + redirect_uri: Annotated[StrictStr, Field(description="redirect_uri")], + client_id: Annotated[Optional[StrictStr], Field(description="optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """LTI Platform oidc endpoint. responds to a login authentication request + + + :param scope: scope (required) + :type scope: str + :param response_type: response_type (required) + :type response_type: str + :param login_hint: login_hint (required) + :type login_hint: str + :param state: state (required) + :type state: str + :param response_mode: response_mode (required) + :type response_mode: str + :param nonce: nonce (required) + :type nonce: str + :param prompt: prompt (required) + :type prompt: str + :param redirect_uri: redirect_uri (required) + :type redirect_uri: str + :param client_id: optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request + :type client_id: str + :param lti_message_hint: Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered + :type lti_message_hint: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._auth_serialize( + scope=scope, + response_type=response_type, + login_hint=login_hint, + state=state, + response_mode=response_mode, + nonce=nonce, + prompt=prompt, + redirect_uri=redirect_uri, + client_id=client_id, + lti_message_hint=lti_message_hint, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def auth_without_preload_content( + self, + scope: Annotated[StrictStr, Field(description="scope")], + response_type: Annotated[StrictStr, Field(description="response_type")], + login_hint: Annotated[StrictStr, Field(description="login_hint")], + state: Annotated[StrictStr, Field(description="state")], + response_mode: Annotated[StrictStr, Field(description="response_mode")], + nonce: Annotated[StrictStr, Field(description="nonce")], + prompt: Annotated[StrictStr, Field(description="prompt")], + redirect_uri: Annotated[StrictStr, Field(description="redirect_uri")], + client_id: Annotated[Optional[StrictStr], Field(description="optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """LTI Platform oidc endpoint. 
responds to a login authentication request + + + :param scope: scope (required) + :type scope: str + :param response_type: response_type (required) + :type response_type: str + :param login_hint: login_hint (required) + :type login_hint: str + :param state: state (required) + :type state: str + :param response_mode: response_mode (required) + :type response_mode: str + :param nonce: nonce (required) + :type nonce: str + :param prompt: prompt (required) + :type prompt: str + :param redirect_uri: redirect_uri (required) + :type redirect_uri: str + :param client_id: optional parameter client_id specifies the client id for the authorization server that should be used to authorize the subsequent LTI message request + :type client_id: str + :param lti_message_hint: Similarly to the login_hint parameter, lti_message_hint value is opaque to the tool. If present in the login initiation request, the tool MUST include it back in the authentication request unaltered + :type lti_message_hint: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
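+
+        Example (editor's sketch): in practice this endpoint is reached via a
+        browser redirect during the LTI 1.3 login flow; calling it from the
+        generated client is mainly useful for testing, and every value below
+        is a placeholder::
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.lti_platform_v13_api import LTIPlatformV13Api
+
+            api = LTIPlatformV13Api(ApiClient.get_default())
+            raw = api.auth_without_preload_content(
+                scope="openid",
+                response_type="id_token",
+                login_hint="<opaque-login-hint>",
+                state="<state>",
+                response_mode="form_post",
+                nonce="<nonce>",
+                prompt="none",
+                redirect_uri="https://tool.example/lti/callback",
+            )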
+ """ # noqa: E501 + + _param = self._auth_serialize( + scope=scope, + response_type=response_type, + login_hint=login_hint, + state=state, + response_mode=response_mode, + nonce=nonce, + prompt=prompt, + redirect_uri=redirect_uri, + client_id=client_id, + lti_message_hint=lti_message_hint, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _auth_serialize( + self, + scope, + response_type, + login_hint, + state, + response_mode, + nonce, + prompt, + redirect_uri, + client_id, + lti_message_hint, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if scope is not None: + + _query_params.append(('scope', scope)) + + if response_type is not None: + + _query_params.append(('response_type', response_type)) + + if client_id is not None: + + _query_params.append(('client_id', client_id)) + + if login_hint is not None: + + _query_params.append(('login_hint', login_hint)) + + if state is not None: + + _query_params.append(('state', state)) + + if response_mode is not None: + + _query_params.append(('response_mode', response_mode)) + + if nonce is not None: + + _query_params.append(('nonce', nonce)) + + if prompt is not None: + + _query_params.append(('prompt', prompt)) + + if lti_message_hint is not None: + + _query_params.append(('lti_message_hint', lti_message_hint)) + + if redirect_uri is not None: + + _query_params.append(('redirect_uri', redirect_uri)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/auth', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def auth_token_endpoint( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """LTIPlatform auth token endpoint + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._auth_token_endpoint_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def auth_token_endpoint_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """LTIPlatform auth token endpoint + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._auth_token_endpoint_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def auth_token_endpoint_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """LTIPlatform auth token endpoint + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._auth_token_endpoint_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _auth_token_endpoint_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/token', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_content( + self, + jwt: Annotated[StrictStr, Field(description="jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + file: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="file upload")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Custom edu-sharing endpoint to change content of node. + + Change content of node. + + :param jwt: jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool (required) + :type jwt: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param file: file upload + :type file: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_content_serialize( + jwt=jwt, + mimetype=mimetype, + version_comment=version_comment, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_content_with_http_info( + self, + jwt: Annotated[StrictStr, Field(description="jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + file: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="file upload")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Custom edu-sharing endpoint to change content of node. + + Change content of node. + + :param jwt: jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool (required) + :type jwt: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param file: file upload + :type file: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_content_serialize( + jwt=jwt, + mimetype=mimetype, + version_comment=version_comment, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_content_without_preload_content( + self, + jwt: Annotated[StrictStr, Field(description="jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + file: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="file upload")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Custom edu-sharing endpoint to change content of node. + + Change content of node. + + :param jwt: jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool (required) + :type jwt: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param file: file upload + :type file: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_content_serialize( + jwt=jwt, + mimetype=mimetype, + version_comment=version_comment, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_content_serialize( + self, + jwt, + mimetype, + version_comment, + file, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if jwt is not None: + + _query_params.append(('jwt', jwt)) + + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + if mimetype is not None: + + _query_params.append(('mimetype', mimetype)) + + # process the header parameters + # process the form parameters + if file is not None: + _files['file'] = file + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/ltiplatform/v13/content', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def convert_to_resourcelink( + self, + node_id: Annotated[StrictStr, Field(description="nodeId")], + app_id: Annotated[StrictStr, Field(description="appId of a lti tool")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """manual convertion of an io to an resource link without deeplinking + + io conversion to resourcelink + + :param node_id: nodeId (required) + :type node_id: str + :param app_id: appId of a lti tool (required) + :type app_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """ # noqa: E501
+
+        _param = self._convert_to_resourcelink_serialize(
+            node_id=node_id,
+            app_id=app_id,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            '200': None,
+            '400': "ErrorResponse",
+            '401': "ErrorResponse",
+            '403': "ErrorResponse",
+            '404': "ErrorResponse",
+            '409': "ErrorResponse",
+            '500': "ErrorResponse",
+        }
+        response_data = self.api_client.call_api(
+            *_param,
+            _request_timeout=_request_timeout
+        )
+        response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        ).data
+
+
+    @validate_call
+    def convert_to_resourcelink_with_http_info(
+        self,
+        node_id: Annotated[StrictStr, Field(description="nodeId")],
+        app_id: Annotated[StrictStr, Field(description="appId of a lti tool")],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> ApiResponse[None]:
+        """manual conversion of an io to a resource link without deeplinking
+
+        io conversion to resourcelink
+
+        :param node_id: nodeId (required)
+        :type node_id: str
+        :param app_id: appId of a lti tool (required)
+        :type app_id: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._convert_to_resourcelink_serialize( + node_id=node_id, + app_id=app_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def convert_to_resourcelink_without_preload_content( + self, + node_id: Annotated[StrictStr, Field(description="nodeId")], + app_id: Annotated[StrictStr, Field(description="appId of a lti tool")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """manual convertion of an io to an resource link without deeplinking + + io conversion to resourcelink + + :param node_id: nodeId (required) + :type node_id: str + :param app_id: appId of a lti tool (required) + :type app_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._convert_to_resourcelink_serialize( + node_id=node_id, + app_id=app_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _convert_to_resourcelink_serialize( + self, + node_id, + app_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if node_id is not None: + + _query_params.append(('nodeId', node_id)) + + if app_id is not None: + + _query_params.append(('appId', app_id)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/ltiplatform/v13/convert2resourcelink', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def deep_linking_response( + self, + jwt: Annotated[StrictStr, Field(description="JWT")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """receiving deeplink response messages. + + deeplink response + + :param jwt: JWT (required) + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._deep_linking_response_serialize( + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '409': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def deep_linking_response_with_http_info( + self, + jwt: Annotated[StrictStr, Field(description="JWT")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """receiving deeplink response messages. + + deeplink response + + :param jwt: JWT (required) + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._deep_linking_response_serialize( + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '409': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def deep_linking_response_without_preload_content( + self, + jwt: Annotated[StrictStr, Field(description="JWT")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """receiving deeplink response messages. 
+ + deeplink response + + :param jwt: JWT (required) + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._deep_linking_response_serialize( + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '409': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _deep_linking_response_serialize( + self, + jwt, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + if jwt is not None: + _form_params.append(('JWT', jwt)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/x-www-form-urlencoded' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/ltiplatform/v13/deeplinking-response', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def generate_login_initiation_form( + self, + app_id: Annotated[StrictStr, Field(description="appId of the tool")], + parent_id: Annotated[StrictStr, Field(description="the folder id the lti node will be created in. 
is required for lti deeplink.")],
+        node_id: Annotated[Optional[StrictStr], Field(description="the nodeId when tool has custom content option.")] = None,
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> str:
+        """generate a form used for Initiating Login from a Third Party. Use this endpoint when starting a lti deeplink flow.
+
+
+        :param app_id: appId of the tool (required)
+        :type app_id: str
+        :param parent_id: the folder id the lti node will be created in. is required for lti deeplink. (required)
+        :type parent_id: str
+        :param node_id: the nodeId when tool has custom content option.
+        :type node_id: str
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, Optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """ # noqa: E501
+
+        _param = self._generate_login_initiation_form_serialize(
+            app_id=app_id,
+            parent_id=parent_id,
+            node_id=node_id,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            '200': "str",
+            '400': "str",
+            '401': "str",
+            '403': "str",
+            '404': "str",
+            '500': "str",
+        }
+        response_data = self.api_client.call_api(
+            *_param,
+            _request_timeout=_request_timeout
+        )
+        response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        ).data
+
+
+    @validate_call
+    def generate_login_initiation_form_with_http_info(
+        self,
+        app_id: Annotated[StrictStr, Field(description="appId of the tool")],
+        parent_id: Annotated[StrictStr, Field(description="the folder id the lti node will be created in. is required for lti deeplink.")],
+        node_id: Annotated[Optional[StrictStr], Field(description="the nodeId when tool has custom content option.")] = None,
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> ApiResponse[str]:
+        """generate a form used for Initiating Login from a Third Party. Use this endpoint when starting a lti deeplink flow.
+ + + :param app_id: appId of the tool (required) + :type app_id: str + :param parent_id: the folder id the lti node will be created in. is required for lti deeplink. (required) + :type parent_id: str + :param node_id: the nodeId when tool has custom content option. + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._generate_login_initiation_form_serialize( + app_id=app_id, + parent_id=parent_id, + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def generate_login_initiation_form_without_preload_content( + self, + app_id: Annotated[StrictStr, Field(description="appId of the tool")], + parent_id: Annotated[StrictStr, Field(description="the folder id the lti node will be created in. is required for lti deeplink.")], + node_id: Annotated[Optional[StrictStr], Field(description="the nodeId when tool has custom content option.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti deeplink flow. + + + :param app_id: appId of the tool (required) + :type app_id: str + :param parent_id: the folder id the lti node will be created in. is required for lti deeplink. (required) + :type parent_id: str + :param node_id: the nodeId when tool has custom content option. + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._generate_login_initiation_form_serialize( + app_id=app_id, + parent_id=parent_id, + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _generate_login_initiation_form_serialize( + self, + app_id, + parent_id, + node_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if app_id is not None: + + _query_params.append(('appId', app_id)) + + if parent_id is not None: + + _query_params.append(('parentId', parent_id)) + + if node_id is not None: + + _query_params.append(('nodeId', node_id)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/generateLoginInitiationForm', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def generate_login_initiation_form_resource_link( + self, + node_id: Annotated[StrictStr, Field(description="the nodeid of a node that contains a lti resourcelink. is required for lti resourcelink")], + edit_mode: Annotated[Optional[StrictBool], Field(description="for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded")] = None, + version: Annotated[Optional[StrictStr], Field(description="the version. for tools with contentoption.")] = None, + launch_presentation: Annotated[Optional[StrictStr], Field(description="launchPresentation. how the resourcelink will be embedded. 
valid values: window,iframe")] = None, + jwt: Annotated[Optional[StrictStr], Field(description="jwt for checking access in lms context")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. + + + :param node_id: the nodeid of a node that contains a lti resourcelink. is required for lti resourcelink (required) + :type node_id: str + :param edit_mode: for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded + :type edit_mode: bool + :param version: the version. for tools with contentoption. + :type version: str + :param launch_presentation: launchPresentation. how the resourcelink will be embedded. valid values: window,iframe + :type launch_presentation: str + :param jwt: jwt for checking access in lms context + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._generate_login_initiation_form_resource_link_serialize( + node_id=node_id, + edit_mode=edit_mode, + version=version, + launch_presentation=launch_presentation, + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def generate_login_initiation_form_resource_link_with_http_info( + self, + node_id: Annotated[StrictStr, Field(description="the nodeid of a node that contains a lti resourcelink. is required for lti resourcelink")], + edit_mode: Annotated[Optional[StrictBool], Field(description="for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded")] = None, + version: Annotated[Optional[StrictStr], Field(description="the version. 
for tools with contentoption.")] = None, + launch_presentation: Annotated[Optional[StrictStr], Field(description="launchPresentation. how the resourcelink will be embedded. valid values: window,iframe")] = None, + jwt: Annotated[Optional[StrictStr], Field(description="jwt for checking access in lms context")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. + + + :param node_id: the nodeid of a node that contains a lti resourcelink. is required for lti resourcelink (required) + :type node_id: str + :param edit_mode: for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded + :type edit_mode: bool + :param version: the version. for tools with contentoption. + :type version: str + :param launch_presentation: launchPresentation. how the resourcelink will be embedded. valid values: window,iframe + :type launch_presentation: str + :param jwt: jwt for checking access in lms context + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._generate_login_initiation_form_resource_link_serialize( + node_id=node_id, + edit_mode=edit_mode, + version=version, + launch_presentation=launch_presentation, + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def generate_login_initiation_form_resource_link_without_preload_content( + self, + node_id: Annotated[StrictStr, Field(description="the nodeid of a node that contains a lti resourcelink. 
is required for lti resourcelink")], + edit_mode: Annotated[Optional[StrictBool], Field(description="for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded")] = None, + version: Annotated[Optional[StrictStr], Field(description="the version. for tools with contentoption.")] = None, + launch_presentation: Annotated[Optional[StrictStr], Field(description="launchPresentation. how the resourcelink will be embedded. valid values: window,iframe")] = None, + jwt: Annotated[Optional[StrictStr], Field(description="jwt for checking access in lms context")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. + + + :param node_id: the nodeid of a node that contains a lti resourcelink. is required for lti resourcelink (required) + :type node_id: str + :param edit_mode: for tools with content option, this param sends changeContentUrl (true) else contentUrl will be excluded + :type edit_mode: bool + :param version: the version. for tools with contentoption. + :type version: str + :param launch_presentation: launchPresentation. how the resourcelink will be embedded. valid values: window,iframe + :type launch_presentation: str + :param jwt: jwt for checking access in lms context + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._generate_login_initiation_form_resource_link_serialize( + node_id=node_id, + edit_mode=edit_mode, + version=version, + launch_presentation=launch_presentation, + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _generate_login_initiation_form_resource_link_serialize( + self, + node_id, + edit_mode, + version, + launch_presentation, + jwt, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if node_id is not None: + + _query_params.append(('nodeId', node_id)) + + if edit_mode is not None: + + _query_params.append(('editMode', edit_mode)) + + if version is not None: + + _query_params.append(('version', version)) + + if launch_presentation is not None: + + _query_params.append(('launchPresentation', launch_presentation)) + + if jwt is not None: + + _query_params.append(('jwt', jwt)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/generateLoginInitiationFormResourceLink', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_content( + self, + jwt: Annotated[StrictStr, Field(description="jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Custom edu-sharing endpoint to get content of node. + + Get content of node. + + :param jwt: jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool (required) + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_content_serialize( + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_content_with_http_info( + self, + jwt: Annotated[StrictStr, Field(description="jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Custom edu-sharing endpoint to get content of node. + + Get content of node. + + :param jwt: jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool (required) + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_content_serialize( + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_content_without_preload_content( + self, + jwt: Annotated[StrictStr, Field(description="jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Custom edu-sharing endpoint to get content of node. + + Get content of node. + + :param jwt: jwt containing the claims appId, nodeId, user previously send with ResourceLinkRequest or DeeplinkRequest. Must be signed by tool (required) + :type jwt: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_content_serialize( + jwt=jwt, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_content_serialize( + self, + jwt, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if jwt is not None: + + _query_params.append(('jwt', jwt)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + '*/*', + 'text/html' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/content', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def manual_registration( + self, + manual_registration_data: Annotated[ManualRegistrationData, Field(description="registrationData")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """manual registration endpoint for registration of tools. + + tool registration + + :param manual_registration_data: registrationData (required) + :type manual_registration_data: ManualRegistrationData + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._manual_registration_serialize( + manual_registration_data=manual_registration_data, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def manual_registration_with_http_info( + self, + manual_registration_data: Annotated[ManualRegistrationData, Field(description="registrationData")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """manual registration endpoint for registration of tools. + + tool registration + + :param manual_registration_data: registrationData (required) + :type manual_registration_data: ManualRegistrationData + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._manual_registration_serialize( + manual_registration_data=manual_registration_data, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def manual_registration_without_preload_content( + self, + manual_registration_data: Annotated[ManualRegistrationData, Field(description="registrationData")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """manual registration endpoint for registration of tools. + + tool registration + + :param manual_registration_data: registrationData (required) + :type manual_registration_data: ManualRegistrationData + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._manual_registration_serialize( + manual_registration_data=manual_registration_data, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _manual_registration_serialize( + self, + manual_registration_data, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if manual_registration_data is not None: + _body_params = manual_registration_data + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/ltiplatform/v13/manual-registration', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def open_id_registration( + self, + body: Annotated[StrictStr, Field(description="registrationpayload")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> OpenIdRegistrationResult: + """registration endpoint the tool uses to register at platform. + + tool registration + + :param body: registrationpayload (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._open_id_registration_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OpenIdRegistrationResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def open_id_registration_with_http_info( + self, + body: Annotated[StrictStr, Field(description="registrationpayload")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[OpenIdRegistrationResult]: + """registration endpoint the tool uses to register at platform. + + tool registration + + :param body: registrationpayload (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._open_id_registration_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OpenIdRegistrationResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def open_id_registration_without_preload_content( + self, + body: Annotated[StrictStr, Field(description="registrationpayload")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """registration endpoint the tool uses to register at platform. + + tool registration + + :param body: registrationpayload (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._open_id_registration_serialize( + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OpenIdRegistrationResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _open_id_registration_serialize( + self, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/ltiplatform/v13/openid-registration', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def openid_configuration( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> OpenIdConfiguration: + """LTIPlatform openid configuration + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._openid_configuration_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OpenIdConfiguration", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def openid_configuration_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[OpenIdConfiguration]: + """LTIPlatform openid configuration + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._openid_configuration_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OpenIdConfiguration", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def openid_configuration_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """LTIPlatform openid configuration + + + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._openid_configuration_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OpenIdConfiguration", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _openid_configuration_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/openid-configuration', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def start_dynamic_registration( + self, + url: Annotated[StrictStr, Field(description="url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """starts lti dynamic registration. + + start dynmic registration + + :param url: url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._start_dynamic_registration_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def start_dynamic_registration_with_http_info( + self, + url: Annotated[StrictStr, Field(description="url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """starts lti dynamic registration. + + start dynmic registration + + :param url: url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._start_dynamic_registration_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def start_dynamic_registration_without_preload_content( + self, + url: Annotated[StrictStr, Field(description="url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """starts lti dynamic registration. + + start dynmic registration + + :param url: url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._start_dynamic_registration_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _start_dynamic_registration_serialize( + self, + url, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + if url is not None: + _form_params.append(('url', url)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/x-www-form-urlencoded' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/ltiplatform/v13/start-dynamic-registration', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def start_dynamic_registration_get( + self, + url: Annotated[StrictStr, Field(description="url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """starts lti dynamic registration. + + start dynmic registration + + :param url: url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._start_dynamic_registration_get_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def start_dynamic_registration_get_with_http_info( + self, + url: Annotated[StrictStr, Field(description="url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """starts lti dynamic registration. + + start dynmic registration + + :param url: url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._start_dynamic_registration_get_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def start_dynamic_registration_get_without_preload_content( + self, + url: Annotated[StrictStr, Field(description="url")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """starts lti dynamic registration. + + start dynmic registration + + :param url: url (required) + :type url: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._start_dynamic_registration_get_serialize( + url=url, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _start_dynamic_registration_get_serialize( + self, + url, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if url is not None: + + _query_params.append(('url', url)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/start-dynamic-registration', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def test_token( + self, + request_body: Annotated[Dict[str, StrictStr], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """test creates a token signed with homeapp. + + test token. + + :param request_body: properties (required) + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._test_token_serialize( + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def test_token_with_http_info( + self, + request_body: Annotated[Dict[str, StrictStr], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """test creates a token signed with homeapp. + + test token. + + :param request_body: properties (required) + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._test_token_serialize( + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def test_token_without_preload_content( + self, + request_body: Annotated[Dict[str, StrictStr], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """test creates a token signed with homeapp. + + test token. + + :param request_body: properties (required) + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._test_token_serialize( + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _test_token_serialize( + self, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/ltiplatform/v13/testToken', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def tools( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Tools: + """List of tools registered + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._tools_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Tools", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def tools_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Tools]: + """List of tools registered + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._tools_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Tools", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def tools_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List of tools registered + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._tools_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Tools", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _tools_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/ltiplatform/v13/tools', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/ltiv13_api.py b/edu_sharing_openapi/edu_sharing_client/api/ltiv13_api.py new file mode 100644 index 00000000..984919f0 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/ltiv13_api.py @@ -0,0 +1,3788 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictStr, field_validator +from typing import List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.dynamic_registration_tokens import DynamicRegistrationTokens +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink +from edu_sharing_client.models.registration_url import RegistrationUrl +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class LTIV13Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def generate_deep_linking_response( + self, + node_ids: Annotated[List[StrictStr], Field(description="selected node id's")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeLTIDeepLink: + """generate DeepLinkingResponse + + + :param node_ids: selected node id's (required) + :type node_ids: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._generate_deep_linking_response_serialize( + node_ids=node_ids, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeLTIDeepLink", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def generate_deep_linking_response_with_http_info( + self, + node_ids: Annotated[List[StrictStr], Field(description="selected node id's")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeLTIDeepLink]: + """generate DeepLinkingResponse + + + :param node_ids: selected node id's (required) + :type node_ids: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._generate_deep_linking_response_serialize( + node_ids=node_ids, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeLTIDeepLink", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def generate_deep_linking_response_without_preload_content( + self, + node_ids: Annotated[List[StrictStr], Field(description="selected node id's")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """generate DeepLinkingResponse + + + :param node_ids: selected node id's (required) + :type node_ids: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
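Each generated endpoint comes in three flavours, as visible above: the plain call returns the deserialized model, the `_with_http_info` variant returns the ApiResponse wrapper with transport metadata, and the `_without_preload_content` variant returns the raw REST response without reading the body. A short sketch of the difference (node ids are placeholders):

# Illustrative sketch only -- the three call variants of one endpoint.
from edu_sharing_client.api.ltiv13_api import LTIV13Api

lti_api = LTIV13Api()  # falls back to ApiClient.get_default()
node_ids = ["node-id-1"]  # placeholder

deep_link = lti_api.generate_deep_linking_response(node_ids)                     # NodeLTIDeepLink
full = lti_api.generate_deep_linking_response_with_http_info(node_ids)           # ApiResponse wrapper
print(full.status_code, full.data)                                               # status + same model
raw = lti_api.generate_deep_linking_response_without_preload_content(node_ids)   # raw response
print(raw.read())                                                                # caller reads the body itself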
+ """ # noqa: E501 + + _param = self._generate_deep_linking_response_serialize( + node_ids=node_ids, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeLTIDeepLink", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _generate_deep_linking_response_serialize( + self, + node_ids, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'nodeIds': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if node_ids is not None: + + _query_params.append(('nodeIds', node_ids)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/lti/v13/generateDeepLinkingResponse', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_details_snippet( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + jwt: Annotated[StrictStr, Field(description="jwt containing the claims aud (clientId of platform), deploymentId and a token. must be signed by platform")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RenderingDetailsEntry: + """get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow + + get rendered html snippet for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param jwt: jwt containing the claims aud (clientId of platform), deploymentId and a token. 
must be signed by platform (required) + :type jwt: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_details_snippet_serialize( + repository=repository, + node=node, + jwt=jwt, + version=version, + display_mode=display_mode, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_details_snippet_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + jwt: Annotated[StrictStr, Field(description="jwt containing the claims aud (clientId of platform), deploymentId and a token. must be signed by platform")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RenderingDetailsEntry]: + """get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow + + get rendered html snippet for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param jwt: jwt containing the claims aud (clientId of platform), deploymentId and a token. 
must be signed by platform (required) + :type jwt: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_details_snippet_serialize( + repository=repository, + node=node, + jwt=jwt, + version=version, + display_mode=display_mode, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_details_snippet_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + jwt: Annotated[StrictStr, Field(description="jwt containing the claims aud (clientId of platform), deploymentId and a token. must be signed by platform")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow + + get rendered html snippet for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param jwt: jwt containing the claims aud (clientId of platform), deploymentId and a token. 
must be signed by platform (required) + :type jwt: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_details_snippet_serialize( + repository=repository, + node=node, + jwt=jwt, + version=version, + display_mode=display_mode, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_details_snippet_serialize( + self, + repository, + node, + jwt, + version, + display_mode, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if version is not None: + + _query_params.append(('version', version)) + + if display_mode is not None: + + _query_params.append(('displayMode', display_mode)) + + if jwt is not None: + + _query_params.append(('jwt', jwt)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/lti/v13/details/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def jwks_uri( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], 
+ Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RegistrationUrl: + """LTI - returns repository JSON Web Key Sets + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._jwks_uri_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RegistrationUrl", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def jwks_uri_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RegistrationUrl]: + """LTI - returns repository JSON Web Key Sets + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._jwks_uri_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RegistrationUrl", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def jwks_uri_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """LTI - returns repository JSON Web Key Sets + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
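A platform verifying JWTs signed by the repository needs its JSON Web Key Set; a minimal sketch of fetching it through this client (the default ApiClient is assumed to be configured already):

# Illustrative sketch only: GET /lti/v13/jwks
from edu_sharing_client.api.ltiv13_api import LTIV13Api

lti_api = LTIV13Api()
jwks = lti_api.jwks_uri()  # deserialized into the generated RegistrationUrl model
print(jwks)

# If only the raw JSON body is wanted, skip deserialization:
raw = lti_api.jwks_uri_without_preload_content()
print(raw.read())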
+ """ # noqa: E501 + + _param = self._jwks_uri_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RegistrationUrl", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _jwks_uri_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/lti/v13/jwks', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def login_initiations( + self, + iss: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + target_link_uri: Annotated[StrictStr, Field(description="target url of platform at the end of the flow")], + client_id: Annotated[Optional[StrictStr], Field(description="Id of the issuer")] = None, + login_hint: Annotated[Optional[StrictStr], Field(description="context information of the platform")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="additional context information of the platform")] = None, + lti_deployment_id: Annotated[Optional[StrictStr], Field(description="A can have multiple deployments in a platform")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """lti authentication process preparation. + + preflight phase. prepares lti authentication process. checks it issuer is valid + + :param iss: Issuer of the request, will be validated (required) + :type iss: str + :param target_link_uri: target url of platform at the end of the flow (required) + :type target_link_uri: str + :param client_id: Id of the issuer + :type client_id: str + :param login_hint: context information of the platform + :type login_hint: str + :param lti_message_hint: additional context information of the platform + :type lti_message_hint: str + :param lti_deployment_id: A can have multiple deployments in a platform + :type lti_deployment_id: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._login_initiations_serialize( + iss=iss, + target_link_uri=target_link_uri, + client_id=client_id, + login_hint=login_hint, + lti_message_hint=lti_message_hint, + lti_deployment_id=lti_deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def login_initiations_with_http_info( + self, + iss: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + target_link_uri: Annotated[StrictStr, Field(description="target url of platform at the end of the flow")], + client_id: Annotated[Optional[StrictStr], Field(description="Id of the issuer")] = None, + login_hint: Annotated[Optional[StrictStr], Field(description="context information of the platform")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="additional context information of the platform")] = None, + lti_deployment_id: Annotated[Optional[StrictStr], Field(description="A can have multiple deployments in a platform")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """lti authentication process preparation. + + preflight phase. prepares lti authentication process. checks it issuer is valid + + :param iss: Issuer of the request, will be validated (required) + :type iss: str + :param target_link_uri: target url of platform at the end of the flow (required) + :type target_link_uri: str + :param client_id: Id of the issuer + :type client_id: str + :param login_hint: context information of the platform + :type login_hint: str + :param lti_message_hint: additional context information of the platform + :type lti_message_hint: str + :param lti_deployment_id: A can have multiple deployments in a platform + :type lti_deployment_id: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._login_initiations_serialize( + iss=iss, + target_link_uri=target_link_uri, + client_id=client_id, + login_hint=login_hint, + lti_message_hint=lti_message_hint, + lti_deployment_id=lti_deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def login_initiations_without_preload_content( + self, + iss: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + target_link_uri: Annotated[StrictStr, Field(description="target url of platform at the end of the flow")], + client_id: Annotated[Optional[StrictStr], Field(description="Id of the issuer")] = None, + login_hint: Annotated[Optional[StrictStr], Field(description="context information of the platform")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="additional context information of the platform")] = None, + lti_deployment_id: Annotated[Optional[StrictStr], Field(description="A can have multiple deployments in a platform")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """lti authentication process preparation. + + preflight phase. prepares lti authentication process. checks it issuer is valid + + :param iss: Issuer of the request, will be validated (required) + :type iss: str + :param target_link_uri: target url of platform at the end of the flow (required) + :type target_link_uri: str + :param client_id: Id of the issuer + :type client_id: str + :param login_hint: context information of the platform + :type login_hint: str + :param lti_message_hint: additional context information of the platform + :type lti_message_hint: str + :param lti_deployment_id: A can have multiple deployments in a platform + :type lti_deployment_id: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._login_initiations_serialize( + iss=iss, + target_link_uri=target_link_uri, + client_id=client_id, + login_hint=login_hint, + lti_message_hint=lti_message_hint, + lti_deployment_id=lti_deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _login_initiations_serialize( + self, + iss, + target_link_uri, + client_id, + login_hint, + lti_message_hint, + lti_deployment_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + if iss is not None: + _form_params.append(('iss', iss)) + if target_link_uri is not None: + _form_params.append(('target_link_uri', target_link_uri)) + if client_id is not None: + _form_params.append(('client_id', client_id)) + if login_hint is not None: + _form_params.append(('login_hint', login_hint)) + if lti_message_hint is not None: + _form_params.append(('lti_message_hint', lti_message_hint)) + if lti_deployment_id is not None: + _form_params.append(('lti_deployment_id', lti_deployment_id)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/x-www-form-urlencoded' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/lti/v13/oidc/login_initiations', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + 
post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def login_initiations_get( + self, + iss: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + target_link_uri: Annotated[StrictStr, Field(description="target url of platform at the end of the flow")], + client_id: Annotated[Optional[StrictStr], Field(description="Id of the issuer")] = None, + login_hint: Annotated[Optional[StrictStr], Field(description="context information of the platform")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="additional context information of the platform")] = None, + lti_deployment_id: Annotated[Optional[StrictStr], Field(description="A can have multiple deployments in a platform")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """lti authentication process preparation. + + preflight phase. prepares lti authentication process. checks it issuer is valid + + :param iss: Issuer of the request, will be validated (required) + :type iss: str + :param target_link_uri: target url of platform at the end of the flow (required) + :type target_link_uri: str + :param client_id: Id of the issuer + :type client_id: str + :param login_hint: context information of the platform + :type login_hint: str + :param lti_message_hint: additional context information of the platform + :type lti_message_hint: str + :param lti_deployment_id: A can have multiple deployments in a platform + :type lti_deployment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._login_initiations_get_serialize( + iss=iss, + target_link_uri=target_link_uri, + client_id=client_id, + login_hint=login_hint, + lti_message_hint=lti_message_hint, + lti_deployment_id=lti_deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def login_initiations_get_with_http_info( + self, + iss: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + target_link_uri: Annotated[StrictStr, Field(description="target url of platform at the end of the flow")], + client_id: Annotated[Optional[StrictStr], Field(description="Id of the issuer")] = None, + login_hint: Annotated[Optional[StrictStr], Field(description="context information of the platform")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="additional context information of the platform")] = None, + lti_deployment_id: Annotated[Optional[StrictStr], Field(description="A can have multiple deployments in a platform")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """lti authentication process preparation. + + preflight phase. prepares lti authentication process. checks it issuer is valid + + :param iss: Issuer of the request, will be validated (required) + :type iss: str + :param target_link_uri: target url of platform at the end of the flow (required) + :type target_link_uri: str + :param client_id: Id of the issuer + :type client_id: str + :param login_hint: context information of the platform + :type login_hint: str + :param lti_message_hint: additional context information of the platform + :type lti_message_hint: str + :param lti_deployment_id: A can have multiple deployments in a platform + :type lti_deployment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._login_initiations_get_serialize( + iss=iss, + target_link_uri=target_link_uri, + client_id=client_id, + login_hint=login_hint, + lti_message_hint=lti_message_hint, + lti_deployment_id=lti_deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def login_initiations_get_without_preload_content( + self, + iss: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + target_link_uri: Annotated[StrictStr, Field(description="target url of platform at the end of the flow")], + client_id: Annotated[Optional[StrictStr], Field(description="Id of the issuer")] = None, + login_hint: Annotated[Optional[StrictStr], Field(description="context information of the platform")] = None, + lti_message_hint: Annotated[Optional[StrictStr], Field(description="additional context information of the platform")] = None, + lti_deployment_id: Annotated[Optional[StrictStr], Field(description="A can have multiple deployments in a platform")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """lti authentication process preparation. + + preflight phase. prepares lti authentication process. checks it issuer is valid + + :param iss: Issuer of the request, will be validated (required) + :type iss: str + :param target_link_uri: target url of platform at the end of the flow (required) + :type target_link_uri: str + :param client_id: Id of the issuer + :type client_id: str + :param login_hint: context information of the platform + :type login_hint: str + :param lti_message_hint: additional context information of the platform + :type lti_message_hint: str + :param lti_deployment_id: A can have multiple deployments in a platform + :type lti_deployment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._login_initiations_get_serialize( + iss=iss, + target_link_uri=target_link_uri, + client_id=client_id, + login_hint=login_hint, + lti_message_hint=lti_message_hint, + lti_deployment_id=lti_deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _login_initiations_get_serialize( + self, + iss, + target_link_uri, + client_id, + login_hint, + lti_message_hint, + lti_deployment_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if iss is not None: + + _query_params.append(('iss', iss)) + + if target_link_uri is not None: + + _query_params.append(('target_link_uri', target_link_uri)) + + if client_id is not None: + + _query_params.append(('client_id', client_id)) + + if login_hint is not None: + + _query_params.append(('login_hint', login_hint)) + + if lti_message_hint is not None: + + _query_params.append(('lti_message_hint', lti_message_hint)) + + if lti_deployment_id is not None: + + _query_params.append(('lti_deployment_id', lti_deployment_id)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/lti/v13/oidc/login_initiations', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def lti( + self, + id_token: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + state: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """lti tool redirect. + + lti tool redirect + + :param id_token: Issuer of the request, will be validated (required) + :type id_token: str + :param state: Issuer of the request, will be validated (required) + :type state: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lti_serialize( + id_token=id_token, + state=state, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lti_with_http_info( + self, + id_token: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + state: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """lti tool redirect. + + lti tool redirect + + :param id_token: Issuer of the request, will be validated (required) + :type id_token: str + :param state: Issuer of the request, will be validated (required) + :type state: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._lti_serialize( + id_token=id_token, + state=state, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lti_without_preload_content( + self, + id_token: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + state: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """lti tool redirect. + + lti tool redirect + + :param id_token: Issuer of the request, will be validated (required) + :type id_token: str + :param state: Issuer of the request, will be validated (required) + :type state: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
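For completeness, a sketch of the tool-redirect step, where the platform posts `id_token` and `state` back to the repository; both values are placeholders here and are normally supplied by the platform, not by application code:

# Illustrative sketch only: POST /lti/v13/lti13
from edu_sharing_client.api.ltiv13_api import LTIV13Api

lti_api = LTIV13Api()
page = lti_api.lti(
    id_token="<signed JWT issued by the platform>",   # placeholder
    state="<state from the login initiation>",        # placeholder
)
print(page)  # text/html rendered by the repository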
+ """ # noqa: E501 + + _param = self._lti_serialize( + id_token=id_token, + state=state, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lti_serialize( + self, + id_token, + state, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + if id_token is not None: + _form_params.append(('id_token', id_token)) + if state is not None: + _form_params.append(('state', state)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/x-www-form-urlencoded' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/lti/v13/lti13', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def lti_registration_dynamic( + self, + openid_configuration: Annotated[StrictStr, Field(description="the endpoint to the open id configuration to be used for this registration")], + token: Annotated[StrictStr, Field(description="one time usage token which is autogenerated with the url in edu-sharing admin gui.")], + registration_token: Annotated[Optional[StrictStr], Field(description="the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """LTI Dynamic Registration - Initiate registration + + + :param openid_configuration: the endpoint to the open id configuration to be used for this registration (required) + :type openid_configuration: str + :param token: one time usage token which is autogenerated with the url in edu-sharing admin gui. 
(required) + :type token: str + :param registration_token: the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration. + :type registration_token: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lti_registration_dynamic_serialize( + openid_configuration=openid_configuration, + token=token, + registration_token=registration_token, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lti_registration_dynamic_with_http_info( + self, + openid_configuration: Annotated[StrictStr, Field(description="the endpoint to the open id configuration to be used for this registration")], + token: Annotated[StrictStr, Field(description="one time usage token which is autogenerated with the url in edu-sharing admin gui.")], + registration_token: Annotated[Optional[StrictStr], Field(description="the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """LTI Dynamic Registration - Initiate registration + + + :param openid_configuration: the endpoint to the open id configuration to be used for this registration (required) + :type openid_configuration: str + :param token: one time usage token which is autogenerated with the url in edu-sharing admin gui. (required) + :type token: str + :param registration_token: the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration. 
+ :type registration_token: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lti_registration_dynamic_serialize( + openid_configuration=openid_configuration, + token=token, + registration_token=registration_token, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lti_registration_dynamic_without_preload_content( + self, + openid_configuration: Annotated[StrictStr, Field(description="the endpoint to the open id configuration to be used for this registration")], + token: Annotated[StrictStr, Field(description="one time usage token which is autogenerated with the url in edu-sharing admin gui.")], + registration_token: Annotated[Optional[StrictStr], Field(description="the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """LTI Dynamic Registration - Initiate registration + + + :param openid_configuration: the endpoint to the open id configuration to be used for this registration (required) + :type openid_configuration: str + :param token: one time usage token which is autogenerated with the url in edu-sharing admin gui. (required) + :type token: str + :param registration_token: the registration access token. If present, it must be used as the access token by the tool when making the registration request to the registration endpoint exposed in the openid configuration. + :type registration_token: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lti_registration_dynamic_serialize( + openid_configuration=openid_configuration, + token=token, + registration_token=registration_token, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lti_registration_dynamic_serialize( + self, + openid_configuration, + token, + registration_token, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if token is not None: + _path_params['token'] = token + # process the query parameters + if openid_configuration is not None: + + _query_params.append(('openid_configuration', openid_configuration)) + + if registration_token is not None: + + _query_params.append(('registration_token', registration_token)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/lti/v13/registration/dynamic/{token}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def lti_registration_url( + self, + generate: Annotated[StrictBool, Field(description="if to add a ne url to the list")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DynamicRegistrationTokens: + """LTI Dynamic Registration - generates url for 
platform + + + :param generate: if to add a ne url to the list (required) + :type generate: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lti_registration_url_serialize( + generate=generate, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicRegistrationTokens", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lti_registration_url_with_http_info( + self, + generate: Annotated[StrictBool, Field(description="if to add a ne url to the list")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DynamicRegistrationTokens]: + """LTI Dynamic Registration - generates url for platform + + + :param generate: if to add a ne url to the list (required) + :type generate: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
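+
+        Example (illustrative sketch; the client setup, class name and import
+        path are assumptions, and the returned ``DynamicRegistrationTokens``
+        model is only printed, not interpreted)::
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.ltiv13_api import LTIV13Api  # assumed module path
+
+            api = LTIV13Api(ApiClient.get_default())
+            # generate=True asks the repository to add a new registration url to the list
+            tokens = api.lti_registration_url_with_http_info(generate=True)
+            print(tokens.status_code, tokens.data)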
+ """ # noqa: E501 + + _param = self._lti_registration_url_serialize( + generate=generate, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicRegistrationTokens", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lti_registration_url_without_preload_content( + self, + generate: Annotated[StrictBool, Field(description="if to add a ne url to the list")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """LTI Dynamic Registration - generates url for platform + + + :param generate: if to add a ne url to the list (required) + :type generate: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._lti_registration_url_serialize( + generate=generate, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicRegistrationTokens", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lti_registration_url_serialize( + self, + generate, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if generate is not None: + + _query_params.append(('generate', generate)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/lti/v13/registration/url', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def lti_target( + self, + node_id: Annotated[StrictStr, Field(description="edu-sharing node id")], + id_token: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + state: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """lti tool resource link target. + + used by some platforms for direct (without oidc login_init) launch requests + + :param node_id: edu-sharing node id (required) + :type node_id: str + :param id_token: Issuer of the request, will be validated (required) + :type id_token: str + :param state: Issuer of the request, will be validated (required) + :type state: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._lti_target_serialize( + node_id=node_id, + id_token=id_token, + state=state, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def lti_target_with_http_info( + self, + node_id: Annotated[StrictStr, Field(description="edu-sharing node id")], + id_token: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + state: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """lti tool resource link target. + + used by some platforms for direct (without oidc login_init) launch requests + + :param node_id: edu-sharing node id (required) + :type node_id: str + :param id_token: Issuer of the request, will be validated (required) + :type id_token: str + :param state: Issuer of the request, will be validated (required) + :type state: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
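+
+        Example (illustrative sketch; the node id and token values are
+        placeholders, and the class name and import path are assumptions)::
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.ltiv13_api import LTIV13Api  # assumed module path
+
+            api = LTIV13Api(ApiClient.get_default())
+            html = api.lti_target_with_http_info(
+                node_id="<nodeId>", id_token="<jwt>", state="<state>"
+            )
+            print(html.data)  # rendered launch page for the given node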
+ """ # noqa: E501 + + _param = self._lti_target_serialize( + node_id=node_id, + id_token=id_token, + state=state, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def lti_target_without_preload_content( + self, + node_id: Annotated[StrictStr, Field(description="edu-sharing node id")], + id_token: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + state: Annotated[StrictStr, Field(description="Issuer of the request, will be validated")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """lti tool resource link target. + + used by some platforms for direct (without oidc login_init) launch requests + + :param node_id: edu-sharing node id (required) + :type node_id: str + :param id_token: Issuer of the request, will be validated (required) + :type id_token: str + :param state: Issuer of the request, will be validated (required) + :type state: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._lti_target_serialize( + node_id=node_id, + id_token=id_token, + state=state, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _lti_target_serialize( + self, + node_id, + id_token, + state, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if node_id is not None: + _path_params['nodeId'] = node_id + # process the query parameters + # process the header parameters + # process the form parameters + if id_token is not None: + _form_params.append(('id_token', id_token)) + if state is not None: + _form_params.append(('state', state)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'text/html' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/x-www-form-urlencoded' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/lti/v13/lti13/{nodeId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def register_by_type( + self, + type: Annotated[StrictStr, Field(description="lti platform typ i.e. moodle")], + base_url: Annotated[StrictStr, Field(description="base url i.e. http://localhost/moodle used as platformId")], + client_id: Annotated[Optional[StrictStr], Field(description="client id")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="deployment id")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """register LTI platform + + + :param type: lti platform typ i.e. moodle (required) + :type type: str + :param base_url: base url i.e. http://localhost/moodle used as platformId (required) + :type base_url: str + :param client_id: client id + :type client_id: str + :param deployment_id: deployment id + :type deployment_id: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._register_by_type_serialize( + type=type, + base_url=base_url, + client_id=client_id, + deployment_id=deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def register_by_type_with_http_info( + self, + type: Annotated[StrictStr, Field(description="lti platform typ i.e. moodle")], + base_url: Annotated[StrictStr, Field(description="base url i.e. http://localhost/moodle used as platformId")], + client_id: Annotated[Optional[StrictStr], Field(description="client id")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="deployment id")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """register LTI platform + + + :param type: lti platform typ i.e. moodle (required) + :type type: str + :param base_url: base url i.e. http://localhost/moodle used as platformId (required) + :type base_url: str + :param client_id: client id + :type client_id: str + :param deployment_id: deployment id + :type deployment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._register_by_type_serialize( + type=type, + base_url=base_url, + client_id=client_id, + deployment_id=deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def register_by_type_without_preload_content( + self, + type: Annotated[StrictStr, Field(description="lti platform typ i.e. moodle")], + base_url: Annotated[StrictStr, Field(description="base url i.e. http://localhost/moodle used as platformId")], + client_id: Annotated[Optional[StrictStr], Field(description="client id")] = None, + deployment_id: Annotated[Optional[StrictStr], Field(description="deployment id")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """register LTI platform + + + :param type: lti platform typ i.e. moodle (required) + :type type: str + :param base_url: base url i.e. http://localhost/moodle used as platformId (required) + :type base_url: str + :param client_id: client id + :type client_id: str + :param deployment_id: deployment id + :type deployment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
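+
+        Example (illustrative sketch; the platform type and base url are
+        placeholders, the class name and import path are assumptions, and the
+        raw response object is assumed to expose ``status`` and ``read()``)::
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.ltiv13_api import LTIV13Api  # assumed module path
+
+            api = LTIV13Api(ApiClient.get_default())
+            raw = api.register_by_type_without_preload_content(
+                type="moodle", base_url="http://localhost/moodle"
+            )
+            print(raw.status)  # body is not preloaded; call raw.read() if needed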
+ """ # noqa: E501 + + _param = self._register_by_type_serialize( + type=type, + base_url=base_url, + client_id=client_id, + deployment_id=deployment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _register_by_type_serialize( + self, + type, + base_url, + client_id, + deployment_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if type is not None: + _path_params['type'] = type + # process the query parameters + if base_url is not None: + + _query_params.append(('baseUrl', base_url)) + + if client_id is not None: + + _query_params.append(('client_id', client_id)) + + if deployment_id is not None: + + _query_params.append(('deployment_id', deployment_id)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/lti/v13/registration/{type}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def register_test( + self, + platform_id: Annotated[StrictStr, Field(description="the issuer")], + client_id: Annotated[StrictStr, Field(description="client id")], + deployment_id: Annotated[StrictStr, Field(description="deployment id")], + authentication_request_url: Annotated[StrictStr, Field(description="oidc endpoint, authentication request url")], + keyset_url: Annotated[StrictStr, Field(description="jwks endpoint, keyset url")], + auth_token_url: Annotated[StrictStr, Field(description="auth token url")], + key_id: Annotated[Optional[StrictStr], Field(description="jwks key id")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """register LTI platform + + + :param platform_id: the issuer (required) + :type platform_id: str + :param client_id: client id (required) + :type client_id: str + :param deployment_id: deployment id (required) + :type deployment_id: str + :param authentication_request_url: oidc endpoint, authentication request 
url (required) + :type authentication_request_url: str + :param keyset_url: jwks endpoint, keyset url (required) + :type keyset_url: str + :param auth_token_url: auth token url (required) + :type auth_token_url: str + :param key_id: jwks key id + :type key_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._register_test_serialize( + platform_id=platform_id, + client_id=client_id, + deployment_id=deployment_id, + authentication_request_url=authentication_request_url, + keyset_url=keyset_url, + auth_token_url=auth_token_url, + key_id=key_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def register_test_with_http_info( + self, + platform_id: Annotated[StrictStr, Field(description="the issuer")], + client_id: Annotated[StrictStr, Field(description="client id")], + deployment_id: Annotated[StrictStr, Field(description="deployment id")], + authentication_request_url: Annotated[StrictStr, Field(description="oidc endpoint, authentication request url")], + keyset_url: Annotated[StrictStr, Field(description="jwks endpoint, keyset url")], + auth_token_url: Annotated[StrictStr, Field(description="auth token url")], + key_id: Annotated[Optional[StrictStr], Field(description="jwks key id")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """register LTI platform + + + :param platform_id: the issuer (required) + :type platform_id: str + :param client_id: client id (required) + :type client_id: str + :param deployment_id: deployment id (required) + :type deployment_id: str + :param authentication_request_url: oidc endpoint, authentication request url (required) + :type authentication_request_url: str + :param keyset_url: jwks endpoint, keyset url (required) + :type keyset_url: str 
+ :param auth_token_url: auth token url (required) + :type auth_token_url: str + :param key_id: jwks key id + :type key_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._register_test_serialize( + platform_id=platform_id, + client_id=client_id, + deployment_id=deployment_id, + authentication_request_url=authentication_request_url, + keyset_url=keyset_url, + auth_token_url=auth_token_url, + key_id=key_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def register_test_without_preload_content( + self, + platform_id: Annotated[StrictStr, Field(description="the issuer")], + client_id: Annotated[StrictStr, Field(description="client id")], + deployment_id: Annotated[StrictStr, Field(description="deployment id")], + authentication_request_url: Annotated[StrictStr, Field(description="oidc endpoint, authentication request url")], + keyset_url: Annotated[StrictStr, Field(description="jwks endpoint, keyset url")], + auth_token_url: Annotated[StrictStr, Field(description="auth token url")], + key_id: Annotated[Optional[StrictStr], Field(description="jwks key id")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """register LTI platform + + + :param platform_id: the issuer (required) + :type platform_id: str + :param client_id: client id (required) + :type client_id: str + :param deployment_id: deployment id (required) + :type deployment_id: str + :param authentication_request_url: oidc endpoint, authentication request url (required) + :type authentication_request_url: str + :param keyset_url: jwks endpoint, keyset url (required) + :type keyset_url: str + :param auth_token_url: auth token url (required) + :type auth_token_url: str + :param key_id: jwks key id + :type key_id: str + 
:param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._register_test_serialize( + platform_id=platform_id, + client_id=client_id, + deployment_id=deployment_id, + authentication_request_url=authentication_request_url, + keyset_url=keyset_url, + auth_token_url=auth_token_url, + key_id=key_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _register_test_serialize( + self, + platform_id, + client_id, + deployment_id, + authentication_request_url, + keyset_url, + auth_token_url, + key_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if platform_id is not None: + + _query_params.append(('platformId', platform_id)) + + if client_id is not None: + + _query_params.append(('client_id', client_id)) + + if deployment_id is not None: + + _query_params.append(('deployment_id', deployment_id)) + + if authentication_request_url is not None: + + _query_params.append(('authentication_request_url', authentication_request_url)) + + if keyset_url is not None: + + _query_params.append(('keyset_url', keyset_url)) + + if key_id is not None: + + _query_params.append(('key_id', key_id)) + + if auth_token_url is not None: + + _query_params.append(('auth_token_url', auth_token_url)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/lti/v13/registration/static', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + 
files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_lti_registration_url( + self, + token: Annotated[StrictStr, Field(description="the token of the link you have to remove")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DynamicRegistrationTokens: + """LTI Dynamic Regitration - delete url + + + :param token: the token of the link you have to remove (required) + :type token: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_lti_registration_url_serialize( + token=token, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicRegistrationTokens", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_lti_registration_url_with_http_info( + self, + token: Annotated[StrictStr, Field(description="the token of the link you have to remove")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DynamicRegistrationTokens]: + """LTI Dynamic Regitration - delete url + + + :param token: the token of the link you have to remove (required) + :type token: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_lti_registration_url_serialize( + token=token, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicRegistrationTokens", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_lti_registration_url_without_preload_content( + self, + token: Annotated[StrictStr, Field(description="the token of the link you have to remove")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """LTI Dynamic Regitration - delete url + + + :param token: the token of the link you have to remove (required) + :type token: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
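+
+        Example (illustrative sketch; the token value is a placeholder, the
+        class name and import path are assumptions, and the raw response object
+        is assumed to expose ``status``)::
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.ltiv13_api import LTIV13Api  # assumed module path
+
+            api = LTIV13Api(ApiClient.get_default())
+            raw = api.remove_lti_registration_url_without_preload_content(token="<token>")
+            print(raw.status)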
+ """ # noqa: E501 + + _param = self._remove_lti_registration_url_serialize( + token=token, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DynamicRegistrationTokens", + '400': "str", + '401': "str", + '403': "str", + '404': "str", + '500': "str", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_lti_registration_url_serialize( + self, + token, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if token is not None: + _path_params['token'] = token + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/lti/v13/registration/url/{token}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/mdsv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/mdsv1_api.py new file mode 100644 index 00000000..b077d5f5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/mdsv1_api.py @@ -0,0 +1,1626 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.mds import Mds +from edu_sharing_client.models.mds_entries import MdsEntries +from edu_sharing_client.models.mds_value import MdsValue +from edu_sharing_client.models.suggestion_param import SuggestionParam +from edu_sharing_client.models.suggestions import Suggestions + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class MDSV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get_metadata_set( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Mds: + """Get metadata set new. + + Get metadata set new. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_metadata_set_serialize( + repository=repository, + metadataset=metadataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mds", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_metadata_set_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Mds]: + """Get metadata set new. + + Get metadata set new. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_metadata_set_serialize( + repository=repository, + metadataset=metadataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mds", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_metadata_set_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get metadata set new. + + Get metadata set new. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_metadata_set_serialize( + repository=repository, + metadataset=metadataset, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mds", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_metadata_set_serialize( + self, + repository, + metadataset, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/mds/v1/metadatasets/{repository}/{metadataset}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_metadata_sets( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MdsEntries: + """Get metadata sets V2 of repository. + + Get metadata sets V2 of repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_metadata_sets_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MdsEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_metadata_sets_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[MdsEntries]: + """Get metadata sets V2 of repository. + + Get metadata sets V2 of repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
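+
+        Usage sketch (illustrative, not part of the generated spec; it assumes
+        a default ApiClient has already been configured for the target
+        edu-sharing instance):
+
+            from edu_sharing_client.api_client import ApiClient
+            from edu_sharing_client.api.mdsv1_api import MDSV1Api
+
+            api = MDSV1Api(ApiClient.get_default())
+            resp = api.get_metadata_sets_with_http_info(repository="-home-")
+            mds_entries = resp.data  # deserialized MdsEntries payload
+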
+ """ # noqa: E501 + + _param = self._get_metadata_sets_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MdsEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_metadata_sets_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get metadata sets V2 of repository. + + Get metadata sets V2 of repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_metadata_sets_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MdsEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_metadata_sets_serialize( + self, + repository, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/mds/v1/metadatasets/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_values( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + suggestion_param: Annotated[Optional[SuggestionParam], Field(description="suggestionParam")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Suggestions: + """Get values. + + Get values. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param suggestion_param: suggestionParam + :type suggestion_param: SuggestionParam + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_values_serialize( + repository=repository, + metadataset=metadataset, + suggestion_param=suggestion_param, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Suggestions", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_values_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + suggestion_param: Annotated[Optional[SuggestionParam], Field(description="suggestionParam")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Suggestions]: + """Get values. + + Get values. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param suggestion_param: suggestionParam + :type suggestion_param: SuggestionParam + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
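+
+        Usage sketch (illustrative; assumes MDSV1Api and ApiClient are already
+        imported and that a default client is configured). The optional
+        suggestion_param body is omitted here:
+
+            api = MDSV1Api(ApiClient.get_default())
+            resp = api.get_values_with_http_info(repository="-home-",
+                                                 metadataset="-default-")
+            suggestions = resp.data  # deserialized Suggestions payload
+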
+ """ # noqa: E501 + + _param = self._get_values_serialize( + repository=repository, + metadataset=metadataset, + suggestion_param=suggestion_param, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Suggestions", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_values_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + suggestion_param: Annotated[Optional[SuggestionParam], Field(description="suggestionParam")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get values. + + Get values. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param suggestion_param: suggestionParam + :type suggestion_param: SuggestionParam + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_values_serialize( + repository=repository, + metadataset=metadataset, + suggestion_param=suggestion_param, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Suggestions", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_values_serialize( + self, + repository, + metadataset, + suggestion_param, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if suggestion_param is not None: + _body_params = suggestion_param + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mds/v1/metadatasets/{repository}/{metadataset}/values', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_values4_keys( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + var_property: Annotated[Optional[StrictStr], Field(description="property")] = None, + request_body: Annotated[Optional[List[StrictStr]], Field(description="keys")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Suggestions: + """Get values for keys. + + Get values for keys. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: query + :type query: str + :param var_property: property + :type var_property: str + :param request_body: keys + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_values4_keys_serialize( + repository=repository, + metadataset=metadataset, + query=query, + var_property=var_property, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Suggestions", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_values4_keys_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + var_property: Annotated[Optional[StrictStr], Field(description="property")] = None, + request_body: Annotated[Optional[List[StrictStr]], Field(description="keys")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Suggestions]: + """Get values for keys. + + Get values for keys. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: query + :type query: str + :param var_property: property + :type var_property: str + :param request_body: keys + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_values4_keys_serialize( + repository=repository, + metadataset=metadataset, + query=query, + var_property=var_property, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Suggestions", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_values4_keys_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[Optional[StrictStr], Field(description="query")] = None, + var_property: Annotated[Optional[StrictStr], Field(description="property")] = None, + request_body: Annotated[Optional[List[StrictStr]], Field(description="keys")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get values for keys. + + Get values for keys. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: query + :type query: str + :param var_property: property + :type var_property: str + :param request_body: keys + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_values4_keys_serialize( + repository=repository, + metadataset=metadataset, + query=query, + var_property=var_property, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Suggestions", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_values4_keys_serialize( + self, + repository, + metadataset, + query, + var_property, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'request_body': '', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + # process the query parameters + if query is not None: + + _query_params.append(('query', query)) + + if var_property is not None: + + _query_params.append(('property', var_property)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = 
_default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mds/v1/metadatasets/{repository}/{metadataset}/values_for_keys', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def suggest_value( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + widget: Annotated[StrictStr, Field(description="widget id, e.g. cm:name")], + caption: Annotated[StrictStr, Field(description="caption of the new entry (id will be auto-generated)")], + parent: Annotated[Optional[StrictStr], Field(description="parent id of the new entry (might be null)")] = None, + node_id: Annotated[Optional[List[StrictStr]], Field(description="One or more nodes this suggestion relates to (optional, only for extended mail data)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MdsValue: + """Suggest a value. + + Suggest a new value for a given metadataset and widget. The suggestion will be forwarded to the corresponding person in the metadataset file + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param widget: widget id, e.g. cm:name (required) + :type widget: str + :param caption: caption of the new entry (id will be auto-generated) (required) + :type caption: str + :param parent: parent id of the new entry (might be null) + :type parent: str + :param node_id: One or more nodes this suggestion relates to (optional, only for extended mail data) + :type node_id: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
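+
+        Usage sketch (illustrative; the caption text is a placeholder and the
+        call assumes MDSV1Api and ApiClient are imported and configured):
+
+            api = MDSV1Api(ApiClient.get_default())
+            new_value = api.suggest_value(repository="-home-",
+                                          metadataset="-default-",
+                                          widget="cm:name",
+                                          caption="Suggested caption")
+            # new_value is the MdsValue created for the suggestion
+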
+ """ # noqa: E501 + + _param = self._suggest_value_serialize( + repository=repository, + metadataset=metadataset, + widget=widget, + caption=caption, + parent=parent, + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MdsValue", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def suggest_value_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + widget: Annotated[StrictStr, Field(description="widget id, e.g. cm:name")], + caption: Annotated[StrictStr, Field(description="caption of the new entry (id will be auto-generated)")], + parent: Annotated[Optional[StrictStr], Field(description="parent id of the new entry (might be null)")] = None, + node_id: Annotated[Optional[List[StrictStr]], Field(description="One or more nodes this suggestion relates to (optional, only for extended mail data)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[MdsValue]: + """Suggest a value. + + Suggest a new value for a given metadataset and widget. The suggestion will be forwarded to the corresponding person in the metadataset file + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param widget: widget id, e.g. cm:name (required) + :type widget: str + :param caption: caption of the new entry (id will be auto-generated) (required) + :type caption: str + :param parent: parent id of the new entry (might be null) + :type parent: str + :param node_id: One or more nodes this suggestion relates to (optional, only for extended mail data) + :type node_id: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suggest_value_serialize( + repository=repository, + metadataset=metadataset, + widget=widget, + caption=caption, + parent=parent, + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MdsValue", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def suggest_value_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + widget: Annotated[StrictStr, Field(description="widget id, e.g. cm:name")], + caption: Annotated[StrictStr, Field(description="caption of the new entry (id will be auto-generated)")], + parent: Annotated[Optional[StrictStr], Field(description="parent id of the new entry (might be null)")] = None, + node_id: Annotated[Optional[List[StrictStr]], Field(description="One or more nodes this suggestion relates to (optional, only for extended mail data)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Suggest a value. + + Suggest a new value for a given metadataset and widget. The suggestion will be forwarded to the corresponding person in the metadataset file + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param widget: widget id, e.g. cm:name (required) + :type widget: str + :param caption: caption of the new entry (id will be auto-generated) (required) + :type caption: str + :param parent: parent id of the new entry (might be null) + :type parent: str + :param node_id: One or more nodes this suggestion relates to (optional, only for extended mail data) + :type node_id: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._suggest_value_serialize( + repository=repository, + metadataset=metadataset, + widget=widget, + caption=caption, + parent=parent, + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MdsValue", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _suggest_value_serialize( + self, + repository, + metadataset, + widget, + caption, + parent, + node_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'nodeId': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + if widget is not None: + _path_params['widget'] = widget + # process the query parameters + if caption is not None: + + _query_params.append(('caption', caption)) + + if parent is not None: + + _query_params.append(('parent', parent)) + + if node_id is not None: + + _query_params.append(('nodeId', node_id)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mds/v1/metadatasets/{repository}/{metadataset}/values/{widget}/suggest', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/mediacenterv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/mediacenterv1_api.py new file mode 100644 index 00000000..b2977abf --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/mediacenterv1_api.py @@ -0,0 +1,3815 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import Any, Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult +from edu_sharing_client.models.mediacenter import Mediacenter +from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult +from edu_sharing_client.models.organisations_import_result import OrganisationsImportResult +from edu_sharing_client.models.profile import Profile +from edu_sharing_client.models.search_parameters import SearchParameters + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class MEDIACENTERV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_mediacenter_group( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + group: Annotated[StrictStr, Field(description="authorityName of the group that should be managed by that mediacenter")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """add a group that is managed by the given mediacenter + + although not restricted, it is recommended that the group is an edu-sharing organization (admin rights are required) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param group: authorityName of the group that should be managed by that mediacenter (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_mediacenter_group_serialize( + repository=repository, + mediacenter=mediacenter, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_mediacenter_group_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + group: Annotated[StrictStr, Field(description="authorityName of the group that should be managed by that mediacenter")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """add a group that is managed by the given mediacenter + + although not restricted, it is recommended that the group is an edu-sharing organization (admin rights are required) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param group: authorityName of the group that should be managed by that mediacenter (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
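A usage sketch for add_mediacenter_group: it assigns an existing group to a mediacenter via PUT on .../manages/{group} and, per the generated response map, returns the 200 body deserialized as a plain string. The repository id "-home-" comes from the parameter description; both authority names below are illustrative placeholders, not values taken from the patch:

from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api

api = MEDIACENTERV1Api(ApiClient.get_default())

# Admin rights are required; the docstring recommends that the managed group
# is an edu-sharing organization. Authority names here are placeholders.
result = api.add_mediacenter_group(
    repository="-home-",
    mediacenter="GROUP_MEDIA_CENTER_example",
    group="GROUP_example_org",
)
print(result)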
+ """ # noqa: E501 + + _param = self._add_mediacenter_group_serialize( + repository=repository, + mediacenter=mediacenter, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_mediacenter_group_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + group: Annotated[StrictStr, Field(description="authorityName of the group that should be managed by that mediacenter")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """add a group that is managed by the given mediacenter + + although not restricted, it is recommended that the group is an edu-sharing organization (admin rights are required) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param group: authorityName of the group that should be managed by that mediacenter (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_mediacenter_group_serialize( + repository=repository, + mediacenter=mediacenter, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_mediacenter_group_serialize( + self, + repository, + mediacenter, + group, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + if group is not None: + _path_params['group'] = group + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_mediacenter( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="mediacenter name")], + profile: Optional[Profile] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Mediacenter: + """create new mediacenter in repository. + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: mediacenter name (required) + :type mediacenter: str + :param profile: + :type profile: Profile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + profile=profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mediacenter", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_mediacenter_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="mediacenter name")], + profile: Optional[Profile] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Mediacenter]: + """create new mediacenter in repository. + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: mediacenter name (required) + :type mediacenter: str + :param profile: + :type profile: Profile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
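A usage sketch for create_mediacenter (admin rights are required). The profile argument is optional; the Profile model's fields live in the generated edu_sharing_client.models.profile module and are not shown in this patch, so the sketch simply passes None:

from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api

api = MEDIACENTERV1Api(ApiClient.get_default())

# Creates the mediacenter and returns the deserialized Mediacenter model.
new_mc = api.create_mediacenter(
    repository="-home-",
    mediacenter="example_mediacenter",   # illustrative mediacenter name
    profile=None,                        # optionally a Profile instance
)
print(new_mc)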
+ """ # noqa: E501 + + _param = self._create_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + profile=profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mediacenter", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_mediacenter_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="mediacenter name")], + profile: Optional[Profile] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create new mediacenter in repository. + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: mediacenter name (required) + :type mediacenter: str + :param profile: + :type profile: Profile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + profile=profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mediacenter", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_mediacenter_serialize( + self, + repository, + mediacenter, + profile, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if profile is not None: + _body_params = profile + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_mediacenter( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """delete a mediacenter group and it's admin group and proxy group + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_mediacenter_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """delete a mediacenter group and it's admin group and proxy group + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_mediacenter_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete a mediacenter group and it's admin group and proxy group + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
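The corresponding delete call removes the mediacenter group together with its admin and proxy groups. The generated method returns None on a 200 response; non-2xx responses typically surface as exceptions raised by the generated client. The authority name below is illustrative:

from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api

api = MEDIACENTERV1Api(ApiClient.get_default())

# Admin rights are required; nothing is returned on success.
api.delete_mediacenter(
    repository="-home-",
    mediacenter="GROUP_MEDIA_CENTER_example",   # illustrative authorityName
)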
+ """ # noqa: E501 + + _param = self._delete_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_mediacenter_serialize( + self, + repository, + mediacenter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def edit_mediacenter( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="mediacenter name")], + profile: Optional[Profile] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Mediacenter: + """edit a mediacenter in repository. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: mediacenter name (required) + :type mediacenter: str + :param profile: + :type profile: Profile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._edit_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + profile=profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mediacenter", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def edit_mediacenter_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="mediacenter name")], + profile: Optional[Profile] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Mediacenter]: + """edit a mediacenter in repository. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: mediacenter name (required) + :type mediacenter: str + :param profile: + :type profile: Profile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._edit_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + profile=profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mediacenter", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def edit_mediacenter_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="mediacenter name")], + profile: Optional[Profile] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """edit a mediacenter in repository. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: mediacenter name (required) + :type mediacenter: str + :param profile: + :type profile: Profile + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._edit_mediacenter_serialize( + repository=repository, + mediacenter=mediacenter, + profile=profile, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Mediacenter", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _edit_mediacenter_serialize( + self, + repository, + mediacenter, + profile, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if profile is not None: + _body_params = profile + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def export_mediacenter_licensed_nodes( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that licenses nodes")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + properties: Annotated[Optional[List[StrictStr]], Field(description="properties to fetch, use parent:: to include parent property values")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get nodes that are licensed by the given mediacenter + + e.g. cm:name + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that licenses nodes (required) + :type mediacenter: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param properties: properties to fetch, use parent:: to include parent property values + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
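export_mediacenter_licensed_nodes POSTs a SearchParameters body to .../licenses/export and, per the generated response map, returns the export as a plain string. The SearchParameters fields are defined in the generated models and are not shown in this patch; the empty criteria list below is an assumption made only to keep the sketch self-contained, and "cm:name" follows the property example given in the docstring:

from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api
from edu_sharing_client.models.search_parameters import SearchParameters

api = MEDIACENTERV1Api(ApiClient.get_default())

params = SearchParameters(criteria=[])   # 'criteria' field assumed; see the generated model
export = api.export_mediacenter_licensed_nodes(
    repository="-home-",
    mediacenter="GROUP_MEDIA_CENTER_example",   # illustrative authorityName
    search_parameters=params,
    properties=["cm:name"],                     # e.g. cm:name, per the docstring
)
print(export)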
+ """ # noqa: E501 + + _param = self._export_mediacenter_licensed_nodes_serialize( + repository=repository, + mediacenter=mediacenter, + search_parameters=search_parameters, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def export_mediacenter_licensed_nodes_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that licenses nodes")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + properties: Annotated[Optional[List[StrictStr]], Field(description="properties to fetch, use parent:: to include parent property values")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get nodes that are licensed by the given mediacenter + + e.g. cm:name + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that licenses nodes (required) + :type mediacenter: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param properties: properties to fetch, use parent:: to include parent property values + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._export_mediacenter_licensed_nodes_serialize( + repository=repository, + mediacenter=mediacenter, + search_parameters=search_parameters, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def export_mediacenter_licensed_nodes_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that licenses nodes")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + properties: Annotated[Optional[List[StrictStr]], Field(description="properties to fetch, use parent:: to include parent property values")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get nodes that are licensed by the given mediacenter + + e.g. cm:name + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that licenses nodes (required) + :type mediacenter: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param properties: properties to fetch, use parent:: to include parent property values + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._export_mediacenter_licensed_nodes_serialize( + repository=repository, + mediacenter=mediacenter, + search_parameters=search_parameters, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _export_mediacenter_licensed_nodes_serialize( + self, + repository, + mediacenter, + search_parameters, + sort_properties, + sort_ascending, + properties, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'properties': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + # process the query parameters + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if properties is not None: + + _query_params.append(('properties', properties)) + + # process the header parameters + # process the form parameters + # process the body parameter + if search_parameters is not None: + _body_params = search_parameters + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}/licenses/export', + 
path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_mediacenter_groups( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get groups that are managed by the given mediacenter + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_mediacenter_groups_serialize( + repository=repository, + mediacenter=mediacenter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_mediacenter_groups_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get groups that are managed by the given mediacenter + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_mediacenter_groups_serialize( + repository=repository, + mediacenter=mediacenter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_mediacenter_groups_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get groups that are managed by the given mediacenter + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_mediacenter_groups_serialize( + repository=repository, + mediacenter=mediacenter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_mediacenter_groups_serialize( + self, + repository, + mediacenter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_mediacenter_licensed_nodes( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that licenses nodes")], + searchword: Annotated[StrictStr, Field(description="searchword of licensed nodes")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get nodes that are licensed by the given mediacenter + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that licenses nodes (required) + :type mediacenter: str + :param searchword: searchword of licensed nodes (required) + :type searchword: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
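+
+        A sketch of a licensed-nodes search, assuming the ``api`` instance built in the
+        ``get_mediacenter_groups_with_http_info`` example above; the ``SearchParameters``
+        import path, its ``criteria`` field and all values are assumptions or placeholders,
+        so consult the generated model for the actual required fields::
+
+            from edu_sharing_client.models.search_parameters import SearchParameters
+
+            result = api.get_mediacenter_licensed_nodes(
+                repository="-home-",
+                mediacenter="GROUP_MEDIACENTER_example",          # placeholder authorityName
+                searchword="biologie",                            # placeholder search word
+                search_parameters=SearchParameters(criteria=[]),  # minimal body; field name assumed
+                max_items=10,
+                skip_count=0,
+            )
+            print(result)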
+ """ # noqa: E501 + + _param = self._get_mediacenter_licensed_nodes_serialize( + repository=repository, + mediacenter=mediacenter, + searchword=searchword, + search_parameters=search_parameters, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_mediacenter_licensed_nodes_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that licenses nodes")], + searchword: Annotated[StrictStr, Field(description="searchword of licensed nodes")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get nodes that are licensed by the given mediacenter + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that licenses nodes (required) + :type mediacenter: str + :param searchword: searchword of licensed nodes (required) + :type searchword: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_mediacenter_licensed_nodes_serialize( + repository=repository, + mediacenter=mediacenter, + searchword=searchword, + search_parameters=search_parameters, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_mediacenter_licensed_nodes_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that licenses nodes")], + searchword: Annotated[StrictStr, Field(description="searchword of licensed nodes")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get nodes that are licensed by the given mediacenter + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that licenses nodes (required) + :type mediacenter: str + :param searchword: searchword of licensed nodes (required) + :type searchword: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_mediacenter_licensed_nodes_serialize( + repository=repository, + mediacenter=mediacenter, + searchword=searchword, + search_parameters=search_parameters, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_mediacenter_licensed_nodes_serialize( + self, + repository, + mediacenter, + searchword, + search_parameters, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + # process the query parameters + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + if searchword is not None: + + _query_params.append(('searchword', searchword)) + + # process the header parameters + # process the form parameters + # process the body parameter + if search_parameters is not None: + _body_params = search_parameters + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}/licenses', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_mediacenters( + self, + repository: Annotated[StrictStr, Field(description="ID of 
repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get mediacenters in the repository. + + Only shows the one available/managing the current user (only admin can access all) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_mediacenters_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_mediacenters_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get mediacenters in the repository. + + Only shows the one available/managing the current user (only admin can access all) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_mediacenters_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_mediacenters_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get mediacenters in the repository. + + Only shows the one available/managing the current user (only admin can access all) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
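+
+        When the deserialized result is not needed, this ``*_without_preload_content``
+        variant hands back the raw HTTP response; a brief sketch, again assuming the
+        ``api`` instance from the ``get_mediacenter_groups_with_http_info`` example
+        (the ``status``/``read()`` attribute names follow the stock OpenAPI Generator
+        response wrapper and are an assumption)::
+
+            raw = api.get_mediacenters_without_preload_content(repository="-home-")
+            print(raw.status)  # HTTP status code
+            print(raw.read())  # undeserialized response body (bytes)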
+ """ # noqa: E501 + + _param = self._get_mediacenters_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_mediacenters_serialize( + self, + repository, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/mediacenter/v1/mediacenter/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_mc_org_connections( + self, + mc_orgs: Annotated[Dict[str, Any], Field(description="Mediacenter Organisation Connection csv to import")], + remove_schools_from_mc: Annotated[Optional[StrictBool], Field(description="removeSchoolsFromMC")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> McOrgConnectResult: + """Import Mediacenter Organisation Connection + + Import Mediacenter Organisation Connection. + + :param mc_orgs: Mediacenter Organisation Connection csv to import (required) + :type mc_orgs: object + :param remove_schools_from_mc: removeSchoolsFromMC + :type remove_schools_from_mc: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._import_mc_org_connections_serialize( + mc_orgs=mc_orgs, + remove_schools_from_mc=remove_schools_from_mc, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "McOrgConnectResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_mc_org_connections_with_http_info( + self, + mc_orgs: Annotated[Dict[str, Any], Field(description="Mediacenter Organisation Connection csv to import")], + remove_schools_from_mc: Annotated[Optional[StrictBool], Field(description="removeSchoolsFromMC")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[McOrgConnectResult]: + """Import Mediacenter Organisation Connection + + Import Mediacenter Organisation Connection. + + :param mc_orgs: Mediacenter Organisation Connection csv to import (required) + :type mc_orgs: object + :param remove_schools_from_mc: removeSchoolsFromMC + :type remove_schools_from_mc: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
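+
+        A rough sketch of the CSV import call, assuming the ``api`` instance from the
+        ``get_mediacenter_groups_with_http_info`` example; the generated signature types
+        the upload as a plain object, so the payload below is only a placeholder whose
+        shape has not been verified against a running repository::
+
+            result = api.import_mc_org_connections_with_http_info(
+                mc_orgs={"file": "mc_org_connections.csv"},  # placeholder payload, shape not verified
+                remove_schools_from_mc=False,
+            )
+            print(result.status_code)
+            print(result.data)  # McOrgConnectResult on success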
+ """ # noqa: E501 + + _param = self._import_mc_org_connections_serialize( + mc_orgs=mc_orgs, + remove_schools_from_mc=remove_schools_from_mc, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "McOrgConnectResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_mc_org_connections_without_preload_content( + self, + mc_orgs: Annotated[Dict[str, Any], Field(description="Mediacenter Organisation Connection csv to import")], + remove_schools_from_mc: Annotated[Optional[StrictBool], Field(description="removeSchoolsFromMC")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Import Mediacenter Organisation Connection + + Import Mediacenter Organisation Connection. + + :param mc_orgs: Mediacenter Organisation Connection csv to import (required) + :type mc_orgs: object + :param remove_schools_from_mc: removeSchoolsFromMC + :type remove_schools_from_mc: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_mc_org_connections_serialize( + mc_orgs=mc_orgs, + remove_schools_from_mc=remove_schools_from_mc, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "McOrgConnectResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_mc_org_connections_serialize( + self, + mc_orgs, + remove_schools_from_mc, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if remove_schools_from_mc is not None: + + _query_params.append(('removeSchoolsFromMC', remove_schools_from_mc)) + + # process the header parameters + # process the form parameters + if mc_orgs is not None: + _form_params.append(('mcOrgs', mc_orgs)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mediacenter/v1/import/mc_org', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_mediacenters( + self, + mediacenters: Annotated[Dict[str, Any], Field(description="Mediacenters csv to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> MediacentersImportResult: + """Import mediacenters + + Import mediacenters. + + :param mediacenters: Mediacenters csv to import (required) + :type mediacenters: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._import_mediacenters_serialize( + mediacenters=mediacenters, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MediacentersImportResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_mediacenters_with_http_info( + self, + mediacenters: Annotated[Dict[str, Any], Field(description="Mediacenters csv to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[MediacentersImportResult]: + """Import mediacenters + + Import mediacenters. + + :param mediacenters: Mediacenters csv to import (required) + :type mediacenters: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
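+
+        The import endpoints share the same calling pattern; a sketch with basic error
+        handling, assuming the ``api`` instance from the first example, the generated
+        ``ApiException`` class under ``edu_sharing_client.exceptions``, and a placeholder
+        CSV payload::
+
+            from edu_sharing_client.exceptions import ApiException
+
+            try:
+                outcome = api.import_mediacenters(
+                    mediacenters={"file": "mediacenters.csv"},  # placeholder payload, shape not verified
+                )
+                print(outcome)  # MediacentersImportResult
+            except ApiException as err:
+                print(err.status, err.body)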
+ """ # noqa: E501 + + _param = self._import_mediacenters_serialize( + mediacenters=mediacenters, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MediacentersImportResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_mediacenters_without_preload_content( + self, + mediacenters: Annotated[Dict[str, Any], Field(description="Mediacenters csv to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Import mediacenters + + Import mediacenters. + + :param mediacenters: Mediacenters csv to import (required) + :type mediacenters: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_mediacenters_serialize( + mediacenters=mediacenters, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "MediacentersImportResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_mediacenters_serialize( + self, + mediacenters, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + if mediacenters is not None: + _form_params.append(('mediacenters', mediacenters)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mediacenter/v1/import/mediacenters', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_organisations( + self, + organisations: Annotated[Dict[str, Any], Field(description="Organisations csv to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> OrganisationsImportResult: + """Import Organisations + + Import Organisations. + + :param organisations: Organisations csv to import (required) + :type organisations: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._import_organisations_serialize( + organisations=organisations, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OrganisationsImportResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_organisations_with_http_info( + self, + organisations: Annotated[Dict[str, Any], Field(description="Organisations csv to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[OrganisationsImportResult]: + """Import Organisations + + Import Organisations. + + :param organisations: Organisations csv to import (required) + :type organisations: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
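+
+        Organisations are imported the same way; a short sketch using the ``api``
+        instance from the first example (payload again only a placeholder)::
+
+            orgs_result = api.import_organisations(
+                organisations={"file": "organisations.csv"},  # placeholder payload, shape not verified
+            )
+            print(orgs_result)  # OrganisationsImportResult on success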
+ """ # noqa: E501 + + _param = self._import_organisations_serialize( + organisations=organisations, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OrganisationsImportResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_organisations_without_preload_content( + self, + organisations: Annotated[Dict[str, Any], Field(description="Organisations csv to import")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Import Organisations + + Import Organisations. + + :param organisations: Organisations csv to import (required) + :type organisations: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_organisations_serialize( + organisations=organisations, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OrganisationsImportResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_organisations_serialize( + self, + organisations, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + if organisations is not None: + _form_params.append(('organisations', organisations)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/mediacenter/v1/import/organisations', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_mediacenter_group( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + group: Annotated[StrictStr, Field(description="authorityName of the group that should not longer be managed by that mediacenter")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """delete a group that is managed by the given mediacenter + + admin rights are required. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param group: authorityName of the group that should not longer be managed by that mediacenter (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_mediacenter_group_serialize( + repository=repository, + mediacenter=mediacenter, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_mediacenter_group_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + group: Annotated[StrictStr, Field(description="authorityName of the group that should not longer be managed by that mediacenter")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """delete a group that is managed by the given mediacenter + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param group: authorityName of the group that should not longer be managed by that mediacenter (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_mediacenter_group_serialize( + repository=repository, + mediacenter=mediacenter, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_mediacenter_group_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + mediacenter: Annotated[StrictStr, Field(description="authorityName of the mediacenter that should manage the group")], + group: Annotated[StrictStr, Field(description="authorityName of the group that should not longer be managed by that mediacenter")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete a group that is managed by the given mediacenter + + admin rights are required. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param mediacenter: authorityName of the mediacenter that should manage the group (required) + :type mediacenter: str + :param group: authorityName of the group that should not longer be managed by that mediacenter (required) + :type group: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_mediacenter_group_serialize( + repository=repository, + mediacenter=mediacenter, + group=group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_mediacenter_group_serialize( + self, + repository, + mediacenter, + group, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if mediacenter is not None: + _path_params['mediacenter'] = mediacenter + if group is not None: + _path_params['group'] = group + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/mediacenter/v1/mediacenter/{repository}/{mediacenter}/manages/{group}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/networkv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/networkv1_api.py new file mode 100644 index 00000000..016810ad --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/networkv1_api.py @@ -0,0 +1,1419 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import Optional +from typing_extensions import Annotated +from edu_sharing_client.models.repo_entries import RepoEntries +from edu_sharing_client.models.service import Service +from edu_sharing_client.models.stored_service import StoredService + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class NETWORKV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_service( + self, + service: Annotated[Optional[Service], Field(description="Service data object")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StoredService: + """Register service. + + Register a new service. + + :param service: Service data object + :type service: Service + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_service_serialize( + service=service, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_service_with_http_info( + self, + service: Annotated[Optional[Service], Field(description="Service data object")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StoredService]: + """Register service. + + Register a new service. + + :param service: Service data object + :type service: Service + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_service_serialize( + service=service, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_service_without_preload_content( + self, + service: Annotated[Optional[Service], Field(description="Service data object")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Register service. 
+ + Register a new service. + + :param service: Service data object + :type service: Service + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_service_serialize( + service=service, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_service_serialize( + self, + service, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if service is not None: + _body_params = service + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/network/v1/services', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_repositories( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: 
Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RepoEntries: + """Get repositories. + + Get repositories. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_repositories_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepoEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_repositories_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RepoEntries]: + """Get repositories. + + Get repositories. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
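+
+        Illustrative comparison of the three generated call styles (client
+        configuration omitted; ``NETWORKV1Api()`` falls back to
+        ``ApiClient.get_default()``)::
+
+            api = NETWORKV1Api()
+
+            # deserialized RepoEntries model
+            repos = api.get_repositories()
+            # ApiResponse wrapper: .data holds the RepoEntries, HTTP status and headers ride along
+            entries = api.get_repositories_with_http_info().data
+            # raw, not-yet-read HTTP response; the caller handles the body itself
+            raw = api.get_repositories_without_preload_content()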
+ """ # noqa: E501 + + _param = self._get_repositories_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepoEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_repositories_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get repositories. + + Get repositories. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_repositories_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RepoEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_repositories_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/network/v1/repositories', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_service( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StoredService: + """Get own service. + + Get the servic entry from the current repository. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
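+
+        Minimal sketch (the default ``ApiClient`` is assumed to be configured; the
+        timeout pair is just an example value)::
+
+            api = NETWORKV1Api()
+            # (connection, read) timeouts in seconds; returns a StoredService model
+            own_service = api.get_service(_request_timeout=(3.0, 10.0))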
+ """ # noqa: E501 + + _param = self._get_service_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_service_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StoredService]: + """Get own service. + + Get the servic entry from the current repository. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_service_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_service_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get own service. + + Get the servic entry from the current repository. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_service_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_service_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/network/v1/service', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_services( + self, + query: Annotated[Optional[StrictStr], Field(description="search or filter for services")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get services. + + Get registerted services. + + :param query: search or filter for services + :type query: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_services_serialize( + query=query, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_services_with_http_info( + self, + query: Annotated[Optional[StrictStr], Field(description="search or filter for services")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get services. + + Get registerted services. + + :param query: search or filter for services + :type query: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_services_serialize( + query=query, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_services_without_preload_content( + self, + query: Annotated[Optional[StrictStr], Field(description="search or filter for services")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get services. + + Get registerted services. + + :param query: search or filter for services + :type query: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_services_serialize( + query=query, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_services_serialize( + self, + query, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if query is not None: + + _query_params.append(('query', query)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/network/v1/services', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_service( + self, + id: Annotated[StrictStr, Field(description="Service id")], + service: Annotated[Optional[Service], Field(description="Service data object")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StoredService: + """Update a service. + + Update an existing service. + + :param id: Service id (required) + :type id: str + :param service: Service data object + :type service: Service + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_service_serialize( + id=id, + service=service, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_service_with_http_info( + self, + id: Annotated[StrictStr, Field(description="Service id")], + service: Annotated[Optional[Service], Field(description="Service data object")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StoredService]: + """Update a service. + + Update an existing service. + + :param id: Service id (required) + :type id: str + :param service: Service data object + :type service: Service + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_service_serialize( + id=id, + service=service, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_service_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="Service id")], + service: Annotated[Optional[Service], Field(description="Service data object")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update a service. + + Update an existing service. + + :param id: Service id (required) + :type id: str + :param service: Service data object + :type service: Service + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_service_serialize( + id=id, + service=service, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StoredService", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_service_serialize( + self, + id, + service, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if service is not None: + _body_params = service + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/network/v1/services/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/nodev1_api.py b/edu_sharing_openapi/edu_sharing_client/api/nodev1_api.py new file mode 100644 index 00000000..73f844f2 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/nodev1_api.py @@ -0,0 +1,15161 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictBytes, StrictInt, StrictStr, field_validator +from typing import Any, Dict, List, Optional, Union +from typing_extensions import Annotated +from edu_sharing_client.models.acl import ACL +from edu_sharing_client.models.handle_param import HandleParam +from edu_sharing_client.models.json_object import JSONObject +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.models.node_locked import NodeLocked +from edu_sharing_client.models.node_permission_entry import NodePermissionEntry +from edu_sharing_client.models.node_remote import NodeRemote +from edu_sharing_client.models.node_share import NodeShare +from edu_sharing_client.models.node_stats import NodeStats +from edu_sharing_client.models.node_text import NodeText +from edu_sharing_client.models.node_version_entries import NodeVersionEntries +from edu_sharing_client.models.node_version_entry import NodeVersionEntry +from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries +from edu_sharing_client.models.parent_entries import ParentEntries +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.models.workflow_history import WorkflowHistory + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class NODEV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_aspects( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[List[StrictStr], Field(description="aspect name, e.g. ccm:lomreplication")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Add aspect to node. + + Add aspect to node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: aspect name, e.g. ccm:lomreplication (required) + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_aspects_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_aspects_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[List[StrictStr], Field(description="aspect name, e.g. ccm:lomreplication")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Add aspect to node. + + Add aspect to node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: aspect name, e.g. ccm:lomreplication (required) + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
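+
+        Illustrative sketch (the node id is a placeholder, the default ``ApiClient``
+        is assumed to be configured, and the aspect name is the example from above)::
+
+            api = NODEV1Api()
+            resp = api.add_aspects_with_http_info(
+                repository="-home-",
+                node="00000000-0000-0000-0000-000000000000",  # placeholder node id
+                request_body=["ccm:lomreplication"],          # aspect names to add
+            )
+            node_entry = resp.data  # the updated NodeEntry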
+ """ # noqa: E501 + + _param = self._add_aspects_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_aspects_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[List[StrictStr], Field(description="aspect name, e.g. ccm:lomreplication")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add aspect to node. + + Add aspect to node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: aspect name, e.g. ccm:lomreplication (required) + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_aspects_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_aspects_serialize( + self, + repository, + node, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'request_body': '', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/node/v1/nodes/{repository}/{node}/aspects', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def add_workflow_history( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + workflow_history: Annotated[WorkflowHistory, Field(description="The history entry to put (editor and time can be null and will be filled automatically)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Add workflow. + + Add workflow entry to node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param workflow_history: The history entry to put (editor and time can be null and will be filled automatically) (required) + :type workflow_history: WorkflowHistory + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_workflow_history_serialize( + repository=repository, + node=node, + workflow_history=workflow_history, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_workflow_history_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + workflow_history: Annotated[WorkflowHistory, Field(description="The history entry to put (editor and time can be null and will be filled automatically)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Add workflow. + + Add workflow entry to node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param workflow_history: The history entry to put (editor and time can be null and will be filled automatically) (required) + :type workflow_history: WorkflowHistory + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_workflow_history_serialize( + repository=repository, + node=node, + workflow_history=workflow_history, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_workflow_history_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + workflow_history: Annotated[WorkflowHistory, Field(description="The history entry to put (editor and time can be null and will be filled automatically)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Add workflow. + + Add workflow entry to node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param workflow_history: The history entry to put (editor and time can be null and will be filled automatically) (required) + :type workflow_history: WorkflowHistory + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_workflow_history_serialize( + repository=repository, + node=node, + workflow_history=workflow_history, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_workflow_history_serialize( + self, + repository, + node, + workflow_history, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if workflow_history is not None: + _body_params = workflow_history + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/node/v1/nodes/{repository}/{node}/workflow', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_content1( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + file: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="file upload")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Change content of node. + + Change content of node. 
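A sketch of the change_content1 call documented here; `node_api` as above, and the file name is a hypothetical local path. mimetype and versionComment travel as query parameters, while the file itself is sent as a multipart/form-data part:

    with open("material.pdf", "rb") as fh:           # hypothetical local file
        updated = node_api.change_content1(
            repository="-home-",
            node="<node-id>",
            mimetype="application/pdf",
            version_comment="replaced the PDF",      # empty/None = no new version
            file=fh.read(),                          # bytes or str, per the signature
        )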
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param file: file upload + :type file: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_content1_serialize( + repository=repository, + node=node, + mimetype=mimetype, + version_comment=version_comment, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_content1_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + file: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="file upload")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Change content of node. + + Change content of node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param file: file upload + :type file: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_content1_serialize( + repository=repository, + node=node, + mimetype=mimetype, + version_comment=version_comment, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_content1_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + file: Annotated[Optional[Union[StrictBytes, StrictStr]], Field(description="file upload")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Change content of node. + + Change content of node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param file: file upload + :type file: bytearray + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_content1_serialize( + repository=repository, + node=node, + mimetype=mimetype, + version_comment=version_comment, + file=file, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_content1_serialize( + self, + repository, + node, + mimetype, + version_comment, + file, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + if mimetype is not None: + + _query_params.append(('mimetype', mimetype)) + + # process the header parameters + # process the form parameters + if file is not None: + _files['file'] = file + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + 
_auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/content', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_content_as_text( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Change content of node as text. + + Change content of node as text. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
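change_content_as_text targets the .../textContent endpoint; note that only mimetype and versionComment appear in the signature above (they are serialized as query parameters further below), and no text body parameter is exposed here. A minimal call sketch, `node_api` as above:

    entry = node_api.change_content_as_text(
        repository="-home-",
        node="<node-id>",
        mimetype="text/plain",
        version_comment="",    # empty = no new version, per the parameter description
    )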
+ """ # noqa: E501 + + _param = self._change_content_as_text_serialize( + repository=repository, + node=node, + mimetype=mimetype, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_content_as_text_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Change content of node as text. + + Change content of node as text. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_content_as_text_serialize( + repository=repository, + node=node, + mimetype=mimetype, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_content_as_text_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no new version, otherwise new version is generated")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Change content of node as text. + + Change content of node as text. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param version_comment: comment, leave empty = no new version, otherwise new version is generated + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_content_as_text_serialize( + repository=repository, + node=node, + mimetype=mimetype, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_content_as_text_serialize( + self, + repository, + node, + mimetype, + version_comment, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + if mimetype is not None: + + _query_params.append(('mimetype', mimetype)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/textContent', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_metadata( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Change metadata of node. + + Change metadata of node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_metadata_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_metadata_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Change metadata of node. + + Change metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_metadata_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_metadata_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Change metadata of node. + + Change metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_metadata_serialize( + repository=repository, + node=node, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_metadata_serialize( + self, + repository, + node, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/node/v1/nodes/{repository}/{node}/metadata', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_metadata_with_versioning( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version_comment: Annotated[StrictStr, Field(description="comment")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Change metadata of node (new version). + + Change metadata of node (new version). 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version_comment: comment (required) + :type version_comment: str + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_metadata_with_versioning_serialize( + repository=repository, + node=node, + version_comment=version_comment, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_metadata_with_versioning_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version_comment: Annotated[StrictStr, Field(description="comment")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Change metadata of node (new version). + + Change metadata of node (new version). + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version_comment: comment (required) + :type version_comment: str + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_metadata_with_versioning_serialize( + repository=repository, + node=node, + version_comment=version_comment, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_metadata_with_versioning_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version_comment: Annotated[StrictStr, Field(description="comment")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Change metadata of node (new version). + + Change metadata of node (new version). + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version_comment: comment (required) + :type version_comment: str + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_metadata_with_versioning_serialize( + repository=repository, + node=node, + version_comment=version_comment, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_metadata_with_versioning_serialize( + self, + repository, + node, + version_comment, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/metadata', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_preview( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + create_version: Annotated[Optional[StrictBool], Field(description="create a node version")] = None, + image: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + 
_content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Change preview of node. + + Change preview of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param create_version: create a node version + :type create_version: bool + :param image: + :type image: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_preview_serialize( + repository=repository, + node=node, + mimetype=mimetype, + create_version=create_version, + image=image, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_preview_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + create_version: Annotated[Optional[StrictBool], Field(description="create a node version")] = None, + image: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Change preview of node. + + Change preview of node. 
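change_preview (above) sends mimetype and createVersion as query parameters and the image as a form part ('image' in _form_params). Note that the image parameter is typed Optional[Dict[str, Any]] in this generated signature, so the @validate_call layer may not accept raw bytes directly; the sketch below therefore only illustrates the call shape, with the payload form left as an assumption:

    preview_payload = ...  # image data in whatever form this client version accepts (assumed)
    entry = node_api.change_preview(
        repository="-home-",
        node="<node-id>",
        mimetype="image/png",
        create_version=False,
        image=preview_payload,
    )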
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param create_version: create a node version + :type create_version: bool + :param image: + :type image: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_preview_serialize( + repository=repository, + node=node, + mimetype=mimetype, + create_version=create_version, + image=image, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_preview_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + mimetype: Annotated[StrictStr, Field(description="MIME-Type")], + create_version: Annotated[Optional[StrictBool], Field(description="create a node version")] = None, + image: Optional[Dict[str, Any]] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Change preview of node. + + Change preview of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param mimetype: MIME-Type (required) + :type mimetype: str + :param create_version: create a node version + :type create_version: bool + :param image: + :type image: object + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._change_preview_serialize( + repository=repository, + node=node, + mimetype=mimetype, + create_version=create_version, + image=image, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_preview_serialize( + self, + repository, + node, + mimetype, + create_version, + image, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if mimetype is not None: + + _query_params.append(('mimetype', mimetype)) + + if create_version is not None: + + _query_params.append(('createVersion', create_version)) + + # process the header parameters + # process the form parameters + if image is not None: + _form_params.append(('image', image)) + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'multipart/form-data' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/preview', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def change_template_metadata( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" 
for home repository)")],
+        node: Annotated[StrictStr, Field(description="ID of node")],
+        enable: Annotated[StrictBool, Field(description="Is the inheritance currently enabled")],
+        request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")],
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> NodeEntry:
+        """Set the metadata template for this folder.
+
+        All the given metadata will be inherited to child nodes.
+
+        :param repository: ID of repository (or \"-home-\" for home repository) (required)
+        :type repository: str
+        :param node: ID of node (required)
+        :type node: str
+        :param enable: Is the inheritance currently enabled (required)
+        :type enable: bool
+        :param request_body: properties (required)
+        :type request_body: Dict[str, List[str]]
+        :param _request_timeout: timeout setting for this request. If one
+                                 number provided, it will be total request
+                                 timeout. It can also be a pair (tuple) of
+                                 (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
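+
+        A short illustrative call (assuming node_api is an instance of this
+        API class bound to a configured ApiClient, as in the change_preview
+        example; the folder ID is a placeholder and the property key follows
+        the convention documented for create_child)::
+
+            # enable the template and inherit a description to all children
+            entry = node_api.change_template_metadata(
+                repository="-home-",
+                node="<folder-id>",
+                enable=True,
+                request_body={
+                    "{http://www.alfresco.org/model/content/1.0}description": ["inherited"]
+                },
+            )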
+ """ # noqa: E501 + + _param = self._change_template_metadata_serialize( + repository=repository, + node=node, + enable=enable, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def change_template_metadata_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + enable: Annotated[StrictBool, Field(description="Is the inherition currently enabled")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Set the metadata template for this folder. + + All the given metadata will be inherited to child nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param enable: Is the inherition currently enabled (required) + :type enable: bool + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_template_metadata_serialize( + repository=repository, + node=node, + enable=enable, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def change_template_metadata_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + enable: Annotated[StrictBool, Field(description="Is the inherition currently enabled")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set the metadata template for this folder. + + All the given metadata will be inherited to child nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param enable: Is the inherition currently enabled (required) + :type enable: bool + :param request_body: properties (required) + :type request_body: Dict[str, List[str]] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._change_template_metadata_serialize( + repository=repository, + node=node, + enable=enable, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _change_template_metadata_serialize( + self, + repository, + node, + enable, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if enable is not None: + + _query_params.append(('enable', enable)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/node/v1/nodes/{repository}/{node}/metadata/template', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def copy_metadata( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + var_from: Annotated[StrictStr, Field(description="The node where to copy the metadata from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Copy metadata from another node. + + Copies all common metadata from one note to another. Current user needs write access to the target node and read access to the source node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param var_from: The node where to copy the metadata from (required) + :type var_from: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._copy_metadata_serialize( + repository=repository, + node=node, + var_from=var_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def copy_metadata_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + var_from: Annotated[StrictStr, Field(description="The node where to copy the metadata from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Copy metadata from another node. + + Copies all common metadata from one note to another. Current user needs write access to the target node and read access to the source node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param var_from: The node where to copy the metadata from (required) + :type var_from: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._copy_metadata_serialize( + repository=repository, + node=node, + var_from=var_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def copy_metadata_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + var_from: Annotated[StrictStr, Field(description="The node where to copy the metadata from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Copy metadata from another node. + + Copies all common metadata from one note to another. Current user needs write access to the target node and read access to the source node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param var_from: The node where to copy the metadata from (required) + :type var_from: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
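+
+        Sketch of this raw-response variant (assuming node_api is an instance
+        of this API class; both node IDs are placeholders). The HTTP response
+        is returned without being read or deserialized, so status checking and
+        body handling are left to the caller::
+
+            raw = node_api.copy_metadata_without_preload_content(
+                repository="-home-",
+                node="<target-node-id>",      # node that receives the metadata
+                var_from="<source-node-id>",  # node the metadata is copied from
+            )
+            if raw.status == 200:
+                payload = raw.read()  # raw JSON bytes of the NodeEntry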
+ """ # noqa: E501 + + _param = self._copy_metadata_serialize( + repository=repository, + node=node, + var_from=var_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _copy_metadata_serialize( + self, + repository, + node, + var_from, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if var_from is not None: + _path_params['from'] = var_from + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/node/v1/nodes/{repository}/{node}/metadata/copy/{from}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_child( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node use -userhome- for userhome or -inbox- for inbox node")], + type: Annotated[StrictStr, Field(description="type of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + aspects: Annotated[Optional[List[StrictStr]], Field(description="aspects of node")] = None, + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + assoc_type: Annotated[Optional[StrictStr], Field(description="Association type, can be empty")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create a new child. + + Create a new child. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node use -userhome- for userhome or -inbox- for inbox node (required) + :type node: str + :param type: type of node (required) + :type type: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param aspects: aspects of node + :type aspects: List[str] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param assoc_type: Association type, can be empty + :type assoc_type: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_child_serialize( + repository=repository, + node=node, + type=type, + request_body=request_body, + aspects=aspects, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + assoc_type=assoc_type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_child_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node use -userhome- for userhome or -inbox- for inbox node")], + type: Annotated[StrictStr, Field(description="type of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + aspects: Annotated[Optional[List[StrictStr]], Field(description="aspects of node")] = None, + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + assoc_type: Annotated[Optional[StrictStr], Field(description="Association type, can be empty")] = None, + 
_request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create a new child. + + Create a new child. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node use -userhome- for userhome or -inbox- for inbox node (required) + :type node: str + :param type: type of node (required) + :type type: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param aspects: aspects of node + :type aspects: List[str] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param assoc_type: Association type, can be empty + :type assoc_type: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
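+
+        Illustrative sketch (assuming node_api is an instance of this API
+        class; the node type is a placeholder and the properties dict reuses
+        the request_body example documented above)::
+
+            result = node_api.create_child_with_http_info(
+                repository="-home-",
+                node="-userhome-",      # create below the user's home folder
+                type="<node-type>",     # placeholder node type
+                request_body={
+                    "{http://www.alfresco.org/model/content/1.0}name": ["test"]
+                },
+                rename_if_exists=True,
+            )
+            created = result.data       # NodeEntry of the new child
+            print(result.status_code)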
+ """ # noqa: E501 + + _param = self._create_child_serialize( + repository=repository, + node=node, + type=type, + request_body=request_body, + aspects=aspects, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + assoc_type=assoc_type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_child_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node use -userhome- for userhome or -inbox- for inbox node")], + type: Annotated[StrictStr, Field(description="type of node")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + aspects: Annotated[Optional[List[StrictStr]], Field(description="aspects of node")] = None, + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + assoc_type: Annotated[Optional[StrictStr], Field(description="Association type, can be empty")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new child. + + Create a new child. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node use -userhome- for userhome or -inbox- for inbox node (required) + :type node: str + :param type: type of node (required) + :type type: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param aspects: aspects of node + :type aspects: List[str] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param assoc_type: Association type, can be empty + :type assoc_type: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_child_serialize( + repository=repository, + node=node, + type=type, + request_body=request_body, + aspects=aspects, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + assoc_type=assoc_type, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_child_serialize( + self, + repository, + node, + type, + request_body, + aspects, + rename_if_exists, + version_comment, + assoc_type, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'aspects': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if type is not None: + + _query_params.append(('type', type)) + + if aspects is not None: + + _query_params.append(('aspects', aspects)) + + if rename_if_exists is not None: + + _query_params.append(('renameIfExists', rename_if_exists)) + + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + if assoc_type is not None: + + _query_params.append(('assocType', assoc_type)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/children', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + 
collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_child_by_copying( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + with_children: Annotated[StrictBool, Field(description="flag for children")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create a new child by copying. + + Create a new child by copying. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param with_children: flag for children (required) + :type with_children: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
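+
+        Minimal sketch (assuming node_api is an instance of this API class;
+        both IDs are placeholders)::
+
+            entry = node_api.create_child_by_copying(
+                repository="-home-",
+                node="<target-folder-id>",  # parent that receives the copy
+                source="<source-node-id>",  # node to copy
+                with_children=True,         # copy child nodes as well
+            )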
+ """ # noqa: E501 + + _param = self._create_child_by_copying_serialize( + repository=repository, + node=node, + source=source, + with_children=with_children, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_child_by_copying_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + with_children: Annotated[StrictBool, Field(description="flag for children")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create a new child by copying. + + Create a new child by copying. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param with_children: flag for children (required) + :type with_children: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_child_by_copying_serialize( + repository=repository, + node=node, + source=source, + with_children=with_children, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_child_by_copying_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + with_children: Annotated[StrictBool, Field(description="flag for children")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new child by copying. + + Create a new child by copying. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param with_children: flag for children (required) + :type with_children: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_child_by_copying_serialize( + repository=repository, + node=node, + source=source, + with_children=with_children, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_child_by_copying_serialize( + self, + repository, + node, + source, + with_children, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if source is not None: + + _query_params.append(('source', source)) + + if with_children is not None: + + _query_params.append(('withChildren', with_children)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/children/_copy', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_child_by_moving( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create a new child by moving. + + Create a new child by moving. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_child_by_moving_serialize( + repository=repository, + node=node, + source=source, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_child_by_moving_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create a new child by moving. + + Create a new child by moving. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
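+
+        Minimal sketch (assuming node_api is an instance of this API class;
+        both IDs are placeholders). Unlike the copy variant, the source node
+        itself is re-parented::
+
+            result = node_api.create_child_by_moving_with_http_info(
+                repository="-home-",
+                node="<target-folder-id>",  # new parent of the moved node
+                source="<source-node-id>",  # node to move
+            )
+            moved = result.data             # NodeEntry at its new location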
+ """ # noqa: E501 + + _param = self._create_child_by_moving_serialize( + repository=repository, + node=node, + source=source, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_child_by_moving_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new child by moving. + + Create a new child by moving. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_child_by_moving_serialize( + repository=repository, + node=node, + source=source, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_child_by_moving_serialize( + self, + repository, + node, + source, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if source is not None: + + _query_params.append(('source', source)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/children/_move', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_fork_of_node( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + with_children: Annotated[StrictBool, Field(description="flag for children")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create a copy of a node by creating a forked version (variant). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param with_children: flag for children (required) + :type with_children: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_fork_of_node_serialize( + repository=repository, + node=node, + source=source, + with_children=with_children, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_fork_of_node_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + with_children: Annotated[StrictBool, Field(description="flag for children")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create a copy of a node by creating a forked version (variant). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param with_children: flag for children (required) + :type with_children: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_fork_of_node_serialize( + repository=repository, + node=node, + source=source, + with_children=with_children, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_fork_of_node_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node")], + source: Annotated[StrictStr, Field(description="ID of source node")], + with_children: Annotated[StrictBool, Field(description="flag for children")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a copy of a node by creating a forked version (variant). + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (required) + :type node: str + :param source: ID of source node (required) + :type source: str + :param with_children: flag for children (required) + :type with_children: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
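+
+        A minimal usage sketch of forking a node without its children
+        (assuming ``node_api`` is an instance of this API class and the
+        default urllib3-based transport; the node IDs are placeholders)::
+
+            raw = node_api.create_fork_of_node_without_preload_content(
+                repository="-home-",
+                node="<target-parent-id>",
+                source="<node-id-to-fork>",
+                with_children=False,       # fork only the node itself
+            )
+            if raw.status == 200:
+                forked_body = raw.read()   # body is only read on demand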
+ """ # noqa: E501 + + _param = self._create_fork_of_node_serialize( + repository=repository, + node=node, + source=source, + with_children=with_children, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_fork_of_node_serialize( + self, + repository, + node, + source, + with_children, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if source is not None: + + _query_params.append(('source', source)) + + if with_children is not None: + + _query_params.append(('withChildren', with_children)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/children/_fork', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_share( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + expiry_date: Annotated[Optional[StrictInt], Field(description="expiry date for this share, leave empty or -1 for unlimited")] = None, + password: Annotated[Optional[StrictStr], Field(description="password for this share, use none to not use a password")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeShare: + """Create a share for a node. 
+ + Create a new share for a node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param expiry_date: expiry date for this share, leave empty or -1 for unlimited + :type expiry_date: int + :param password: password for this share, use none to not use a password + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_share_serialize( + repository=repository, + node=node, + expiry_date=expiry_date, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeShare", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_share_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + expiry_date: Annotated[Optional[StrictInt], Field(description="expiry date for this share, leave empty or -1 for unlimited")] = None, + password: Annotated[Optional[StrictStr], Field(description="password for this share, use none to not use a password")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeShare]: + """Create a share for a node. + + Create a new share for a node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param expiry_date: expiry date for this share, leave empty or -1 for unlimited + :type expiry_date: int + :param password: password for this share, use none to not use a password + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_share_serialize( + repository=repository, + node=node, + expiry_date=expiry_date, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeShare", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_share_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + expiry_date: Annotated[Optional[StrictInt], Field(description="expiry date for this share, leave empty or -1 for unlimited")] = None, + password: Annotated[Optional[StrictStr], Field(description="password for this share, use none to not use a password")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a share for a node. + + Create a new share for a node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param expiry_date: expiry date for this share, leave empty or -1 for unlimited + :type expiry_date: int + :param password: password for this share, use none to not use a password + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._create_share_serialize( + repository=repository, + node=node, + expiry_date=expiry_date, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeShare", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_share_serialize( + self, + repository, + node, + expiry_date, + password, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if expiry_date is not None: + + _query_params.append(('expiryDate', expiry_date)) + + if password is not None: + + _query_params.append(('password', password)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/node/v1/nodes/{repository}/{node}/shares', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + recycle: Annotated[Optional[StrictBool], Field(description="move the node to recycle")] = None, + protocol: Annotated[Optional[StrictStr], Field(description="protocol")] = None, + store: Annotated[Optional[StrictStr], Field(description="store")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete node. + + Delete node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param recycle: move the node to recycle + :type recycle: bool + :param protocol: protocol + :type protocol: str + :param store: store + :type store: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_serialize( + repository=repository, + node=node, + recycle=recycle, + protocol=protocol, + store=store, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + recycle: Annotated[Optional[StrictBool], Field(description="move the node to recycle")] = None, + protocol: Annotated[Optional[StrictStr], Field(description="protocol")] = None, + store: Annotated[Optional[StrictStr], Field(description="store")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete node. + + Delete node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param recycle: move the node to recycle + :type recycle: bool + :param protocol: protocol + :type protocol: str + :param store: store + :type store: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_serialize( + repository=repository, + node=node, + recycle=recycle, + protocol=protocol, + store=store, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + recycle: Annotated[Optional[StrictBool], Field(description="move the node to recycle")] = None, + protocol: Annotated[Optional[StrictStr], Field(description="protocol")] = None, + store: Annotated[Optional[StrictStr], Field(description="store")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete node. + + Delete node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param recycle: move the node to recycle + :type recycle: bool + :param protocol: protocol + :type protocol: str + :param store: store + :type store: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_serialize( + repository=repository, + node=node, + recycle=recycle, + protocol=protocol, + store=store, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_serialize( + self, + repository, + node, + recycle, + protocol, + store, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if recycle is not None: + + _query_params.append(('recycle', recycle)) + + if protocol is not None: + + _query_params.append(('protocol', protocol)) + + if store is not None: + + _query_params.append(('store', store)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/node/v1/nodes/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_preview( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Delete preview of node. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_preview_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_preview_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Delete preview of node. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
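+
+        A minimal usage sketch of removing a node's preview and inspecting
+        the wrapped result (assuming ``node_api`` is an instance of this API
+        class; the node ID is a placeholder)::
+
+            api_response = node_api.delete_preview_with_http_info(
+                repository="-home-",
+                node="<node-id>",
+            )
+            updated_node = api_response.data   # NodeEntry without the preview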
+ """ # noqa: E501 + + _param = self._delete_preview_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_preview_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete preview of node. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
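+
+        A minimal usage sketch (assuming ``node_api`` is an instance of this
+        API class and the default urllib3-based transport; the node ID is a
+        placeholder)::
+
+            raw = node_api.delete_preview_without_preload_content(
+                repository="-home-",
+                node="<node-id>",
+            )
+            raw.read()   # consume the body explicitly when required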
+ """ # noqa: E501 + + _param = self._delete_preview_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_preview_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/node/v1/nodes/{repository}/{node}/preview', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_assocs( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + direction: Annotated[StrictStr, Field(description="Either where the given node should be the \"SOURCE\" or the \"TARGET\"")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + assoc_name: Annotated[Optional[StrictStr], Field(description="Association name (e.g. ccm:forkio).")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntries: + """Get related nodes. 
+ + Get nodes related based on an assoc. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param direction: Either where the given node should be the \"SOURCE\" or the \"TARGET\" (required) + :type direction: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param assoc_name: Association name (e.g. ccm:forkio). + :type assoc_name: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_assocs_serialize( + repository=repository, + node=node, + direction=direction, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + assoc_name=assoc_name, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_assocs_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + direction: Annotated[StrictStr, Field(description="Either where the given node should be the \"SOURCE\" or the \"TARGET\"")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + assoc_name: Annotated[Optional[StrictStr], Field(description="Association name (e.g. ccm:forkio).")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntries]: + """Get related nodes. + + Get nodes related based on an assoc. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param direction: Either where the given node should be the \"SOURCE\" or the \"TARGET\" (required) + :type direction: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param assoc_name: Association name (e.g. ccm:forkio). + :type assoc_name: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
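+
+        A minimal usage sketch of listing nodes related via an association
+        (assuming ``node_api`` is an instance of this API class; the node ID
+        is a placeholder)::
+
+            api_response = node_api.get_assocs_with_http_info(
+                repository="-home-",
+                node="<node-id>",
+                direction="SOURCE",        # the given node is the assoc source
+                assoc_name="ccm:forkio",   # association name, as in the spec
+                max_items=25,
+            )
+            related = api_response.data    # deserialized NodeEntries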
+ """ # noqa: E501 + + _param = self._get_assocs_serialize( + repository=repository, + node=node, + direction=direction, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + assoc_name=assoc_name, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_assocs_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + direction: Annotated[StrictStr, Field(description="Either where the given node should be the \"SOURCE\" or the \"TARGET\"")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + assoc_name: Annotated[Optional[StrictStr], Field(description="Association name (e.g. ccm:forkio).")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get related nodes. + + Get nodes related based on an assoc. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param direction: Either where the given node should be the \"SOURCE\" or the \"TARGET\" (required) + :type direction: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param assoc_name: Association name (e.g. ccm:forkio). + :type assoc_name: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_assocs_serialize( + repository=repository, + node=node, + direction=direction, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + assoc_name=assoc_name, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_assocs_serialize( + self, + repository, + node, + direction, + max_items, + skip_count, + sort_properties, + sort_ascending, + assoc_name, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if direction is not None: + + _query_params.append(('direction', direction)) + + if assoc_name is not None: + + _query_params.append(('assocName', assoc_name)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/assocs', + path_params=_path_params, + 
query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_children( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user)")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + filter: Annotated[Optional[List[StrictStr]], Field(description="filter by type files,folders")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + assoc_name: Annotated[Optional[StrictStr], Field(description="Filter for a specific association. May be empty")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntries: + """Get children of node. + + Get children of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user) (required) + :type node: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param filter: filter by type files,folders + :type filter: List[str] + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param assoc_name: Filter for a specific association. May be empty + :type assoc_name: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_children_serialize( + repository=repository, + node=node, + max_items=max_items, + skip_count=skip_count, + filter=filter, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + assoc_name=assoc_name, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_children_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user)")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + filter: Annotated[Optional[List[StrictStr]], Field(description="filter by type files,folders")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + assoc_name: Annotated[Optional[StrictStr], Field(description="Filter for a specific association. May be empty")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntries]: + """Get children of node. 
+ + Get children of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user) (required) + :type node: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param filter: filter by type files,folders + :type filter: List[str] + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param assoc_name: Filter for a specific association. May be empty + :type assoc_name: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
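# The _request_timeout parameter documented above takes either one total
# timeout or a (connection, read) pair. Continuing the sketch above
# (node_api as constructed there; the values are arbitrary):
children = node_api.get_children("-home-", "-userhome-", _request_timeout=30)          # single total timeout
children = node_api.get_children("-home-", "-userhome-", _request_timeout=(3.05, 27))  # connect / read split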
+ """ # noqa: E501 + + _param = self._get_children_serialize( + repository=repository, + node=node, + max_items=max_items, + skip_count=skip_count, + filter=filter, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + assoc_name=assoc_name, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_children_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user)")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + filter: Annotated[Optional[List[StrictStr]], Field(description="filter by type files,folders")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + assoc_name: Annotated[Optional[StrictStr], Field(description="Filter for a specific association. May be empty")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get children of node. + + Get children of node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of parent node (or \"-userhome-\" for home directory of current user, \"-shared_files-\" for shared folders, \"-to_me_shared_files\" for shared files for the user,\"-my_shared_files-\" for files shared by the user, \"-inbox-\" for the inbox, \"-workflow_receive-\" for files assigned by workflow, \"-saved_search-\" for saved searches of the user) (required) + :type node: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param filter: filter by type files,folders + :type filter: List[str] + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param assoc_name: Filter for a specific association. May be empty + :type assoc_name: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_children_serialize( + repository=repository, + node=node, + max_items=max_items, + skip_count=skip_count, + filter=filter, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + assoc_name=assoc_name, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_children_serialize( + self, + repository, + node, + max_items, + skip_count, + filter, + sort_properties, + sort_ascending, + assoc_name, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'filter': 'multi', + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if filter is not None: + + _query_params.append(('filter', filter)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if assoc_name is not None: + + _query_params.append(('assocName', assoc_name)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/children', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_lrmi_data( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="Version of the node")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: 
Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> JSONObject: + """Get lrmi data. + + Get lrmi data of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: Version of the node + :type version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_lrmi_data_serialize( + repository=repository, + node=node, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "JSONObject", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_lrmi_data_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="Version of the node")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[JSONObject]: + """Get lrmi data. + + Get lrmi data of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: Version of the node + :type version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
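# The generator emits three variants per operation, all built on the same
# *_serialize helper: get_children(...) returns the deserialized model,
# get_children_with_http_info(...) wraps it in an ApiResponse (status and
# headers included), and get_children_without_preload_content(...) returns the
# raw, unread response. Sketch, reusing node_api from the first example; the
# attribute names on ApiResponse, NodeEntries and the raw response are
# assumptions.
entries = node_api.get_children(
    repository="-home-",
    node="-userhome-",          # children of the current user's home directory
    filter=["files"],           # documented filter values: files, folders
    max_items=50,
    skip_count=0,
)

resp = node_api.get_children_with_http_info("-home-", "-userhome-")
print(resp.status_code, resp.data)      # assumed ApiResponse attributes

raw = node_api.get_children_without_preload_content("-home-", "-userhome-")
print(raw.data)                         # assumed urllib3-style response attribute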
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_lrmi_data_serialize( + repository=repository, + node=node, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "JSONObject", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_lrmi_data_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="Version of the node")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get lrmi data. + + Get lrmi data of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: Version of the node + :type version: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_lrmi_data_serialize( + repository=repository, + node=node, + version=version, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "JSONObject", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_lrmi_data_serialize( + self, + repository, + node, + version, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if version is not None: + + _query_params.append(('version', version)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/lrmi', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_metadata( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Get metadata of node. + + Get metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
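# get_lrmi_data fetches the LRMI (JSON-LD) metadata of a node as a generic
# JSONObject. Sketch, node_api as in the first example; the node id and the
# way the result is unwrapped are assumptions.
lrmi = node_api.get_lrmi_data(
    repository="-home-",
    node="abc-123-node-id",     # hypothetical node id
    version=None,               # None / omitted for the current version
)
print(lrmi.to_dict() if hasattr(lrmi, "to_dict") else lrmi)   # unwrapping is an assumption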
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_metadata_serialize( + repository=repository, + node=node, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_metadata_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Get metadata of node. + + Get metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_metadata_serialize( + repository=repository, + node=node, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_metadata_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get metadata of node. + + Get metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_metadata_serialize( + repository=repository, + node=node, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_metadata_serialize( + self, + repository, + node, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/metadata', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_nodes( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + query: Annotated[StrictStr, Field(description="lucene query")], + facets: Annotated[Optional[List[StrictStr]], Field(description="facets")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResult: + """Searching nodes. 
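# get_metadata returns a single NodeEntry; propertyFilter is serialized as a
# multi-valued query parameter (see the 'propertyFilter': 'multi' entry in
# _collection_formats above), so a list is expected. Sketch, node_api as in
# the first example; the NodeEntry field access is an assumption.
entry = node_api.get_metadata(
    repository="-home-",
    node="abc-123-node-id",          # hypothetical node id
    property_filter=["-all-"],       # or a list of specific property names
)
print(entry.node.name if hasattr(entry, "node") else entry)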
+ + Searching nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param query: lucene query (required) + :type query: str + :param facets: facets + :type facets: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_nodes_serialize( + repository=repository, + query=query, + facets=facets, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_nodes_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + query: Annotated[StrictStr, Field(description="lucene query")], + facets: Annotated[Optional[List[StrictStr]], Field(description="facets")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResult]: + """Searching nodes. + + Searching nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param query: lucene query (required) + :type query: str + :param facets: facets + :type facets: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_nodes_serialize( + repository=repository, + query=query, + facets=facets, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_nodes_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + query: Annotated[StrictStr, Field(description="lucene query")], + facets: Annotated[Optional[List[StrictStr]], Field(description="facets")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Searching nodes. + + Searching nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param query: lucene query (required) + :type query: str + :param facets: facets + :type facets: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_nodes_serialize( + repository=repository, + query=query, + facets=facets, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResult", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_nodes_serialize( + self, + repository, + query, + facets, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'facets': 'multi', + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if query is not None: + + _query_params.append(('query', query)) + + if facets is not None: + + _query_params.append(('facets', facets)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_notify_list( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: 
Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get notifys (sharing history) of the node. + + Ordered by the time of each notify + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_notify_list_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_notify_list_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get notifys (sharing history) of the node. + + Ordered by the time of each notify + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
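# get_nodes posts a lucene query against /node/v1/nodes/{repository}, with
# paging, sorting and the property filter passed as query parameters. Sketch,
# node_api as in the first example; the lucene query string and the sort
# property are only examples, and the SearchResult attributes are assumptions.
result = node_api.get_nodes(
    repository="-home-",
    query='@cm\\:name:"physik*"',        # example lucene query
    max_items=25,
    skip_count=0,
    sort_properties=["cm:modified"],     # example sort property
    sort_ascending=[False],              # one flag per sort property
    property_filter=["-all-"],
)
print(result)                            # e.g. inspect result.nodes / result.pagination (assumed)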
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_notify_list_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_notify_list_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get notifys (sharing history) of the node. + + Ordered by the time of each notify + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
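# get_notify_list returns the sharing history ("notifys") of a node, ordered
# by the time of each notify; the 200 response is mapped to a plain string
# above. Sketch, node_api as in the first example; treating the string as
# JSON is an assumption.
import json

notify_history = node_api.get_notify_list(repository="-home-", node="abc-123-node-id")
history = json.loads(notify_history)     # assumed to be a JSON-encoded list
print(len(history))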
+ """ # noqa: E501 + + _param = self._get_notify_list_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_notify_list_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/notifys', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_parents( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + full_path: Annotated[Optional[StrictBool], Field(description="activate to return the full alfresco path, otherwise the path for the user home is resolved")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ParentEntries: + """Get parents of node. + + Get all parents metadata + own metadata of node. Index 0 is always the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param full_path: activate to return the full alfresco path, otherwise the path for the user home is resolved + :type full_path: bool + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_parents_serialize( + repository=repository, + node=node, + property_filter=property_filter, + full_path=full_path, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ParentEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_parents_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + full_path: Annotated[Optional[StrictBool], Field(description="activate to return the full alfresco path, otherwise the path for the user home is resolved")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[ParentEntries]: + """Get parents of node. + + Get all parents metadata + own metadata of node. Index 0 is always the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param full_path: activate to return the full alfresco path, otherwise the path for the user home is resolved + :type full_path: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_parents_serialize( + repository=repository, + node=node, + property_filter=property_filter, + full_path=full_path, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ParentEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_parents_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + full_path: Annotated[Optional[StrictBool], Field(description="activate to return the full alfresco path, otherwise the path for the user home is resolved")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get parents of node. + + Get all parents metadata + own metadata of node. Index 0 is always the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param full_path: activate to return the full alfresco path, otherwise the path for the user home is resolved + :type full_path: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_parents_serialize( + repository=repository, + node=node, + property_filter=property_filter, + full_path=full_path, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "ParentEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_parents_serialize( + self, + repository, + node, + property_filter, + full_path, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + if full_path is not None: + + _query_params.append(('fullPath', full_path)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/parents', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_permission( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodePermissionEntry: + """Get all permission of node. + + Get all permission of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_permission_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodePermissionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_permission_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodePermissionEntry]: + """Get all permission of node. + + Get all permission of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
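+
+        Example (a minimal sketch): assumes ``api`` is an instance of this API
+        class created from a configured ``ApiClient`` and ``"<node-id>"`` is a
+        placeholder node id::
+
+            resp = api.get_permission_with_http_info("-home-", "<node-id>")
+            print(resp.status_code)  # assumes ApiResponse exposes ``status_code``
+            print(resp.data)         # deserialized NodePermissionEntry on success
+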
+ """ # noqa: E501 + + _param = self._get_permission_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodePermissionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_permission_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all permission of node. + + Get all permission of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_permission_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodePermissionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_permission_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/permissions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_published_copies( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntries: + """Publish + + Get all published copies of the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_published_copies_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_published_copies_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntries]: + """Publish + + Get all published copies of the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
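+
+        Example (a minimal sketch): assumes ``api`` is an instance of this API
+        class and ``"<node-id>"`` is a placeholder for an original node whose
+        published copies should be listed::
+
+            resp = api.get_published_copies_with_http_info("-home-", "<node-id>")
+            copies = resp.data  # deserialized NodeEntries (``data`` assumed on ApiResponse)
+            # iterating over a ``nodes`` attribute is an assumption about NodeEntries
+            for copy in copies.nodes:
+                print(copy)
+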
+ """ # noqa: E501 + + _param = self._get_published_copies_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_published_copies_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Publish + + Get all published copies of the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_published_copies_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_published_copies_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/publish', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_shares( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + email: Annotated[Optional[StrictStr], Field(description="Filter for a specific email or use LINK for link shares (Optional)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get shares of node. + + Get list of shares (via mail/token) for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param email: Filter for a specific email or use LINK for link shares (Optional) + :type email: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_shares_serialize( + repository=repository, + node=node, + email=email, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_shares_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + email: Annotated[Optional[StrictStr], Field(description="Filter for a specific email or use LINK for link shares (Optional)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get shares of node. + + Get list of shares (via mail/token) for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param email: Filter for a specific email or use LINK for link shares (Optional) + :type email: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
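+
+        Example (a minimal sketch): assumes ``api`` is an instance of this API
+        class; ``email="LINK"`` restricts the result to link shares as described
+        above, and ``"<node-id>"`` is a placeholder::
+
+            resp = api.get_shares_with_http_info("-home-", "<node-id>", email="LINK")
+            print(resp.status_code)  # assumes ApiResponse exposes ``status_code``
+            print(resp.data)         # declared as ``str`` for the 200 response
+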
+ """ # noqa: E501 + + _param = self._get_shares_serialize( + repository=repository, + node=node, + email=email, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_shares_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + email: Annotated[Optional[StrictStr], Field(description="Filter for a specific email or use LINK for link shares (Optional)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get shares of node. + + Get list of shares (via mail/token) for a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param email: Filter for a specific email or use LINK for link shares (Optional) + :type email: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_shares_serialize( + repository=repository, + node=node, + email=email, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_shares_serialize( + self, + repository, + node, + email, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if email is not None: + + _query_params.append(('email', email)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/shares', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_stats( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeStats: + """Get statistics of node. + + Get statistics (views, downloads) of node. Requires ChangePermissions permission on node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_stats_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeStats", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_stats_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeStats]: + """Get statistics of node. + + Get statistics (views, downloads) of node. Requires ChangePermissions permission on node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
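+
+        Example (a minimal sketch): assumes ``api`` is an instance of this API
+        class, ``"<node-id>"`` is a placeholder, and the calling user holds the
+        ChangePermissions permission required above::
+
+            resp = api.get_stats_with_http_info("-home-", "<node-id>")
+            stats = resp.data  # deserialized NodeStats (``data`` assumed on ApiResponse)
+            # the concrete counter fields of NodeStats are not listed here, so the
+            # object is printed as a whole instead of guessing attribute names
+            print(stats)
+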
+ """ # noqa: E501 + + _param = self._get_stats_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeStats", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_stats_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get statistics of node. + + Get statistics (views, downloads) of node. Requires ChangePermissions permission on node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_stats_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeStats", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_stats_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/stats', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_template_metadata( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Get the metadata template + status for this folder. + + All the given metadata will be inherited to child nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_template_metadata_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_template_metadata_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Get the metadata template + status for this folder. + + All the given metadata will be inherited to child nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
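+
+        Example (a minimal sketch): assumes ``api`` is an instance of this API
+        class and ``"<folder-id>"`` is a placeholder for a folder whose inherited
+        metadata template should be inspected::
+
+            resp = api.get_template_metadata_with_http_info("-home-", "<folder-id>")
+            template = resp.data  # deserialized NodeEntry (``data`` assumed on ApiResponse)
+            print(template)
+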
+ """ # noqa: E501 + + _param = self._get_template_metadata_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_template_metadata_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get the metadata template + status for this folder. + + All the given metadata will be inherited to child nodes. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_template_metadata_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_template_metadata_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/metadata/template', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_text_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeText: + """Get the text content of a document. + + May fails with 500 if the node can not be read. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_text_content_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeText", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_text_content_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeText]: + """Get the text content of a document. + + May fails with 500 if the node can not be read. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
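+
+        Example (a minimal sketch): assumes ``api`` is an instance of this API
+        class and ``"<node-id>"`` points at a document; as noted above, the call
+        may fail with a 500 response if the node cannot be read::
+
+            resp = api.get_text_content_with_http_info("-home-", "<node-id>")
+            node_text = resp.data  # deserialized NodeText (``data`` assumed on ApiResponse)
+            print(node_text)
+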
+ """ # noqa: E501 + + _param = self._get_text_content_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeText", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_text_content_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get the text content of a document. + + May fails with 500 if the node can not be read. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_text_content_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeText", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_text_content_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/textContent', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_version_metadata( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + major: Annotated[StrictInt, Field(description="major version")], + minor: Annotated[StrictInt, Field(description="minor version")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeVersionEntry: + """Get metadata of node version. + + Get metadata of node version. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param major: major version (required) + :type major: int + :param minor: minor version (required) + :type minor: int + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_version_metadata_serialize( + repository=repository, + node=node, + major=major, + minor=minor, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_version_metadata_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + major: Annotated[StrictInt, Field(description="major version")], + minor: Annotated[StrictInt, Field(description="minor version")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeVersionEntry]: + """Get metadata of node version. + + Get metadata of node version. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param major: major version (required) + :type major: int + :param minor: minor version (required) + :type minor: int + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_version_metadata_serialize( + repository=repository, + node=node, + major=major, + minor=minor, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_version_metadata_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + major: Annotated[StrictInt, Field(description="major version")], + minor: Annotated[StrictInt, Field(description="minor version")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get metadata of node version. + + Get metadata of node version. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param major: major version (required) + :type major: int + :param minor: minor version (required) + :type minor: int + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_version_metadata_serialize( + repository=repository, + node=node, + major=major, + minor=minor, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_version_metadata_serialize( + self, + repository, + node, + major, + minor, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if major is not None: + _path_params['major'] = major + if minor is not None: + _path_params['minor'] = minor + # process the query parameters + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/metadata', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_versions( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeVersionRefEntries: + """Get all versions of node. + + Get all versions of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_versions_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionRefEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_versions_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeVersionRefEntries]: + """Get all versions of node. + + Get all versions of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_versions_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionRefEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_versions_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all versions of node. + + Get all versions of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_versions_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionRefEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_versions_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/versions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_versions1( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeVersionEntries: + """Get all versions of node, including it's metadata. + + Get all versions of node, including it's metadata. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_versions1_serialize( + repository=repository, + node=node, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_versions1_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeVersionEntries]: + """Get all versions of node, including it's metadata. + + Get all versions of node, including it's metadata. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_versions1_serialize( + repository=repository, + node=node, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_versions1_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all versions of node, including it's metadata. + + Get all versions of node, including it's metadata. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_versions1_serialize( + repository=repository, + node=node, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeVersionEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_versions1_serialize( + self, + repository, + node, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/versions/metadata', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_workflow_history( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get workflow history. + + Get workflow history of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_workflow_history_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_workflow_history_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get workflow history. + + Get workflow history of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_history_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_workflow_history_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get workflow history. + + Get workflow history of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_workflow_history_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_workflow_history_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/workflow', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def has_permission( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + user: Annotated[StrictStr, Field(description="Authority (user/group) to check (use \"-me-\" for current user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Which permissions has user/group for node. + + Check for actual permissions (also when user is in groups) for a specific node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param user: Authority (user/group) to check (use \"-me-\" for current user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._has_permission_serialize( + repository=repository, + node=node, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def has_permission_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + user: Annotated[StrictStr, Field(description="Authority (user/group) to check (use \"-me-\" for current user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Which permissions has user/group for node. + + Check for actual permissions (also when user is in groups) for a specific node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param user: Authority (user/group) to check (use \"-me-\" for current user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._has_permission_serialize( + repository=repository, + node=node, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def has_permission_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + user: Annotated[StrictStr, Field(description="Authority (user/group) to check (use \"-me-\" for current user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Which permissions has user/group for node. + + Check for actual permissions (also when user is in groups) for a specific node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param user: Authority (user/group) to check (use \"-me-\" for current user (required) + :type user: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._has_permission_serialize( + repository=repository, + node=node, + user=user, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _has_permission_serialize( + self, + repository, + node, + user, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if user is not None: + _path_params['user'] = user + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/permissions/{user}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def import_node( + self, + repository: Annotated[StrictStr, Field(description="The id of the foreign repository")], + node: Annotated[StrictStr, Field(description="ID of node")], + parent: Annotated[StrictStr, Field(description="Parent node where to store it locally, may also use -userhome- or -inbox-")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Import node + + Import a node from a foreign repository to the local repository. + + :param repository: The id of the foreign repository (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param parent: Parent node where to store it locally, may also use -userhome- or -inbox- (required) + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._import_node_serialize( + repository=repository, + node=node, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def import_node_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="The id of the foreign repository")], + node: Annotated[StrictStr, Field(description="ID of node")], + parent: Annotated[StrictStr, Field(description="Parent node where to store it locally, may also use -userhome- or -inbox-")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Import node + + Import a node from a foreign repository to the local repository. + + :param repository: The id of the foreign repository (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param parent: Parent node where to store it locally, may also use -userhome- or -inbox- (required) + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_node_serialize( + repository=repository, + node=node, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def import_node_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="The id of the foreign repository")], + node: Annotated[StrictStr, Field(description="ID of node")], + parent: Annotated[StrictStr, Field(description="Parent node where to store it locally, may also use -userhome- or -inbox-")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Import node + + Import a node from a foreign repository to the local repository. + + :param repository: The id of the foreign repository (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param parent: Parent node where to store it locally, may also use -userhome- or -inbox- (required) + :type parent: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._import_node_serialize( + repository=repository, + node=node, + parent=parent, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _import_node_serialize( + self, + repository, + node, + parent, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if parent is not None: + + _query_params.append(('parent', parent)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/import', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def islocked( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeLocked: + """locked status of a node. + + locked status of a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._islocked_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeLocked", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def islocked_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeLocked]: + """locked status of a node. + + locked status of a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
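+
+        Example (sketch only; ``api`` is assumed to be a configured instance of
+        this API class, the node ID is a placeholder)::
+
+            result = api.islocked_with_http_info(
+                repository="-home-",      # shortcut for the home repository
+                node="<node-id>",
+            )
+            lock_status = result.data    # deserialized NodeLocked
+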
+ """ # noqa: E501 + + _param = self._islocked_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeLocked", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def islocked_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """locked status of a node. + + locked status of a node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._islocked_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeLocked", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _islocked_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/lock/status', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def prepare_usage( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeRemote: + """create remote object and get properties. + + create remote object and get properties. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._prepare_usage_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeRemote", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def prepare_usage_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeRemote]: + """create remote object and get properties. + + create remote object and get properties. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
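+
+        Example (illustrative sketch; ``api`` is assumed to be a configured
+        instance of this API class, the node ID is a placeholder)::
+
+            remote = api.prepare_usage_with_http_info(
+                repository="-home-",
+                node="<node-id>",
+            ).data                       # deserialized NodeRemote
+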
+ """ # noqa: E501 + + _param = self._prepare_usage_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeRemote", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def prepare_usage_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create remote object and get properties. + + create remote object and get properties. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._prepare_usage_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeRemote", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _prepare_usage_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/prepareUsage', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def publish_copy( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + handle_mode: Annotated[Optional[StrictStr], Field(description="handle mode, if a handle should be created. Skip this parameter if you don't want an handle")] = None, + handle_param: Annotated[Optional[HandleParam], Field(description="handle parameter, if a handle and/or doi should be created. Skip this parameter if you don't want a handle or doi,")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Publish + + Create a published copy of the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param handle_mode: handle mode, if a handle should be created. Skip this parameter if you don't want an handle + :type handle_mode: str + :param handle_param: handle parameter, if a handle and/or doi should be created. 
Skip this parameter if you don't want a handle or doi, + :type handle_param: HandleParam + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._publish_copy_serialize( + repository=repository, + node=node, + handle_mode=handle_mode, + handle_param=handle_param, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def publish_copy_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + handle_mode: Annotated[Optional[StrictStr], Field(description="handle mode, if a handle should be created. Skip this parameter if you don't want an handle")] = None, + handle_param: Annotated[Optional[HandleParam], Field(description="handle parameter, if a handle and/or doi should be created. Skip this parameter if you don't want a handle or doi,")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Publish + + Create a published copy of the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param handle_mode: handle mode, if a handle should be created. Skip this parameter if you don't want an handle + :type handle_mode: str + :param handle_param: handle parameter, if a handle and/or doi should be created. Skip this parameter if you don't want a handle or doi, + :type handle_param: HandleParam + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._publish_copy_serialize( + repository=repository, + node=node, + handle_mode=handle_mode, + handle_param=handle_param, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def publish_copy_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + handle_mode: Annotated[Optional[StrictStr], Field(description="handle mode, if a handle should be created. Skip this parameter if you don't want an handle")] = None, + handle_param: Annotated[Optional[HandleParam], Field(description="handle parameter, if a handle and/or doi should be created. Skip this parameter if you don't want a handle or doi,")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Publish + + Create a published copy of the current node + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param handle_mode: handle mode, if a handle should be created. Skip this parameter if you don't want an handle + :type handle_mode: str + :param handle_param: handle parameter, if a handle and/or doi should be created. Skip this parameter if you don't want a handle or doi, + :type handle_param: HandleParam + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._publish_copy_serialize( + repository=repository, + node=node, + handle_mode=handle_mode, + handle_param=handle_param, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _publish_copy_serialize( + self, + repository, + node, + handle_mode, + handle_param, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if handle_mode is not None: + + _query_params.append(('handleMode', handle_mode)) + + # process the header parameters + # process the form parameters + # process the body parameter + if handle_param is not None: + _body_params = handle_param + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/publish', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_share( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share_id: Annotated[StrictStr, Field(description="share id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: 
Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove share of a node. + + Remove the specified share id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share_id: share id (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_share_serialize( + repository=repository, + node=node, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_share_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share_id: Annotated[StrictStr, Field(description="share id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove share of a node. + + Remove the specified share id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share_id: share id (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_share_serialize( + repository=repository, + node=node, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_share_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share_id: Annotated[StrictStr, Field(description="share id")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove share of a node. + + Remove the specified share id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share_id: share id (required) + :type share_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
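+
+        Example (sketch only; ``api`` is assumed to be a configured instance of
+        this API class and the IDs are placeholders)::
+
+            raw = api.remove_share_without_preload_content(
+                repository="-home-",
+                node="<node-id>",
+                share_id="<share-id>",
+            )
+            # the HTTP response is returned without deserialization;
+            # inspect or read it yourself as needed
+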
+ """ # noqa: E501 + + _param = self._remove_share_serialize( + repository=repository, + node=node, + share_id=share_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_share_serialize( + self, + repository, + node, + share_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if share_id is not None: + _path_params['shareId'] = share_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/node/v1/nodes/{repository}/{node}/shares/{shareId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def report_node( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + reason: Annotated[StrictStr, Field(description="the reason for the report")], + user_email: Annotated[StrictStr, Field(description="mail of reporting user")], + user_comment: Annotated[Optional[StrictStr], Field(description="additional user comment")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Report the node. + + Report a node to notify the admin about an issue) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param reason: the reason for the report (required) + :type reason: str + :param user_email: mail of reporting user (required) + :type user_email: str + :param user_comment: additional user comment + :type user_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._report_node_serialize( + repository=repository, + node=node, + reason=reason, + user_email=user_email, + user_comment=user_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def report_node_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + reason: Annotated[StrictStr, Field(description="the reason for the report")], + user_email: Annotated[StrictStr, Field(description="mail of reporting user")], + user_comment: Annotated[Optional[StrictStr], Field(description="additional user comment")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Report the node. + + Report a node to notify the admin about an issue) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param reason: the reason for the report (required) + :type reason: str + :param user_email: mail of reporting user (required) + :type user_email: str + :param user_comment: additional user comment + :type user_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._report_node_serialize( + repository=repository, + node=node, + reason=reason, + user_email=user_email, + user_comment=user_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def report_node_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + reason: Annotated[StrictStr, Field(description="the reason for the report")], + user_email: Annotated[StrictStr, Field(description="mail of reporting user")], + user_comment: Annotated[Optional[StrictStr], Field(description="additional user comment")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Report the node. + + Report a node to notify the admin about an issue) + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param reason: the reason for the report (required) + :type reason: str + :param user_email: mail of reporting user (required) + :type user_email: str + :param user_comment: additional user comment + :type user_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
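+
+        Example (illustrative placeholders; ``api`` is assumed to be a configured
+        instance of this API class)::
+
+            raw = api.report_node_without_preload_content(
+                repository="-home-",
+                node="<node-id>",
+                reason="broken link",                  # free-text reason
+                user_email="reporter@example.org",
+                user_comment="optional extra detail",
+            )
+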
+ """ # noqa: E501 + + _param = self._report_node_serialize( + repository=repository, + node=node, + reason=reason, + user_email=user_email, + user_comment=user_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _report_node_serialize( + self, + repository, + node, + reason, + user_email, + user_comment, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if reason is not None: + + _query_params.append(('reason', reason)) + + if user_email is not None: + + _query_params.append(('userEmail', user_email)) + + if user_comment is not None: + + _query_params.append(('userComment', user_comment)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/report', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def revert_version( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + major: Annotated[StrictInt, Field(description="major version")], + minor: Annotated[StrictInt, Field(description="minor version")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Revert to node version. + + Revert to node version. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param major: major version (required) + :type major: int + :param minor: minor version (required) + :type minor: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._revert_version_serialize( + repository=repository, + node=node, + major=major, + minor=minor, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def revert_version_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + major: Annotated[StrictInt, Field(description="major version")], + minor: Annotated[StrictInt, Field(description="minor version")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Revert to node version. + + Revert to node version. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param major: major version (required) + :type major: int + :param minor: minor version (required) + :type minor: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._revert_version_serialize( + repository=repository, + node=node, + major=major, + minor=minor, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def revert_version_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + major: Annotated[StrictInt, Field(description="major version")], + minor: Annotated[StrictInt, Field(description="minor version")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Revert to node version. + + Revert to node version. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param major: major version (required) + :type major: int + :param minor: minor version (required) + :type minor: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
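+
+        A minimal usage sketch; the module path ``edu_sharing_client.api.nodev1_api``,
+        the class name ``NODEV1Api`` and the host/node values are assumptions and
+        placeholders, not taken from this patch::
+
+            from edu_sharing_client import ApiClient, Configuration
+            from edu_sharing_client.api.nodev1_api import NODEV1Api
+
+            configuration = Configuration(host="https://repo.example.org/edu-sharing/rest")
+            with ApiClient(configuration) as api_client:
+                node_api = NODEV1Api(api_client)
+                # roll the node back to version 1.0; returns the resulting NodeEntry
+                node_entry = node_api.revert_version("-home-", "<node-id>", major=1, minor=0)
+
+        This ``_without_preload_content`` variant issues the same request but hands
+        back the raw, undeserialized HTTP response instead of a ``NodeEntry``.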
+ """ # noqa: E501 + + _param = self._revert_version_serialize( + repository=repository, + node=node, + major=major, + minor=minor, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _revert_version_serialize( + self, + repository, + node, + major, + minor, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if major is not None: + _path_params['major'] = major + if minor is not None: + _path_params['minor'] = minor + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/node/v1/nodes/{repository}/{node}/versions/{major}/{minor}/_revert', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_owner( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + username: Annotated[Optional[StrictStr], Field(description="username")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set owner of node. + + Set owner of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param username: username + :type username: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_owner_serialize( + repository=repository, + node=node, + username=username, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_owner_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + username: Annotated[Optional[StrictStr], Field(description="username")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set owner of node. + + Set owner of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param username: username + :type username: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
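+
+        A minimal usage sketch; client construction, module and class names are
+        assumptions (see the ``revert_version`` example), and the username is a
+        placeholder::
+
+            from edu_sharing_client import ApiClient, Configuration
+            from edu_sharing_client.api.nodev1_api import NODEV1Api
+
+            node_api = NODEV1Api(ApiClient(Configuration(host="https://repo.example.org/edu-sharing/rest")))
+            # transfer ownership of the node to the given user
+            node_api.set_owner("-home-", "<node-id>", username="jdoe")
+
+        The ``_with_http_info`` variant returns an ``ApiResponse`` wrapper so that,
+        in addition to the (empty) body, response metadata such as the HTTP status
+        can be inspected.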
+ """ # noqa: E501 + + _param = self._set_owner_serialize( + repository=repository, + node=node, + username=username, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_owner_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + username: Annotated[Optional[StrictStr], Field(description="username")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set owner of node. + + Set owner of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param username: username + :type username: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_owner_serialize( + repository=repository, + node=node, + username=username, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_owner_serialize( + self, + repository, + node, + username, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if username is not None: + + _query_params.append(('username', username)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/owner', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_permission( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + send_mail: Annotated[StrictBool, Field(description="sendMail")], + send_copy: Annotated[StrictBool, Field(description="sendCopy")], + acl: Annotated[ACL, Field(description="permissions")], + mailtext: Annotated[Optional[StrictStr], Field(description="mailtext")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Set local permissions of node. + + Set local permissions of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param send_mail: sendMail (required) + :type send_mail: bool + :param send_copy: sendCopy (required) + :type send_copy: bool + :param acl: permissions (required) + :type acl: ACL + :param mailtext: mailtext + :type mailtext: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_permission_serialize( + repository=repository, + node=node, + send_mail=send_mail, + send_copy=send_copy, + acl=acl, + mailtext=mailtext, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_permission_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + send_mail: Annotated[StrictBool, Field(description="sendMail")], + send_copy: Annotated[StrictBool, Field(description="sendCopy")], + acl: Annotated[ACL, Field(description="permissions")], + mailtext: Annotated[Optional[StrictStr], Field(description="mailtext")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set local permissions of node. + + Set local permissions of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param send_mail: sendMail (required) + :type send_mail: bool + :param send_copy: sendCopy (required) + :type send_copy: bool + :param acl: permissions (required) + :type acl: ACL + :param mailtext: mailtext + :type mailtext: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_permission_serialize( + repository=repository, + node=node, + send_mail=send_mail, + send_copy=send_copy, + acl=acl, + mailtext=mailtext, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_permission_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + send_mail: Annotated[StrictBool, Field(description="sendMail")], + send_copy: Annotated[StrictBool, Field(description="sendCopy")], + acl: Annotated[ACL, Field(description="permissions")], + mailtext: Annotated[Optional[StrictStr], Field(description="mailtext")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set local permissions of node. + + Set local permissions of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param send_mail: sendMail (required) + :type send_mail: bool + :param send_copy: sendCopy (required) + :type send_copy: bool + :param acl: permissions (required) + :type acl: ACL + :param mailtext: mailtext + :type mailtext: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
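+
+        A minimal usage sketch; module, class and import paths are assumptions, and
+        the ACL payload shape (``inherited``, ``permissions``, ``authorityName``,
+        ``Consumer``) is only an approximation of the edu-sharing permission model
+        and is not defined in this file::
+
+            from edu_sharing_client import ApiClient, Configuration
+            from edu_sharing_client.api.nodev1_api import NODEV1Api
+            from edu_sharing_client.models.acl import ACL
+
+            node_api = NODEV1Api(ApiClient(Configuration(host="https://repo.example.org/edu-sharing/rest")))
+            acl = ACL.from_dict({
+                "inherited": False,
+                "permissions": [{
+                    "authority": {"authorityName": "GROUP_example", "authorityType": "GROUP"},
+                    "permissions": ["Consumer"],
+                }],
+            })
+            # replace the node's local permissions without sending invitation mails
+            node_api.set_permission("-home-", "<node-id>", send_mail=False, send_copy=False, acl=acl)
+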
+ """ # noqa: E501 + + _param = self._set_permission_serialize( + repository=repository, + node=node, + send_mail=send_mail, + send_copy=send_copy, + acl=acl, + mailtext=mailtext, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_permission_serialize( + self, + repository, + node, + send_mail, + send_copy, + acl, + mailtext, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if mailtext is not None: + + _query_params.append(('mailtext', mailtext)) + + if send_mail is not None: + + _query_params.append(('sendMail', send_mail)) + + if send_copy is not None: + + _query_params.append(('sendCopy', send_copy)) + + # process the header parameters + # process the form parameters + # process the body parameter + if acl is not None: + _body_params = acl + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/permissions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_property( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + var_property: Annotated[StrictStr, Field(description="property")], + keep_modified_date: Annotated[Optional[StrictBool], Field(description="keepModifiedDate")] = None, + value: Annotated[Optional[List[StrictStr]], Field(description="value")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, 
le=0)] = 0, + ) -> None: + """Set single property of node. + + When the property is unset (null), it will be removed + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param var_property: property (required) + :type var_property: str + :param keep_modified_date: keepModifiedDate + :type keep_modified_date: bool + :param value: value + :type value: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_property_serialize( + repository=repository, + node=node, + var_property=var_property, + keep_modified_date=keep_modified_date, + value=value, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_property_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + var_property: Annotated[StrictStr, Field(description="property")], + keep_modified_date: Annotated[Optional[StrictBool], Field(description="keepModifiedDate")] = None, + value: Annotated[Optional[List[StrictStr]], Field(description="value")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Set single property of node. 
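+
+        A minimal usage sketch; client construction and the property name
+        ``ccm:wwwurl`` are assumptions and placeholders::
+
+            from edu_sharing_client import ApiClient, Configuration
+            from edu_sharing_client.api.nodev1_api import NODEV1Api
+
+            node_api = NODEV1Api(ApiClient(Configuration(host="https://repo.example.org/edu-sharing/rest")))
+            # set a multi-valued property; keep_modified_date=True asks the
+            # repository to preserve the node's modification date
+            node_api.set_property("-home-", "<node-id>", var_property="ccm:wwwurl",
+                                  value=["https://example.org"], keep_modified_date=True)
+            # omitting ``value`` unsets (removes) the property, as described below
+            node_api.set_property("-home-", "<node-id>", var_property="ccm:wwwurl")
+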
+ + When the property is unset (null), it will be removed + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param var_property: property (required) + :type var_property: str + :param keep_modified_date: keepModifiedDate + :type keep_modified_date: bool + :param value: value + :type value: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_property_serialize( + repository=repository, + node=node, + var_property=var_property, + keep_modified_date=keep_modified_date, + value=value, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_property_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + var_property: Annotated[StrictStr, Field(description="property")], + keep_modified_date: Annotated[Optional[StrictBool], Field(description="keepModifiedDate")] = None, + value: Annotated[Optional[List[StrictStr]], Field(description="value")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set single property of node. + + When the property is unset (null), it will be removed + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param var_property: property (required) + :type var_property: str + :param keep_modified_date: keepModifiedDate + :type keep_modified_date: bool + :param value: value + :type value: List[str] + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_property_serialize( + repository=repository, + node=node, + var_property=var_property, + keep_modified_date=keep_modified_date, + value=value, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_property_serialize( + self, + repository, + node, + var_property, + keep_modified_date, + value, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'value': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if var_property is not None: + + _query_params.append(('property', var_property)) + + if keep_modified_date is not None: + + _query_params.append(('keepModifiedDate', keep_modified_date)) + + if value is not None: + + _query_params.append(('value', value)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/property', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def store_x_api_data( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + body: Annotated[StrictStr, Field(description="xApi conform json 
data")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> object: + """Store xApi-Conform data for a given node + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param body: xApi conform json data (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._store_x_api_data_serialize( + repository=repository, + node=node, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def store_x_api_data_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + body: Annotated[StrictStr, Field(description="xApi conform json data")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[object]: + """Store xApi-Conform data for a given node + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param body: xApi conform json data (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._store_x_api_data_serialize( + repository=repository, + node=node, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def store_x_api_data_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + body: Annotated[StrictStr, Field(description="xApi conform json data")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Store xApi-Conform data for a given node + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param body: xApi conform json data (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
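+
+        A minimal usage sketch; client construction is assumed as in the other
+        examples, and the statement below is only an indicative xAPI-style payload,
+        not a schema defined in this file::
+
+            import json
+
+            from edu_sharing_client import ApiClient, Configuration
+            from edu_sharing_client.api.nodev1_api import NODEV1Api
+
+            node_api = NODEV1Api(ApiClient(Configuration(host="https://repo.example.org/edu-sharing/rest")))
+            statement = json.dumps({
+                "actor": {"mbox": "mailto:learner@example.org"},
+                "verb": {"id": "http://adlnet.gov/expapi/verbs/experienced"},
+            })
+            # the body is sent as a JSON string and stored for the given node
+            result = node_api.store_x_api_data("-home-", "<node-id>", body=statement)
+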
+ """ # noqa: E501 + + _param = self._store_x_api_data_serialize( + repository=repository, + node=node, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "object", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _store_x_api_data_serialize( + self, + repository, + node, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/xapi', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def unlock( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """unlock node. + + unlock node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._unlock_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def unlock_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """unlock node. + + unlock node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
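+
+        A minimal usage sketch; client construction, module and class names are
+        assumptions as in the other examples::
+
+            from edu_sharing_client import ApiClient, Configuration
+            from edu_sharing_client.api.nodev1_api import NODEV1Api
+
+            node_api = NODEV1Api(ApiClient(Configuration(host="https://repo.example.org/edu-sharing/rest")))
+            # release the lock held on the node
+            node_api.unlock("-home-", "<node-id>")
+
+        This ``_with_http_info`` variant performs the same call but also exposes the
+        HTTP status and headers of the response.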
+ """ # noqa: E501 + + _param = self._unlock_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def unlock_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """unlock node. + + unlock node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._unlock_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _unlock_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/node/v1/nodes/{repository}/{node}/lock/unlock', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_share( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share_id: Annotated[StrictStr, Field(description="share id")], + expiry_date: Annotated[Optional[StrictInt], Field(description="expiry date for this share, leave empty or -1 for unlimited")] = None, + password: Annotated[Optional[StrictStr], Field(description="new password for share, leave empty if you don't want to change it")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeShare: + """update share of a node. + + update the specified share id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share_id: share id (required) + :type share_id: str + :param expiry_date: expiry date for this share, leave empty or -1 for unlimited + :type expiry_date: int + :param password: new password for share, leave empty if you don't want to change it + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_share_serialize( + repository=repository, + node=node, + share_id=share_id, + expiry_date=expiry_date, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeShare", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_share_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share_id: Annotated[StrictStr, Field(description="share id")], + expiry_date: Annotated[Optional[StrictInt], Field(description="expiry date for this share, leave empty or -1 for unlimited")] = None, + password: Annotated[Optional[StrictStr], Field(description="new password for share, leave empty if you don't want to change it")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeShare]: + """update share of a node. + + update the specified share id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share_id: share id (required) + :type share_id: str + :param expiry_date: expiry date for this share, leave empty or -1 for unlimited + :type expiry_date: int + :param password: new password for share, leave empty if you don't want to change it + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_share_serialize( + repository=repository, + node=node, + share_id=share_id, + expiry_date=expiry_date, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeShare", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_share_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share_id: Annotated[StrictStr, Field(description="share id")], + expiry_date: Annotated[Optional[StrictInt], Field(description="expiry date for this share, leave empty or -1 for unlimited")] = None, + password: Annotated[Optional[StrictStr], Field(description="new password for share, leave empty if you don't want to change it")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update share of a node. + + update the specified share id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share_id: share id (required) + :type share_id: str + :param expiry_date: expiry date for this share, leave empty or -1 for unlimited + :type expiry_date: int + :param password: new password for share, leave empty if you don't want to change it + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_share_serialize( + repository=repository, + node=node, + share_id=share_id, + expiry_date=expiry_date, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeShare", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_share_serialize( + self, + repository, + node, + share_id, + expiry_date, + password, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if share_id is not None: + _path_params['shareId'] = share_id + # process the query parameters + if expiry_date is not None: + + _query_params.append(('expiryDate', expiry_date)) + + if password is not None: + + _query_params.append(('password', password)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/node/v1/nodes/{repository}/{node}/shares/{shareId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/notificationv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/notificationv1_api.py new file mode 100644 index 00000000..1e2c4260 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/notificationv1_api.py @@ -0,0 +1,1685 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictInt, StrictStr, field_validator +from typing import List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.notification_config import NotificationConfig +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.notification_response_page import NotificationResponsePage + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class NOTIFICATIONV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def delete_notification( + self, + id: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Endpoint to delete notification by id + + + :param id: + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_notification_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_notification_with_http_info( + self, + id: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Endpoint to delete notification by id + + + :param id: + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_notification_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_notification_without_preload_content( + self, + id: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Endpoint to delete notification by id + + + :param id: + :type id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_notification_serialize( + id=id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_notification_serialize( + self, + id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if id is not None: + + _query_params.append(('id', id)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/notification/v1/notifications', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_config2( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NotificationConfig: + """get the config for notifications of the current user + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_config2_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationConfig", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_config2_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NotificationConfig]: + """get the config for notifications of the current user + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_config2_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationConfig", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_config2_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get the config for notifications of the current user + + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_config2_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationConfig", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_config2_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/notification/v1/config', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_notifications( + self, + receiver_id: Optional[StrictStr] = None, + status: Annotated[Optional[List[StrictStr]], Field(description="status (or conjunction)")] = None, + page: Annotated[Optional[StrictInt], Field(description="page number")] = None, + size: Annotated[Optional[StrictInt], Field(description="page size")] = None, + sort: Annotated[Optional[List[StrictStr]], Field(description="Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. 
Multiple sort criteria are supported.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NotificationResponsePage: + """Retrieve stored notification, filtered by receiver and status + + + :param receiver_id: + :type receiver_id: str + :param status: status (or conjunction) + :type status: List[str] + :param page: page number + :type page: int + :param size: page size + :type size: int + :param sort: Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. Multiple sort criteria are supported. + :type sort: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_notifications_serialize( + receiver_id=receiver_id, + status=status, + page=page, + size=size, + sort=sort, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationResponsePage", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_notifications_with_http_info( + self, + receiver_id: Optional[StrictStr] = None, + status: Annotated[Optional[List[StrictStr]], Field(description="status (or conjunction)")] = None, + page: Annotated[Optional[StrictInt], Field(description="page number")] = None, + size: Annotated[Optional[StrictInt], Field(description="page size")] = None, + sort: Annotated[Optional[List[StrictStr]], Field(description="Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. 
Multiple sort criteria are supported.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NotificationResponsePage]: + """Retrieve stored notification, filtered by receiver and status + + + :param receiver_id: + :type receiver_id: str + :param status: status (or conjunction) + :type status: List[str] + :param page: page number + :type page: int + :param size: page size + :type size: int + :param sort: Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. Multiple sort criteria are supported. + :type sort: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_notifications_serialize( + receiver_id=receiver_id, + status=status, + page=page, + size=size, + sort=sort, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationResponsePage", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_notifications_without_preload_content( + self, + receiver_id: Optional[StrictStr] = None, + status: Annotated[Optional[List[StrictStr]], Field(description="status (or conjunction)")] = None, + page: Annotated[Optional[StrictInt], Field(description="page number")] = None, + size: Annotated[Optional[StrictInt], Field(description="page size")] = None, + sort: Annotated[Optional[List[StrictStr]], Field(description="Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. 
Multiple sort criteria are supported.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Retrieve stored notification, filtered by receiver and status + + + :param receiver_id: + :type receiver_id: str + :param status: status (or conjunction) + :type status: List[str] + :param page: page number + :type page: int + :param size: page size + :type size: int + :param sort: Sorting criteria in the format: property(,asc|desc)(,ignoreCase). Default sort order is ascending. Multiple sort criteria are supported. + :type sort: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_notifications_serialize( + receiver_id=receiver_id, + status=status, + page=page, + size=size, + sort=sort, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationResponsePage", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_notifications_serialize( + self, + receiver_id, + status, + page, + size, + sort, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'status': 'multi', + 'sort': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if receiver_id is not None: + + _query_params.append(('receiverId', receiver_id)) + + if status is not None: + + _query_params.append(('status', status)) + + if page is not None: + + _query_params.append(('page', page)) + + if size is not None: + + _query_params.append(('size', size)) + + if sort is not None: + + _query_params.append(('sort', sort)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/notification/v1/notifications', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_config1( + self, + notification_config: Optional[NotificationConfig] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Update the config for notifications of the current user + + + :param notification_config: + :type notification_config: NotificationConfig + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_config1_serialize( + notification_config=notification_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_config1_with_http_info( + self, + notification_config: Optional[NotificationConfig] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Update the config for notifications of the current user + + + :param notification_config: + :type notification_config: NotificationConfig + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_config1_serialize( + notification_config=notification_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_config1_without_preload_content( + self, + notification_config: Optional[NotificationConfig] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Update the config for notifications of the current user + + + :param notification_config: + :type notification_config: NotificationConfig + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_config1_serialize( + notification_config=notification_config, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_config1_serialize( + self, + notification_config, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if notification_config is not None: + _body_params = notification_config + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/notification/v1/config', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_notification_status( + self, + id: Optional[StrictStr] = None, + status: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NotificationEventDTO: + """Endpoint to update the notification status + + + :param id: + :type id: str + :param status: + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_notification_status_serialize( + id=id, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationEventDTO", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_notification_status_with_http_info( + self, + id: Optional[StrictStr] = None, + status: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NotificationEventDTO]: + """Endpoint to update the notification status + + + :param id: + :type id: str + :param status: + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_notification_status_serialize( + id=id, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationEventDTO", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_notification_status_without_preload_content( + self, + id: Optional[StrictStr] = None, + status: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Endpoint to update the notification status + + + :param id: + :type id: str + :param status: + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_notification_status_serialize( + id=id, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NotificationEventDTO", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_notification_status_serialize( + self, + id, + status, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if id is not None: + + _query_params.append(('id', id)) + + if status is not None: + + _query_params.append(('status', status)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/notification/v1/notifications/status', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_notification_status_by_receiver_id( + self, + receiver_id: Optional[StrictStr] = None, + old_status: Annotated[Optional[List[StrictStr]], Field(description="The old status (or conjunction)")] = None, + new_status: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Endpoint to update the notification status + + + :param receiver_id: + :type receiver_id: str + :param old_status: The old status (or conjunction) + :type old_status: List[str] + :param new_status: + :type new_status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_notification_status_by_receiver_id_serialize( + receiver_id=receiver_id, + old_status=old_status, + new_status=new_status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_notification_status_by_receiver_id_with_http_info( + self, + receiver_id: Optional[StrictStr] = None, + old_status: Annotated[Optional[List[StrictStr]], Field(description="The old status (or conjunction)")] = None, + new_status: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Endpoint to update the notification status + + + :param receiver_id: + :type receiver_id: str + :param old_status: The old status (or conjunction) + :type old_status: List[str] + :param new_status: + :type new_status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_notification_status_by_receiver_id_serialize( + receiver_id=receiver_id, + old_status=old_status, + new_status=new_status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_notification_status_by_receiver_id_without_preload_content( + self, + receiver_id: Optional[StrictStr] = None, + old_status: Annotated[Optional[List[StrictStr]], Field(description="The old status (or conjunction)")] = None, + new_status: Optional[StrictStr] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Endpoint to update the notification status + + + :param receiver_id: + :type receiver_id: str + :param old_status: The old status (or conjunction) + :type old_status: List[str] + :param new_status: + :type new_status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._update_notification_status_by_receiver_id_serialize( + receiver_id=receiver_id, + old_status=old_status, + new_status=new_status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_notification_status_by_receiver_id_serialize( + self, + receiver_id, + old_status, + new_status, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'oldStatus': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if receiver_id is not None: + + _query_params.append(('receiverId', receiver_id)) + + if old_status is not None: + + _query_params.append(('oldStatus', old_status)) + + if new_status is not None: + + _query_params.append(('newStatus', new_status)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/notification/v1/notifications/receiver/status', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/organizationv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/organizationv1_api.py new file mode 100644 index 00000000..fd693640 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/organizationv1_api.py @@ -0,0 +1,1604 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.models.organization_entries import OrganizationEntries + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class ORGANIZATIONV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def create_organizations( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="organization name")], + eduscope: Annotated[Optional[StrictStr], Field(description="eduscope (may be null)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Organization: + """create organization in repository. + + create organization in repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: organization name (required) + :type organization: str + :param eduscope: eduscope (may be null) + :type eduscope: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_organizations_serialize( + repository=repository, + organization=organization, + eduscope=eduscope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Organization", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_organizations_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="organization name")], + eduscope: Annotated[Optional[StrictStr], Field(description="eduscope (may be null)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Organization]: + """create organization in repository. + + create organization in repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: organization name (required) + :type organization: str + :param eduscope: eduscope (may be null) + :type eduscope: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_organizations_serialize( + repository=repository, + organization=organization, + eduscope=eduscope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Organization", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_organizations_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="organization name")], + eduscope: Annotated[Optional[StrictStr], Field(description="eduscope (may be null)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create organization in repository. + + create organization in repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: organization name (required) + :type organization: str + :param eduscope: eduscope (may be null) + :type eduscope: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_organizations_serialize( + repository=repository, + organization=organization, + eduscope=eduscope, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Organization", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_organizations_serialize( + self, + repository, + organization, + eduscope, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if organization is not None: + _path_params['organization'] = organization + # process the query parameters + if eduscope is not None: + + _query_params.append(('eduscope', eduscope)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/organization/v1/organizations/{repository}/{organization}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_organizations( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Delete organization of repository. + + Delete organization of repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: groupname (required) + :type organization: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_organizations_serialize( + repository=repository, + organization=organization, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_organizations_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Delete organization of repository. + + Delete organization of repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: groupname (required) + :type organization: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_organizations_serialize( + repository=repository, + organization=organization, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_organizations_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="groupname")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete organization of repository. + + Delete organization of repository. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: groupname (required) + :type organization: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_organizations_serialize( + repository=repository, + organization=organization, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_organizations_serialize( + self, + repository, + organization, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if organization is not None: + _path_params['organization'] = organization + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/organization/v1/organizations/{repository}/{organization}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_organization( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="ID of organization")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Organization: + """Get organization by id. + + Get organization by id. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: ID of organization (required) + :type organization: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_organization_serialize( + repository=repository, + organization=organization, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Organization", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_organization_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="ID of organization")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Organization]: + """Get organization by id. + + Get organization by id. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: ID of organization (required) + :type organization: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_organization_serialize( + repository=repository, + organization=organization, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Organization", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_organization_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="ID of organization")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get organization by id. + + Get organization by id. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: ID of organization (required) + :type organization: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_organization_serialize( + repository=repository, + organization=organization, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Organization", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_organization_serialize( + self, + repository, + organization, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if organization is not None: + _path_params['organization'] = organization + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/organization/v1/organizations/{repository}/{organization}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_organizations( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + only_memberships: Annotated[Optional[StrictBool], Field(description="search only in memberships, false can only be done by admin")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> OrganizationEntries: + """Get organizations of repository. + + Get organizations of repository the current user is member. May returns an empty list. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param only_memberships: search only in memberships, false can only be done by admin + :type only_memberships: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_organizations_serialize( + repository=repository, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + only_memberships=only_memberships, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OrganizationEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_organizations_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + only_memberships: Annotated[Optional[StrictBool], Field(description="search only in memberships, false can only be done by admin")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[OrganizationEntries]: + """Get organizations of repository. + + Get organizations of repository the current user is member. May returns an empty list. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param only_memberships: search only in memberships, false can only be done by admin + :type only_memberships: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_organizations_serialize( + repository=repository, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + only_memberships=only_memberships, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OrganizationEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_organizations_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + pattern: Annotated[Optional[StrictStr], Field(description="pattern")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + only_memberships: Annotated[Optional[StrictBool], Field(description="search only in memberships, false can only be done by admin")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get organizations of repository. + + Get organizations of repository the current user is member. May returns an empty list. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param pattern: pattern + :type pattern: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param only_memberships: search only in memberships, false can only be done by admin + :type only_memberships: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_organizations_serialize( + repository=repository, + pattern=pattern, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + only_memberships=only_memberships, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "OrganizationEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_organizations_serialize( + self, + repository, + pattern, + max_items, + skip_count, + sort_properties, + sort_ascending, + only_memberships, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if pattern is not None: + + _query_params.append(('pattern', pattern)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if only_memberships is not None: + + _query_params.append(('onlyMemberships', only_memberships)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/organization/v1/organizations/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def remove_from_organization( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: 
Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Remove member from organization. + + Remove member from organization. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: groupname (required) + :type organization: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_from_organization_serialize( + repository=repository, + organization=organization, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def remove_from_organization_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Remove member from organization. + + Remove member from organization. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: groupname (required) + :type organization: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_from_organization_serialize( + repository=repository, + organization=organization, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def remove_from_organization_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + organization: Annotated[StrictStr, Field(description="groupname")], + member: Annotated[StrictStr, Field(description="authorityName of member")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Remove member from organization. + + Remove member from organization. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param organization: groupname (required) + :type organization: str + :param member: authorityName of member (required) + :type member: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._remove_from_organization_serialize( + repository=repository, + organization=organization, + member=member, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _remove_from_organization_serialize( + self, + repository, + organization, + member, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if organization is not None: + _path_params['organization'] = organization + if member is not None: + _path_params['member'] = member + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/organization/v1/organizations/{repository}/{organization}/member/{member}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/ratingv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/ratingv1_api.py new file mode 100644 index 00000000..e87d2962 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/ratingv1_api.py @@ -0,0 +1,1254 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictFloat, StrictInt, StrictStr +from typing import Optional, Union +from typing_extensions import Annotated + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class RATINGV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
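+
+    Example (again only a sketch, under the same assumptions as above: the generated
+    ``edu_sharing_client`` package is importable and ``ApiClient.get_default()`` points
+    at the target edu-sharing host)::
+
+        from edu_sharing_client.api.ratingv1_api import RATINGV1Api
+
+        api = RATINGV1Api()  # no argument: falls back to ApiClient.get_default()
+        # PUT /rating/v1/ratings/{repository}/{node} creates the rating, or alters it
+        # if the current user has already rated that node ("-home-" = home repository).
+        api.add_or_update_rating(
+            repository="-home-",
+            node="00000000-0000-0000-0000-000000000000",  # placeholder node ID
+            rating=5,
+            body="short review text",
+        )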
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_or_update_rating( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + rating: Annotated[Union[StrictFloat, StrictInt], Field(description="The rating (usually in range 1-5)")], + body: Annotated[StrictStr, Field(description="Text content of rating")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """create or update a rating + + Adds the rating. If the current user already rated that element, the rating will be altered + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param rating: The rating (usually in range 1-5) (required) + :type rating: float + :param body: Text content of rating (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_or_update_rating_serialize( + repository=repository, + node=node, + rating=rating, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_or_update_rating_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + rating: Annotated[Union[StrictFloat, StrictInt], Field(description="The rating (usually in range 1-5)")], + body: Annotated[StrictStr, Field(description="Text content of rating")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """create or update a rating + + Adds the rating. If the current user already rated that element, the rating will be altered + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param rating: The rating (usually in range 1-5) (required) + :type rating: float + :param body: Text content of rating (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_or_update_rating_serialize( + repository=repository, + node=node, + rating=rating, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_or_update_rating_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + rating: Annotated[Union[StrictFloat, StrictInt], Field(description="The rating (usually in range 1-5)")], + body: Annotated[StrictStr, Field(description="Text content of rating")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create or update a rating + + Adds the rating. If the current user already rated that element, the rating will be altered + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param rating: The rating (usually in range 1-5) (required) + :type rating: float + :param body: Text content of rating (required) + :type body: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_or_update_rating_serialize( + repository=repository, + node=node, + rating=rating, + body=body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_or_update_rating_serialize( + self, + repository, + node, + rating, + body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if rating is not None: + + _query_params.append(('rating', rating)) + + # process the header parameters + # process the form parameters + # process the body parameter + if body is not None: + _body_params = body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/rating/v1/ratings/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_rating( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """delete a comment + + Delete the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_rating_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_rating_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """delete a comment + + Delete the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_rating_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_rating_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete a comment + + Delete the comment with the given id + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_rating_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_rating_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/rating/v1/ratings/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_accumulated_ratings( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + date_from: Annotated[Optional[StrictInt], Field(description="date range from")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param date_from: date range from + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_accumulated_ratings_serialize( + repository=repository, + node=node, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_accumulated_ratings_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + date_from: Annotated[Optional[StrictInt], Field(description="date range from")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param date_from: date range from + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_accumulated_ratings_serialize( + repository=repository, + node=node, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_accumulated_ratings_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + date_from: Annotated[Optional[StrictInt], Field(description="date range from")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param date_from: date range from + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_accumulated_ratings_serialize( + repository=repository, + node=node, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_accumulated_ratings_serialize( + self, + repository, + node, + date_from, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if date_from is not None: + + _query_params.append(('dateFrom', date_from)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/rating/v1/ratings/{repository}/{node}/history', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_nodes_altered_in_range( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_nodes_altered_in_range_serialize( + repository=repository, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_nodes_altered_in_range_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_nodes_altered_in_range_serialize( + repository=repository, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_nodes_altered_in_range_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_nodes_altered_in_range_serialize( + repository=repository, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_nodes_altered_in_range_serialize( + self, + repository, + date_from, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if date_from is not None: + + _query_params.append(('dateFrom', date_from)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/rating/v1/ratings/{repository}/nodes/altered', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/registerv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/registerv1_api.py new file mode 100644 index 00000000..7a364ff6 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/registerv1_api.py @@ -0,0 +1,1619 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import Optional +from typing_extensions import Annotated +from edu_sharing_client.models.register_exists import RegisterExists +from edu_sharing_client.models.register_information import RegisterInformation + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class REGISTERV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def activate( + self, + key: Annotated[StrictStr, Field(description="The key for the user to activate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Activate a new user (by using a supplied key) + + + :param key: The key for the user to activate (required) + :type key: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._activate_serialize( + key=key, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def activate_with_http_info( + self, + key: Annotated[StrictStr, Field(description="The key for the user to activate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Activate a new user (by using a supplied key) + + + :param key: The key for the user to activate (required) + :type key: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._activate_serialize( + key=key, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def activate_without_preload_content( + self, + key: Annotated[StrictStr, Field(description="The key for the user to activate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Activate a new user (by using a supplied key) + + + :param key: The key for the user to activate (required) + :type key: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._activate_serialize( + key=key, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _activate_serialize( + self, + key, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if key is not None: + _path_params['key'] = key + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/register/v1/activate/{key}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def mail_exists( + self, + mail: Annotated[StrictStr, Field(description="The mail (authority) of the user to check")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RegisterExists: + """Check if the given mail is already successfully registered + + + :param mail: The mail (authority) of the user to check (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._mail_exists_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RegisterExists", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def mail_exists_with_http_info( + self, + mail: Annotated[StrictStr, Field(description="The mail (authority) of the user to check")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RegisterExists]: + """Check if the given mail is already successfully registered + + + :param mail: The mail (authority) of the user to check (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._mail_exists_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RegisterExists", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def mail_exists_without_preload_content( + self, + mail: Annotated[StrictStr, Field(description="The mail (authority) of the user to check")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Check if the given mail is already successfully registered + + + :param mail: The mail (authority) of the user to check (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._mail_exists_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RegisterExists", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _mail_exists_serialize( + self, + mail, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if mail is not None: + _path_params['mail'] = mail + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/register/v1/exists/{mail}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def recover_password( + self, + mail: Annotated[StrictStr, Field(description="The mail (authority) of the user to recover")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Send a mail to recover/reset password + + + :param mail: The mail (authority) of the user to recover (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._recover_password_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def recover_password_with_http_info( + self, + mail: Annotated[StrictStr, Field(description="The mail (authority) of the user to recover")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Send a mail to recover/reset password + + + :param mail: The mail (authority) of the user to recover (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._recover_password_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def recover_password_without_preload_content( + self, + mail: Annotated[StrictStr, Field(description="The mail (authority) of the user to recover")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Send a mail to recover/reset password + + + :param mail: The mail (authority) of the user to recover (required) + :type mail: str + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._recover_password_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _recover_password_serialize( + self, + mail, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if mail is not None: + _path_params['mail'] = mail + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/register/v1/recover/{mail}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def register( + self, + register_information: Optional[RegisterInformation] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Register a new user + + + :param register_information: + :type register_information: RegisterInformation + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._register_serialize( + register_information=register_information, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def register_with_http_info( + self, + register_information: Optional[RegisterInformation] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Register a new user + + + :param register_information: + :type register_information: RegisterInformation + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
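+
+        Example (editorial sketch, not part of the generated client; assumes the
+        surrounding API class is instantiated here as ``api`` and that the
+        ``RegisterInformation`` model is populated with whatever fields it defines)::
+
+            info = RegisterInformation()  # fill in the model's fields as needed
+            resp = api.register_with_http_info(register_information=info)
+            print(resp.status_code)  # ApiResponse wrapper; .data is None on success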
+ """ # noqa: E501 + + _param = self._register_serialize( + register_information=register_information, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def register_without_preload_content( + self, + register_information: Optional[RegisterInformation] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Register a new user + + + :param register_information: + :type register_information: RegisterInformation + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._register_serialize( + register_information=register_information, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _register_serialize( + self, + register_information, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if register_information is not None: + _body_params = register_information + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/register/v1/register', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def resend_mail( + self, + mail: Annotated[StrictStr, Field(description="The mail a registration is pending for and should be resend to")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Resend a registration mail for a given mail address + + The method will return false if there is no pending registration for the given mail + + :param mail: The mail a registration is pending for and should be resend to (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._resend_mail_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def resend_mail_with_http_info( + self, + mail: Annotated[StrictStr, Field(description="The mail a registration is pending for and should be resend to")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Resend a registration mail for a given mail address + + The method will return false if there is no pending registration for the given mail + + :param mail: The mail a registration is pending for and should be resend to (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
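+
+        Example (editorial sketch, not part of the generated client; the address is
+        a placeholder and a registration must already be pending for it)::
+
+            resp = api.resend_mail_with_http_info(mail="user@example.org")
+            print(resp.status_code)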
+ """ # noqa: E501 + + _param = self._resend_mail_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def resend_mail_without_preload_content( + self, + mail: Annotated[StrictStr, Field(description="The mail a registration is pending for and should be resend to")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Resend a registration mail for a given mail address + + The method will return false if there is no pending registration for the given mail + + :param mail: The mail a registration is pending for and should be resend to (required) + :type mail: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._resend_mail_serialize( + mail=mail, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _resend_mail_serialize( + self, + mail, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if mail is not None: + _path_params['mail'] = mail + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/register/v1/resend/{mail}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def reset_password( + self, + key: Annotated[StrictStr, Field(description="The key for the password reset request")], + password: Annotated[StrictStr, Field(description="The new password for the user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Send a mail to recover/reset password + + + :param key: The key for the password reset request (required) + :type key: str + :param password: The new password for the user (required) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._reset_password_serialize( + key=key, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def reset_password_with_http_info( + self, + key: Annotated[StrictStr, Field(description="The key for the password reset request")], + password: Annotated[StrictStr, Field(description="The new password for the user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Send a mail to recover/reset password + + + :param key: The key for the password reset request (required) + :type key: str + :param password: The new password for the user (required) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._reset_password_serialize( + key=key, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def reset_password_without_preload_content( + self, + key: Annotated[StrictStr, Field(description="The key for the password reset request")], + password: Annotated[StrictStr, Field(description="The new password for the user")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Send a mail to recover/reset password + + + :param key: The key for the password reset request (required) + :type key: str + :param password: The new password for the user (required) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._reset_password_serialize( + key=key, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _reset_password_serialize( + self, + key, + password, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if key is not None: + _path_params['key'] = key + if password is not None: + _path_params['password'] = password + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/register/v1/reset/{key}/{password}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/relationv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/relationv1_api.py new file mode 100644 index 00000000..45a12544 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/relationv1_api.py @@ -0,0 +1,962 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr, field_validator +from typing_extensions import Annotated +from edu_sharing_client.models.node_relation import NodeRelation + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class RELATIONV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def create_relation( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + source: Annotated[StrictStr, Field(description="ID of node")], + type: Annotated[StrictStr, Field(description="ID of node")], + target: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """create a relation between nodes + + Creates a relation between two nodes of the given type. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param source: ID of node (required) + :type source: str + :param type: ID of node (required) + :type type: str + :param target: ID of node (required) + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_relation_serialize( + repository=repository, + source=source, + type=type, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_relation_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + source: Annotated[StrictStr, Field(description="ID of node")], + type: Annotated[StrictStr, Field(description="ID of node")], + target: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """create a relation between nodes + + Creates a relation between two nodes of the given type. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param source: ID of node (required) + :type source: str + :param type: ID of node (required) + :type type: str + :param target: ID of node (required) + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_relation_serialize( + repository=repository, + source=source, + type=type, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_relation_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + source: Annotated[StrictStr, Field(description="ID of node")], + type: Annotated[StrictStr, Field(description="ID of node")], + target: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """create a relation between nodes + + Creates a relation between two nodes of the given type. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param source: ID of node (required) + :type source: str + :param type: ID of node (required) + :type type: str + :param target: ID of node (required) + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_relation_serialize( + repository=repository, + source=source, + type=type, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_relation_serialize( + self, + repository, + source, + type, + target, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if source is not None: + _path_params['source'] = source + if type is not None: + _path_params['type'] = type + if target is not None: + _path_params['target'] = target + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/relation/v1/relation/{repository}/{source}/{type}/{target}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_relation( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + source: Annotated[StrictStr, Field(description="ID of node")], + type: Annotated[StrictStr, Field(description="ID of node")], + target: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """delete a relation between nodes + + Delete a relation between two nodes of the given type. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param source: ID of node (required) + :type source: str + :param type: ID of node (required) + :type type: str + :param target: ID of node (required) + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_relation_serialize( + repository=repository, + source=source, + type=type, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_relation_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + source: Annotated[StrictStr, Field(description="ID of node")], + type: Annotated[StrictStr, Field(description="ID of node")], + target: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """delete a relation between nodes + + Delete a relation between two nodes of the given type. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param source: ID of node (required) + :type source: str + :param type: ID of node (required) + :type type: str + :param target: ID of node (required) + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_relation_serialize( + repository=repository, + source=source, + type=type, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_relation_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + source: Annotated[StrictStr, Field(description="ID of node")], + type: Annotated[StrictStr, Field(description="ID of node")], + target: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete a relation between nodes + + Delete a relation between two nodes of the given type. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param source: ID of node (required) + :type source: str + :param type: ID of node (required) + :type type: str + :param target: ID of node (required) + :type target: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
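+
+        Example (editorial sketch, not part of the generated client; all identifiers
+        are placeholders)::
+
+            raw = api.delete_relation_without_preload_content(
+                repository="-home-",
+                source="<source-node-id>",
+                type="<relation-type>",
+                target="<target-node-id>",
+            )
+            # raw is the underlying REST response; unlike the other variants it is
+            # returned without being read first.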
+ """ # noqa: E501 + + _param = self._delete_relation_serialize( + repository=repository, + source=source, + type=type, + target=target, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_relation_serialize( + self, + repository, + source, + type, + target, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if source is not None: + _path_params['source'] = source + if type is not None: + _path_params['type'] = type + if target is not None: + _path_params['target'] = target + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/relation/v1/relation/{repository}/{source}/{type}/{target}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_relations( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeRelation: + """get all relation of the node + + Returns all relations of the node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_relations_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeRelation", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_relations_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeRelation]: + """get all relation of the node + + Returns all relations of the node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
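+
+        Example (editorial sketch, not part of the generated client; the node id is a
+        placeholder)::
+
+            api = RELATIONV1Api()  # uses ApiClient.get_default()
+            resp = api.get_relations_with_http_info(repository="-home-", node="<node-id>")
+            relations: NodeRelation = resp.data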
+ """ # noqa: E501 + + _param = self._get_relations_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeRelation", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_relations_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get all relation of the node + + Returns all relations of the node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_relations_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeRelation", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_relations_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/relation/v1/relation/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/renderingv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/renderingv1_api.py new file mode 100644 index 00000000..1cf5c83d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/renderingv1_api.py @@ -0,0 +1,711 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr +from typing import Dict, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class RENDERINGV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get_details_snippet1( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RenderingDetailsEntry: + """Get metadata of node. + + Get metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_details_snippet1_serialize( + repository=repository, + node=node, + version=version, + display_mode=display_mode, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_details_snippet1_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RenderingDetailsEntry]: + """Get metadata of node. + + Get metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_details_snippet1_serialize( + repository=repository, + node=node, + version=version, + display_mode=display_mode, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_details_snippet1_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get metadata of node. + + Get metadata of node. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_details_snippet1_serialize( + repository=repository, + node=node, + version=version, + display_mode=display_mode, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_details_snippet1_serialize( + self, + repository, + node, + version, + display_mode, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if version is not None: + + _query_params.append(('version', version)) + + if display_mode is not None: + + _query_params.append(('displayMode', display_mode)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/rendering/v1/details/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_details_snippet_with_parameters( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="additional parameters to send to the rendering service")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RenderingDetailsEntry: + """Get metadata of node. + + Get metadata of node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param request_body: additional parameters to send to the rendering service + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_details_snippet_with_parameters_serialize( + repository=repository, + node=node, + version=version, + display_mode=display_mode, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_details_snippet_with_parameters_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="additional parameters to send to the rendering service")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[RenderingDetailsEntry]: + """Get metadata of node. + + Get metadata of node. 
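# A usage sketch for get_details_snippet_with_parameters (not part of the patched files):
# it mirrors get_details_snippet1 but POSTs additional key/value parameters to the
# rendering service via request_body (Dict[str, str]). Host, node id and the parameter
# keys below are hypothetical placeholders.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.configuration import Configuration
from edu_sharing_client.api.renderingv1_api import RENDERINGV1Api

api_client = ApiClient(Configuration(host="https://example.org/edu-sharing/rest"))  # assumed base URL
rendering_api = RENDERINGV1Api(api_client)
details = rendering_api.get_details_snippet_with_parameters(
    repository="-home-",
    node="00000000-0000-0000-0000-000000000000",  # hypothetical node id
    request_body={"width": "800"},                 # parameter names are an assumption
)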
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param request_body: additional parameters to send to the rendering service + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_details_snippet_with_parameters_serialize( + repository=repository, + node=node, + version=version, + display_mode=display_mode, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_details_snippet_with_parameters_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + version: Annotated[Optional[StrictStr], Field(description="version of node")] = None, + display_mode: Annotated[Optional[StrictStr], Field(description="Rendering displayMode")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="additional parameters to send to the rendering service")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get metadata of node. + + Get metadata of node. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param version: version of node + :type version: str + :param display_mode: Rendering displayMode + :type display_mode: str + :param request_body: additional parameters to send to the rendering service + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_details_snippet_with_parameters_serialize( + repository=repository, + node=node, + version=version, + display_mode=display_mode, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "RenderingDetailsEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_details_snippet_with_parameters_serialize( + self, + repository, + node, + version, + display_mode, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + if version is not None: + + _query_params.append(('version', version)) + + if display_mode is not None: + + _query_params.append(('displayMode', display_mode)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + 
_header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/rendering/v1/details/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/searchv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/searchv1_api.py new file mode 100644 index 00000000..3972d92c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/searchv1_api.py @@ -0,0 +1,3812 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr, field_validator +from typing import List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.models.search_parameters import SearchParameters +from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets +from edu_sharing_client.models.search_result_lrmi import SearchResultLrmi +from edu_sharing_client.models.search_result_node import SearchResultNode + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class SEARCHV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get_metdata( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node_ids: Annotated[Optional[List[StrictStr]], Field(description="nodeIds")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntries: + """get nodes with metadata and collections + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node_ids: nodeIds + :type node_ids: List[str] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_metdata_serialize( + repository=repository, + node_ids=node_ids, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_metdata_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node_ids: Annotated[Optional[List[StrictStr]], Field(description="nodeIds")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntries]: + """get nodes with metadata and collections + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node_ids: nodeIds + :type node_ids: List[str] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_metdata_serialize( + repository=repository, + node_ids=node_ids, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_metdata_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node_ids: Annotated[Optional[List[StrictStr]], Field(description="nodeIds")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get nodes with metadata and collections + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node_ids: nodeIds + :type node_ids: List[str] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_metdata_serialize( + repository=repository, + node_ids=node_ids, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_metdata_serialize( + self, + repository, + node_ids, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'nodeIds': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if node_ids is not None: + + _query_params.append(('nodeIds', node_ids)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/search/v1/metadata/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_relevant_nodes( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultNode: + """Get relevant nodes for the current user + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param 
_request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_relevant_nodes_serialize( + repository=repository, + property_filter=property_filter, + max_items=max_items, + skip_count=skip_count, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_relevant_nodes_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResultNode]: + """Get relevant nodes for the current user + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
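# A usage sketch for get_relevant_nodes (not part of the patched files): it pages through
# nodes relevant for the current user, with paging controlled by maxItems/skipCount as
# documented above. The host URL is a hypothetical placeholder.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.configuration import Configuration
from edu_sharing_client.api.searchv1_api import SEARCHV1Api

api_client = ApiClient(Configuration(host="https://example.org/edu-sharing/rest"))  # assumed base URL
search_api = SEARCHV1Api(api_client)
page = search_api.get_relevant_nodes(repository="-home-", max_items=10, skip_count=0)
print(page)  # SearchResultNode pydantic model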
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_relevant_nodes_serialize( + repository=repository, + property_filter=property_filter, + max_items=max_items, + skip_count=skip_count, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_relevant_nodes_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get relevant nodes for the current user + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_relevant_nodes_serialize( + repository=repository, + property_filter=property_filter, + max_items=max_items, + skip_count=skip_count, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_relevant_nodes_serialize( + self, + repository, + property_filter, + max_items, + skip_count, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/search/v1/relevant/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def load_save_search( + self, + node_id: Annotated[StrictStr, Field(description="Node id of the search item")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + request_body: Annotated[Optional[List[StrictStr]], Field(description="facets")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Node: + """Load a saved search query. + + Load a saved search query. + + :param node_id: Node id of the search item (required) + :type node_id: str + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param request_body: facets + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
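# A usage sketch for load_save_search (not part of the patched files): it re-runs a
# previously saved search query identified by its node id. Note that the generated
# serializer sends the optional facet list as a request body even though the HTTP method
# is GET. Host and node id are hypothetical placeholders.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.configuration import Configuration
from edu_sharing_client.api.searchv1_api import SEARCHV1Api

api_client = ApiClient(Configuration(host="https://example.org/edu-sharing/rest"))  # assumed base URL
search_api = SEARCHV1Api(api_client)
result = search_api.load_save_search(
    node_id="00000000-0000-0000-0000-000000000000",  # hypothetical saved-search node id
    max_items=25,
    skip_count=0,
)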
+ """ # noqa: E501 + + _param = self._load_save_search_serialize( + node_id=node_id, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def load_save_search_with_http_info( + self, + node_id: Annotated[StrictStr, Field(description="Node id of the search item")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + request_body: Annotated[Optional[List[StrictStr]], Field(description="facets")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Node]: + """Load a saved search query. + + Load a saved search query. + + :param node_id: Node id of the search item (required) + :type node_id: str + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param request_body: facets + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._load_save_search_serialize( + node_id=node_id, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def load_save_search_without_preload_content( + self, + node_id: Annotated[StrictStr, Field(description="Node id of the search item")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + request_body: Annotated[Optional[List[StrictStr]], Field(description="facets")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Load a saved search query. + + Load a saved search query. + + :param node_id: Node id of the search item (required) + :type node_id: str + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param request_body: facets + :type request_body: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._load_save_search_serialize( + node_id=node_id, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Node", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _load_save_search_serialize( + self, + node_id, + content_type, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + 'request_body': '', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if node_id is not None: + _path_params['nodeId'] = node_id + # process the query parameters + if content_type is not None: + + _query_params.append(('contentType', content_type)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the 
HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/search/v1/queries/load/{nodeId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def save_search( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + name: Annotated[StrictStr, Field(description="Name of the new search item")], + mds_query_criteria: Annotated[List[MdsQueryCriteria], Field(description="search parameters")], + replace: Annotated[Optional[StrictBool], Field(description="Replace if search with the same name exists")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Save a search query. + + Save a search query. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param name: Name of the new search item (required) + :type name: str + :param mds_query_criteria: search parameters (required) + :type mds_query_criteria: List[MdsQueryCriteria] + :param replace: Replace if search with the same name exists + :type replace: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
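# A usage sketch for save_search (not part of the patched files): it stores a query under
# a name, with the criteria passed as MdsQueryCriteria objects imported by the generated
# module above. Host, query id and criteria values are hypothetical placeholders, and the
# MdsQueryCriteria field names are an assumption.
from edu_sharing_client.api_client import ApiClient
from edu_sharing_client.configuration import Configuration
from edu_sharing_client.api.searchv1_api import SEARCHV1Api
from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria

api_client = ApiClient(Configuration(host="https://example.org/edu-sharing/rest"))  # assumed base URL
search_api = SEARCHV1Api(api_client)
saved = search_api.save_search(
    repository="-home-",
    metadataset="-default-",
    query="ngsearch",                  # hypothetical query id
    name="physics-worksheets",         # name of the new saved search
    mds_query_criteria=[MdsQueryCriteria(property="ngsearchword", values=["physik"])],
    replace=True,
)
print(saved)  # NodeEntry pydantic model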
+ """ # noqa: E501 + + _param = self._save_search_serialize( + repository=repository, + metadataset=metadataset, + query=query, + name=name, + mds_query_criteria=mds_query_criteria, + replace=replace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def save_search_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + name: Annotated[StrictStr, Field(description="Name of the new search item")], + mds_query_criteria: Annotated[List[MdsQueryCriteria], Field(description="search parameters")], + replace: Annotated[Optional[StrictBool], Field(description="Replace if search with the same name exists")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Save a search query. + + Save a search query. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param name: Name of the new search item (required) + :type name: str + :param mds_query_criteria: search parameters (required) + :type mds_query_criteria: List[MdsQueryCriteria] + :param replace: Replace if search with the same name exists + :type replace: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._save_search_serialize( + repository=repository, + metadataset=metadataset, + query=query, + name=name, + mds_query_criteria=mds_query_criteria, + replace=replace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def save_search_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + name: Annotated[StrictStr, Field(description="Name of the new search item")], + mds_query_criteria: Annotated[List[MdsQueryCriteria], Field(description="search parameters")], + replace: Annotated[Optional[StrictBool], Field(description="Replace if search with the same name exists")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Save a search query. + + Save a search query. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param name: Name of the new search item (required) + :type name: str + :param mds_query_criteria: search parameters (required) + :type mds_query_criteria: List[MdsQueryCriteria] + :param replace: Replace if search with the same name exists + :type replace: bool + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._save_search_serialize( + repository=repository, + metadataset=metadataset, + query=query, + name=name, + mds_query_criteria=mds_query_criteria, + replace=replace, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _save_search_serialize( + self, + repository, + metadataset, + query, + name, + mds_query_criteria, + replace, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'MdsQueryCriteria': '', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + if query is not None: + _path_params['query'] = query + # process the query parameters + if name is not None: + + _query_params.append(('name', name)) + + if replace is not None: + + _query_params.append(('replace', replace)) + + # process the header parameters + # process the form parameters + # process the body parameter + if mds_query_criteria is not None: + _body_params = mds_query_criteria + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/search/v1/queries/{repository}/{metadataset}/{query}/save', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: 
Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultNode: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters=search_parameters, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResultNode]: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters=search_parameters, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters=search_parameters, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_serialize( + self, + repository, + metadataset, + query, + search_parameters, + content_type, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + if query is not None: + _path_params['query'] = query + # process the query parameters + if content_type is not None: + + _query_params.append(('contentType', content_type)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + if search_parameters is not None: + _body_params = search_parameters + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/search/v1/queries/{repository}/{metadataset}/{query}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_by_property( + self, + repository: Annotated[StrictStr, 
Field(description="ID of repository (or \"-home-\" for home repository)")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + combine_mode: Annotated[Optional[StrictStr], Field(description="Combine mode, AND or OR, defaults to AND")] = None, + var_property: Annotated[Optional[List[StrictStr]], Field(description="One (or more) properties to search for, will be combined by specified combine mode")] = None, + value: Annotated[Optional[List[StrictStr]], Field(description="One (or more) values to search for, matching the properties defined before")] = None, + comparator: Annotated[Optional[List[StrictStr]], Field(description="(Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >=")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultNode: + """Search for custom properties with custom values + + e.g. property=cm:name, value:*Test* + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param content_type: Type of element + :type content_type: str + :param combine_mode: Combine mode, AND or OR, defaults to AND + :type combine_mode: str + :param var_property: One (or more) properties to search for, will be combined by specified combine mode + :type var_property: List[str] + :param value: One (or more) values to search for, matching the properties defined before + :type value: List[str] + :param comparator: (Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >= + :type comparator: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_by_property_serialize( + repository=repository, + content_type=content_type, + combine_mode=combine_mode, + var_property=var_property, + value=value, + comparator=comparator, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_by_property_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + combine_mode: Annotated[Optional[StrictStr], Field(description="Combine mode, AND or OR, defaults to AND")] = None, + var_property: Annotated[Optional[List[StrictStr]], Field(description="One (or more) properties to search for, will be combined by specified combine mode")] = None, + value: Annotated[Optional[List[StrictStr]], Field(description="One (or more) values to search for, matching the properties defined before")] = None, + comparator: Annotated[Optional[List[StrictStr]], Field(description="(Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >=")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResultNode]: + """Search for custom properties with custom values + + e.g. 
property=cm:name, value:*Test* + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param content_type: Type of element + :type content_type: str + :param combine_mode: Combine mode, AND or OR, defaults to AND + :type combine_mode: str + :param var_property: One (or more) properties to search for, will be combined by specified combine mode + :type var_property: List[str] + :param value: One (or more) values to search for, matching the properties defined before + :type value: List[str] + :param comparator: (Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >= + :type comparator: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_by_property_serialize( + repository=repository, + content_type=content_type, + combine_mode=combine_mode, + var_property=var_property, + value=value, + comparator=comparator, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_by_property_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + combine_mode: Annotated[Optional[StrictStr], Field(description="Combine mode, AND or OR, defaults to AND")] = None, + var_property: Annotated[Optional[List[StrictStr]], Field(description="One (or more) properties to search for, will be combined by specified combine mode")] = None, + value: Annotated[Optional[List[StrictStr]], Field(description="One (or more) values to search for, matching the properties defined before")] = None, + comparator: Annotated[Optional[List[StrictStr]], Field(description="(Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >=")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search for custom properties with custom values + + e.g. 
property=cm:name, value:*Test* + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param content_type: Type of element + :type content_type: str + :param combine_mode: Combine mode, AND or OR, defaults to AND + :type combine_mode: str + :param var_property: One (or more) properties to search for, will be combined by specified combine mode + :type var_property: List[str] + :param value: One (or more) values to search for, matching the properties defined before + :type value: List[str] + :param comparator: (Optional) comparator, only relevant for date or numerical fields, currently allowed =, <=, >= + :type comparator: List[str] + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_by_property_serialize( + repository=repository, + content_type=content_type, + combine_mode=combine_mode, + var_property=var_property, + value=value, + comparator=comparator, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_by_property_serialize( + self, + repository, + content_type, + combine_mode, + var_property, + value, + comparator, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'property': 'multi', + 'value': 'multi', + 'comparator': 'multi', + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if content_type is not None: + + _query_params.append(('contentType', content_type)) + + if combine_mode is not None: + + _query_params.append(('combineMode', combine_mode)) + + if var_property is not None: + + _query_params.append(('property', var_property)) + + if value is not None: + + _query_params.append(('value', value)) + + if comparator is not None: + + _query_params.append(('comparator', comparator)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/search/v1/custom/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_contributor( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + search_word: 
Annotated[StrictStr, Field(description="search word")], + contributor_kind: Annotated[StrictStr, Field(description="contributor kind")], + fields: Annotated[Optional[List[StrictStr]], Field(description="define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url']")] = None, + contributor_properties: Annotated[Optional[List[StrictStr]], Field(description="define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator']")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Search for contributors + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param search_word: search word (required) + :type search_word: str + :param contributor_kind: contributor kind (required) + :type contributor_kind: str + :param fields: define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url'] + :type fields: List[str] + :param contributor_properties: define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator'] + :type contributor_properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_contributor_serialize( + repository=repository, + search_word=search_word, + contributor_kind=contributor_kind, + fields=fields, + contributor_properties=contributor_properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_contributor_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + search_word: Annotated[StrictStr, Field(description="search word")], + contributor_kind: Annotated[StrictStr, Field(description="contributor kind")], + fields: Annotated[Optional[List[StrictStr]], Field(description="define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url']")] = None, + contributor_properties: Annotated[Optional[List[StrictStr]], Field(description="define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator']")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Search for contributors + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param search_word: search word (required) + :type search_word: str + :param contributor_kind: contributor kind (required) + :type contributor_kind: str + :param fields: define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url'] + :type fields: List[str] + :param contributor_properties: define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator'] + :type contributor_properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_contributor_serialize( + repository=repository, + search_word=search_word, + contributor_kind=contributor_kind, + fields=fields, + contributor_properties=contributor_properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_contributor_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + search_word: Annotated[StrictStr, Field(description="search word")], + contributor_kind: Annotated[StrictStr, Field(description="contributor kind")], + fields: Annotated[Optional[List[StrictStr]], Field(description="define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url']")] = None, + contributor_properties: Annotated[Optional[List[StrictStr]], Field(description="define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator']")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search for contributors + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param search_word: search word (required) + :type search_word: str + :param contributor_kind: contributor kind (required) + :type contributor_kind: str + :param fields: define which authority fields should be searched: ['firstname', 'lastname', 'email', 'uuid', 'url'] + :type fields: List[str] + :param contributor_properties: define which contributor props should be searched: ['ccm:lifecyclecontributer_author', 'ccm:lifecyclecontributer_publisher', ..., 'ccm:metadatacontributer_creator', 'ccm:metadatacontributer_validator'] + :type contributor_properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_contributor_serialize( + repository=repository, + search_word=search_word, + contributor_kind=contributor_kind, + fields=fields, + contributor_properties=contributor_properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_contributor_serialize( + self, + repository, + search_word, + contributor_kind, + fields, + contributor_properties, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'fields': 'multi', + 'contributorProperties': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if search_word is not None: + + _query_params.append(('searchWord', search_word)) + + if contributor_kind is not None: + + _query_params.append(('contributorKind', contributor_kind)) + + if fields is not None: + + _query_params.append(('fields', fields)) + + if contributor_properties is not None: + + _query_params.append(('contributorProperties', contributor_properties)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/search/v1/queries/{repository}/contributor', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_facets( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters_facets: Annotated[SearchParametersFacets, Field(description="facet parameters")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + 
Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultNode: + """Search in facets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters_facets: facet parameters (required) + :type search_parameters_facets: SearchParametersFacets + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_facets_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters_facets=search_parameters_facets, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_facets_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters_facets: Annotated[SearchParametersFacets, Field(description="facet parameters")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResultNode]: + """Search in facets. + + Perform queries based on metadata sets. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters_facets: facet parameters (required) + :type search_parameters_facets: SearchParametersFacets + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_facets_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters_facets=search_parameters_facets, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_facets_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters_facets: Annotated[SearchParametersFacets, Field(description="facet parameters")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Search in facets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters_facets: facet parameters (required) + :type search_parameters_facets: SearchParametersFacets + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_facets_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters_facets=search_parameters_facets, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_facets_serialize( + self, + repository, + metadataset, + query, + search_parameters_facets, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + if query is not None: + _path_params['query'] = query + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if search_parameters_facets is not None: + _body_params = search_parameters_facets + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/search/v1/queries/{repository}/{metadataset}/{query}/facets', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_fingerprint( + self, + repository: 
Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + nodeid: Annotated[StrictStr, Field(description="nodeid")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultNode: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param nodeid: nodeid (required) + :type nodeid: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_fingerprint_serialize( + repository=repository, + nodeid=nodeid, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_fingerprint_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + nodeid: Annotated[StrictStr, Field(description="nodeid")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResultNode]: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param nodeid: nodeid (required) + :type nodeid: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_fingerprint_serialize( + repository=repository, + nodeid=nodeid, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_fingerprint_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + nodeid: Annotated[StrictStr, Field(description="nodeid")], + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param nodeid: nodeid (required) + :type nodeid: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_fingerprint_serialize( + repository=repository, + nodeid=nodeid, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultNode", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_fingerprint_serialize( + self, + repository, + nodeid, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if nodeid is not None: + _path_params['nodeid'] = nodeid + # process the query parameters + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/search/v1/queries/{repository}/fingerprint/{nodeid}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, 
+ _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search_lrmi( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SearchResultLrmi: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_lrmi_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters=search_parameters, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultLrmi", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search_lrmi_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SearchResultLrmi]: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search_lrmi_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters=search_parameters, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultLrmi", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search_lrmi_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + metadataset: Annotated[StrictStr, Field(description="ID of metadataset (or \"-default-\" for default metadata set)")], + query: Annotated[StrictStr, Field(description="ID of query")], + search_parameters: Annotated[SearchParameters, Field(description="search parameters")], + content_type: Annotated[Optional[StrictStr], Field(description="Type of element")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + property_filter: Annotated[Optional[List[StrictStr]], Field(description="property filter for result nodes (or \"-all-\" for all properties)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Perform queries based on metadata sets. + + Perform queries based on metadata sets. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param metadataset: ID of metadataset (or \"-default-\" for default metadata set) (required) + :type metadataset: str + :param query: ID of query (required) + :type query: str + :param search_parameters: search parameters (required) + :type search_parameters: SearchParameters + :param content_type: Type of element + :type content_type: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param property_filter: property filter for result nodes (or \"-all-\" for all properties) + :type property_filter: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search_lrmi_serialize( + repository=repository, + metadataset=metadataset, + query=query, + search_parameters=search_parameters, + content_type=content_type, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + property_filter=property_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SearchResultLrmi", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search_lrmi_serialize( + self, + repository, + metadataset, + query, + search_parameters, + content_type, + max_items, + skip_count, + sort_properties, + sort_ascending, + property_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + 'propertyFilter': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if metadataset is not None: + _path_params['metadataset'] = metadataset + if query is not None: + _path_params['query'] = query + # process the query parameters + if content_type is not None: + + _query_params.append(('contentType', content_type)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + if property_filter is not None: + + _query_params.append(('propertyFilter', property_filter)) + + # process the header 
parameters + # process the form parameters + # process the body parameter + if search_parameters is not None: + _body_params = search_parameters + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/search/v1/queries/{repository}/{metadataset}/{query}/lrmi', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/sharingv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/sharingv1_api.py new file mode 100644 index 00000000..c088b541 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/sharingv1_api.py @@ -0,0 +1,747 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.models.sharing_info import SharingInfo + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class SHARINGV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get_children1( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share: Annotated[StrictStr, Field(description="Share token")], + password: Annotated[Optional[StrictStr], Field(description="Password (required if share is locked)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntries: + """Get all children of this share. + + Only valid for shared folders + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share: Share token (required) + :type share: str + :param password: Password (required if share is locked) + :type password: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_children1_serialize( + repository=repository, + node=node, + share=share, + password=password, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_children1_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share: Annotated[StrictStr, Field(description="Share token")], + password: Annotated[Optional[StrictStr], Field(description="Password (required if share is locked)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntries]: + """Get all children of this share. + + Only valid for shared folders + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share: Share token (required) + :type share: str + :param password: Password (required if share is locked) + :type password: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_children1_serialize( + repository=repository, + node=node, + share=share, + password=password, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_children1_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share: Annotated[StrictStr, Field(description="Share token")], + password: Annotated[Optional[StrictStr], Field(description="Password (required if share is locked)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all children of this share. + + Only valid for shared folders + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share: Share token (required) + :type share: str + :param password: Password (required if share is locked) + :type password: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_children1_serialize( + repository=repository, + node=node, + share=share, + password=password, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntries", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_children1_serialize( + self, + repository, + node, + share, + password, + max_items, + skip_count, + sort_properties, + sort_ascending, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if share is not None: + _path_params['share'] = share + # process the query parameters + if password is not None: + + _query_params.append(('password', password)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/sharing/v1/sharing/{repository}/{node}/{share}/children', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + 
_host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share: Annotated[StrictStr, Field(description="Share token")], + password: Annotated[Optional[StrictStr], Field(description="Password to validate (optional)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> SharingInfo: + """Get general info of a share. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share: Share token (required) + :type share: str + :param password: Password to validate (optional) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_info_serialize( + repository=repository, + node=node, + share=share, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SharingInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_info_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share: Annotated[StrictStr, Field(description="Share token")], + password: Annotated[Optional[StrictStr], Field(description="Password to validate (optional)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[SharingInfo]: + """Get general info of a share. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share: Share token (required) + :type share: str + :param password: Password to validate (optional) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_info_serialize( + repository=repository, + node=node, + share=share, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SharingInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_info_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="ID of node")], + share: Annotated[StrictStr, Field(description="Share token")], + password: Annotated[Optional[StrictStr], Field(description="Password to validate (optional)")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get general info of a share. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: ID of node (required) + :type node: str + :param share: Share token (required) + :type share: str + :param password: Password to validate (optional) + :type password: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_info_serialize( + repository=repository, + node=node, + share=share, + password=password, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "SharingInfo", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_info_serialize( + self, + repository, + node, + share, + password, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + if share is not None: + _path_params['share'] = share + # process the query parameters + if password is not None: + + _query_params.append(('password', password)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/sharing/v1/sharing/{repository}/{node}/{share}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/statisticv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/statisticv1_api.py new file mode 100644 index 00000000..4b080a7c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/statisticv1_api.py @@ -0,0 +1,1989 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictInt, StrictStr, field_validator +from typing import Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.filter import Filter +from edu_sharing_client.models.statistics import Statistics +from edu_sharing_client.models.statistics_global import StatisticsGlobal + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class STATISTICV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def get( + self, + context: Annotated[StrictStr, Field(description="context, the node where to start")], + filter: Annotated[Filter, Field(description="filter")], + properties: Annotated[Optional[List[StrictStr]], Field(description="properties")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Statistics: + """Get statistics of repository. + + Statistics. + + :param context: context, the node where to start (required) + :type context: str + :param filter: filter (required) + :type filter: Filter + :param properties: properties + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_serialize( + context=context, + filter=filter, + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Statistics", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_with_http_info( + self, + context: Annotated[StrictStr, Field(description="context, the node where to start")], + filter: Annotated[Filter, Field(description="filter")], + properties: Annotated[Optional[List[StrictStr]], Field(description="properties")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Statistics]: + """Get statistics of repository. + + Statistics. + + :param context: context, the node where to start (required) + :type context: str + :param filter: filter (required) + :type filter: Filter + :param properties: properties + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_serialize( + context=context, + filter=filter, + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Statistics", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_without_preload_content( + self, + context: Annotated[StrictStr, Field(description="context, the node where to start")], + filter: Annotated[Filter, Field(description="filter")], + properties: Annotated[Optional[List[StrictStr]], Field(description="properties")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get statistics of repository. + + Statistics. + + :param context: context, the node where to start (required) + :type context: str + :param filter: filter (required) + :type filter: Filter + :param properties: properties + :type properties: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_serialize( + context=context, + filter=filter, + properties=properties, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Statistics", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_serialize( + self, + context, + filter, + properties, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'properties': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if context is not None: + _path_params['context'] = context + # process the query parameters + if properties is not None: + + _query_params.append(('properties', properties)) + + # process the header parameters + # process the form parameters + # process the body parameter + if filter is not None: + _body_params = filter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/statistic/v1/facets/{context}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_global_statistics( + self, + group: Annotated[Optional[StrictStr], Field(description="primary property to build facets and count+group values")] = None, + sub_group: Annotated[Optional[List[StrictStr]], Field(description="additional properties to build facets and count+sub-group values")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StatisticsGlobal: + """Get stats. + + Get global statistics for this repository. + + :param group: primary property to build facets and count+group values + :type group: str + :param sub_group: additional properties to build facets and count+sub-group values + :type sub_group: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. 
It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_global_statistics_serialize( + group=group, + sub_group=sub_group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StatisticsGlobal", + '401': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_global_statistics_with_http_info( + self, + group: Annotated[Optional[StrictStr], Field(description="primary property to build facets and count+group values")] = None, + sub_group: Annotated[Optional[List[StrictStr]], Field(description="additional properties to build facets and count+sub-group values")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StatisticsGlobal]: + """Get stats. + + Get global statistics for this repository. + + :param group: primary property to build facets and count+group values + :type group: str + :param sub_group: additional properties to build facets and count+sub-group values + :type sub_group: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
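+
+        Illustrative usage (editor's sketch; ``api`` is an assumed instance of
+        this class and the property names are hypothetical grouping fields)::
+
+            resp = api.get_global_statistics_with_http_info(
+                group="ccm:replicationsource",                      # assumed primary grouping property
+                sub_group=["ccm:educationallearningresourcetype"],  # assumed sub-grouping properties
+            )
+            stats_global = resp.data       # -> StatisticsGlobal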
+ """ # noqa: E501 + + _param = self._get_global_statistics_serialize( + group=group, + sub_group=sub_group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StatisticsGlobal", + '401': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_global_statistics_without_preload_content( + self, + group: Annotated[Optional[StrictStr], Field(description="primary property to build facets and count+group values")] = None, + sub_group: Annotated[Optional[List[StrictStr]], Field(description="additional properties to build facets and count+sub-group values")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get stats. + + Get global statistics for this repository. + + :param group: primary property to build facets and count+group values + :type group: str + :param sub_group: additional properties to build facets and count+sub-group values + :type sub_group: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_global_statistics_serialize( + group=group, + sub_group=sub_group, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StatisticsGlobal", + '401': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_global_statistics_serialize( + self, + group, + sub_group, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'subGroup': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if group is not None: + + _query_params.append(('group', group)) + + if sub_group is not None: + + _query_params.append(('subGroup', sub_group)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/statistic/v1/public', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_node_data( + self, + id: Annotated[StrictStr, Field(description="node id to fetch data for")], + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param id: node id to fetch data for (required) + :type id: str + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_node_data_serialize( + id=id, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_node_data_with_http_info( + self, + id: Annotated[StrictStr, Field(description="node id to fetch data for")], + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param id: node id to fetch data for (required) + :type id: str + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
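+
+        Illustrative usage (editor's sketch; the node id is a placeholder and the
+        timestamp unit is assumed to be epoch milliseconds)::
+
+            resp = api.get_node_data_with_http_info(
+                id="<node-id>",            # hypothetical node id
+                date_from=1577836800000,   # 2020-01-01T00:00:00Z, assumed epoch-ms
+            )
+            tracked_actions = resp.data    # raw string payload ('200' maps to "str")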
+ """ # noqa: E501 + + _param = self._get_node_data_serialize( + id=id, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_node_data_without_preload_content( + self, + id: Annotated[StrictStr, Field(description="node id to fetch data for")], + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param id: node id to fetch data for (required) + :type id: str + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_node_data_serialize( + id=id, + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_node_data_serialize( + self, + id, + date_from, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if id is not None: + _path_params['id'] = id + # process the query parameters + if date_from is not None: + + _query_params.append(('dateFrom', date_from)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/statistic/v1/statistics/nodes/node/{id}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_nodes_altered_in_range1( + self, + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_nodes_altered_in_range1_serialize( + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_nodes_altered_in_range1_with_http_info( + self, + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_nodes_altered_in_range1_serialize( + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_nodes_altered_in_range1_without_preload_content( + self, + date_from: Annotated[StrictInt, Field(description="date range from")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get the range of nodes which had tracked actions since a given timestamp + + requires admin + + :param date_from: date range from (required) + :type date_from: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_nodes_altered_in_range1_serialize( + date_from=date_from, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_nodes_altered_in_range1_serialize( + self, + date_from, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if date_from is not None: + + _query_params.append(('dateFrom', date_from)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/statistic/v1/statistics/nodes/altered', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_statistics_node( + self, + grouping: Annotated[StrictStr, Field(description="Grouping type (by date)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + date_to: Annotated[StrictInt, Field(description="date range to")], + mediacenter: Annotated[Optional[StrictStr], Field(description="the mediacenter to filter for statistics")] = None, + additional_fields: Annotated[Optional[List[StrictStr]], Field(description="additionals fields of the custom json object stored in each query that should be returned")] = None, + group_field: Annotated[Optional[List[StrictStr]], Field(description="grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date)")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="filters for the custom json object stored in each entry")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get statistics for node actions + + requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_NODES for global stats or to be admin of the requested mediacenter + + :param grouping: Grouping type (by date) (required) + :type grouping: str + :param date_from: date range from (required) + :type 
date_from: int + :param date_to: date range to (required) + :type date_to: int + :param mediacenter: the mediacenter to filter for statistics + :type mediacenter: str + :param additional_fields: additionals fields of the custom json object stored in each query that should be returned + :type additional_fields: List[str] + :param group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) + :type group_field: List[str] + :param request_body: filters for the custom json object stored in each entry + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_statistics_node_serialize( + grouping=grouping, + date_from=date_from, + date_to=date_to, + mediacenter=mediacenter, + additional_fields=additional_fields, + group_field=group_field, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_statistics_node_with_http_info( + self, + grouping: Annotated[StrictStr, Field(description="Grouping type (by date)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + date_to: Annotated[StrictInt, Field(description="date range to")], + mediacenter: Annotated[Optional[StrictStr], Field(description="the mediacenter to filter for statistics")] = None, + additional_fields: Annotated[Optional[List[StrictStr]], Field(description="additionals fields of the custom json object stored in each query that should be returned")] = None, + group_field: Annotated[Optional[List[StrictStr]], Field(description="grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date)")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="filters for the custom json object stored in each entry")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, 
+ _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get statistics for node actions + + requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_NODES for global stats or to be admin of the requested mediacenter + + :param grouping: Grouping type (by date) (required) + :type grouping: str + :param date_from: date range from (required) + :type date_from: int + :param date_to: date range to (required) + :type date_to: int + :param mediacenter: the mediacenter to filter for statistics + :type mediacenter: str + :param additional_fields: additionals fields of the custom json object stored in each query that should be returned + :type additional_fields: List[str] + :param group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) + :type group_field: List[str] + :param request_body: filters for the custom json object stored in each entry + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_statistics_node_serialize( + grouping=grouping, + date_from=date_from, + date_to=date_to, + mediacenter=mediacenter, + additional_fields=additional_fields, + group_field=group_field, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_statistics_node_without_preload_content( + self, + grouping: Annotated[StrictStr, Field(description="Grouping type (by date)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + date_to: Annotated[StrictInt, Field(description="date range to")], + mediacenter: Annotated[Optional[StrictStr], Field(description="the mediacenter to filter for statistics")] = None, + additional_fields: Annotated[Optional[List[StrictStr]], Field(description="additionals fields of the custom json object stored in each query that should be returned")] = None, + group_field: Annotated[Optional[List[StrictStr]], Field(description="grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date)")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="filters for the custom json object stored in each entry")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get statistics for node actions + + requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_NODES for global stats or to be admin of the requested mediacenter + + :param grouping: Grouping type (by date) (required) + :type grouping: str + :param date_from: date range from (required) + :type date_from: int + :param date_to: date range to (required) + :type date_to: int + :param mediacenter: the mediacenter to filter for statistics + :type mediacenter: str + :param additional_fields: additionals fields of the custom json object stored in each query that should be returned + :type additional_fields: List[str] + :param group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) + :type group_field: List[str] + :param request_body: filters for the custom json object stored in each entry + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_statistics_node_serialize( + grouping=grouping, + date_from=date_from, + date_to=date_to, + mediacenter=mediacenter, + additional_fields=additional_fields, + group_field=group_field, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_statistics_node_serialize( + self, + grouping, + date_from, + date_to, + mediacenter, + additional_fields, + group_field, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'additionalFields': 'multi', + 'groupField': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if grouping is not None: + + _query_params.append(('grouping', grouping)) + + if date_from is not None: + + _query_params.append(('dateFrom', date_from)) + + if date_to is not None: + + _query_params.append(('dateTo', date_to)) + + if mediacenter is not None: + + _query_params.append(('mediacenter', mediacenter)) + + if additional_fields is not None: + + _query_params.append(('additionalFields', additional_fields)) + + if group_field is not None: + + _query_params.append(('groupField', group_field)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/statistic/v1/statistics/nodes', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + 
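+    # -- Illustrative usage sketch (editorial note, not generated code) ---------
+    # A minimal, hypothetical example of requesting node-action statistics via the
+    # method above. The grouping value, the epoch-millisecond timestamps and the
+    # mediacenter id are assumptions for illustration only:
+    #
+    #     from edu_sharing_client.api_client import ApiClient
+    #     from edu_sharing_client.api.statisticv1_api import STATISTICV1Api
+    #
+    #     api = STATISTICV1Api(ApiClient.get_default())
+    #     node_stats = api.get_statistics_node(
+    #         grouping="None",                 # assumed grouping type
+    #         date_from=1577836800000,         # 2020-01-01, assumed epoch-ms
+    #         date_to=1609459199000,           # 2020-12-31, assumed epoch-ms
+    #         mediacenter="<mediacenter-id>",  # hypothetical mediacenter to filter on
+    #     )
+    # ----------------------------------------------------------------------------
+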
+ + + + @validate_call + def get_statistics_user( + self, + grouping: Annotated[StrictStr, Field(description="Grouping type (by date)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + date_to: Annotated[StrictInt, Field(description="date range to")], + mediacenter: Annotated[Optional[StrictStr], Field(description="the mediacenter to filter for statistics")] = None, + additional_fields: Annotated[Optional[List[StrictStr]], Field(description="additionals fields of the custom json object stored in each query that should be returned")] = None, + group_field: Annotated[Optional[List[StrictStr]], Field(description="grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date)")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="filters for the custom json object stored in each entry")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """get statistics for user actions (login, logout) + + requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_USER for global stats or to be admin of the requested mediacenter + + :param grouping: Grouping type (by date) (required) + :type grouping: str + :param date_from: date range from (required) + :type date_from: int + :param date_to: date range to (required) + :type date_to: int + :param mediacenter: the mediacenter to filter for statistics + :type mediacenter: str + :param additional_fields: additionals fields of the custom json object stored in each query that should be returned + :type additional_fields: List[str] + :param group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) + :type group_field: List[str] + :param request_body: filters for the custom json object stored in each entry + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_statistics_user_serialize( + grouping=grouping, + date_from=date_from, + date_to=date_to, + mediacenter=mediacenter, + additional_fields=additional_fields, + group_field=group_field, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_statistics_user_with_http_info( + self, + grouping: Annotated[StrictStr, Field(description="Grouping type (by date)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + date_to: Annotated[StrictInt, Field(description="date range to")], + mediacenter: Annotated[Optional[StrictStr], Field(description="the mediacenter to filter for statistics")] = None, + additional_fields: Annotated[Optional[List[StrictStr]], Field(description="additionals fields of the custom json object stored in each query that should be returned")] = None, + group_field: Annotated[Optional[List[StrictStr]], Field(description="grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date)")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="filters for the custom json object stored in each entry")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """get statistics for user actions (login, logout) + + requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_USER for global stats or to be admin of the requested mediacenter + + :param grouping: Grouping type (by date) (required) + :type grouping: str + :param date_from: date range from (required) + :type date_from: int + :param date_to: date range to (required) + :type date_to: int + :param mediacenter: the mediacenter to filter for statistics + :type mediacenter: str + :param additional_fields: additionals fields of the custom json object stored in each query that should be returned + :type additional_fields: List[str] + :param group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) + :type group_field: List[str] + :param request_body: filters for the custom json object stored in each entry + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_statistics_user_serialize( + grouping=grouping, + date_from=date_from, + date_to=date_to, + mediacenter=mediacenter, + additional_fields=additional_fields, + group_field=group_field, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_statistics_user_without_preload_content( + self, + grouping: Annotated[StrictStr, Field(description="Grouping type (by date)")], + date_from: Annotated[StrictInt, Field(description="date range from")], + date_to: Annotated[StrictInt, Field(description="date range to")], + mediacenter: Annotated[Optional[StrictStr], Field(description="the mediacenter to filter for statistics")] = None, + additional_fields: Annotated[Optional[List[StrictStr]], Field(description="additionals fields of the custom json object stored in each query that should be returned")] = None, + group_field: Annotated[Optional[List[StrictStr]], Field(description="grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date)")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="filters for the custom json object stored in each entry")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get statistics for user actions (login, logout) + + requires either toolpermission TOOLPERMISSION_GLOBAL_STATISTICS_USER for global stats or to be admin of the requested mediacenter + + :param grouping: Grouping type (by date) (required) + :type grouping: str + :param date_from: date range from (required) + :type date_from: int + :param date_to: date range to (required) + :type date_to: int + :param mediacenter: the mediacenter to filter for statistics + :type mediacenter: str + :param additional_fields: additionals fields of the custom json object stored in each query that should be returned + :type additional_fields: List[str] + :param group_field: grouping fields of the custom json object stored in each query (currently only meant to be combined with no grouping by date) + :type group_field: List[str] + :param 
request_body: filters for the custom json object stored in each entry + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_statistics_user_serialize( + grouping=grouping, + date_from=date_from, + date_to=date_to, + mediacenter=mediacenter, + additional_fields=additional_fields, + group_field=group_field, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_statistics_user_serialize( + self, + grouping, + date_from, + date_to, + mediacenter, + additional_fields, + group_field, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'additionalFields': 'multi', + 'groupField': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if grouping is not None: + + _query_params.append(('grouping', grouping)) + + if date_from is not None: + + _query_params.append(('dateFrom', date_from)) + + if date_to is not None: + + _query_params.append(('dateTo', date_to)) + + if mediacenter is not None: + + _query_params.append(('mediacenter', mediacenter)) + + if additional_fields is not None: + + _query_params.append(('additionalFields', additional_fields)) + + if group_field is not None: + + _query_params.append(('groupField', group_field)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not 
None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/statistic/v1/statistics/users', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/streamv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/streamv1_api.py new file mode 100644 index 00000000..fbc1fe2f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/streamv1_api.py @@ -0,0 +1,1920 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictInt, StrictStr +from typing import Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.stream_entry_input import StreamEntryInput +from edu_sharing_client.models.stream_list import StreamList + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class STREAMV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def add_entry( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + stream_entry_input: Annotated[StreamEntryInput, Field(description="Stream object to add")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamEntryInput: + """add a new stream object. + + will return the object and add the id to the object if creation succeeded + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param stream_entry_input: Stream object to add (required) + :type stream_entry_input: StreamEntryInput + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._add_entry_serialize( + repository=repository, + stream_entry_input=stream_entry_input, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamEntryInput", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def add_entry_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + stream_entry_input: Annotated[StreamEntryInput, Field(description="Stream object to add")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamEntryInput]: + """add a new stream object. + + will return the object and add the id to the object if creation succeeded + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param stream_entry_input: Stream object to add (required) + :type stream_entry_input: StreamEntryInput + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_entry_serialize( + repository=repository, + stream_entry_input=stream_entry_input, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamEntryInput", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def add_entry_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + stream_entry_input: Annotated[StreamEntryInput, Field(description="Stream object to add")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """add a new stream object. + + will return the object and add the id to the object if creation succeeded + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param stream_entry_input: Stream object to add (required) + :type stream_entry_input: StreamEntryInput + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._add_entry_serialize( + repository=repository, + stream_entry_input=stream_entry_input, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamEntryInput", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _add_entry_serialize( + self, + repository, + stream_entry_input, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if stream_entry_input is not None: + _body_params = stream_entry_input + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/stream/v1/add/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def can_access( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="The property to aggregate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """test + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: The property to aggregate (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._can_access_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def can_access_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="The property to aggregate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """test + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: The property to aggregate (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._can_access_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def can_access_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + node: Annotated[StrictStr, Field(description="The property to aggregate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """test + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param node: The property to aggregate (required) + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._can_access_serialize( + repository=repository, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _can_access_serialize( + self, + repository, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if node is not None: + _path_params['node'] = node + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/stream/v1/access/{repository}/{node}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def delete_entry( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + entry: Annotated[StrictStr, Field(description="entry id to delete")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """delete a stream object + + the current user must be author of the given stream object + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param entry: entry id to delete (required) + :type entry: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._delete_entry_serialize( + repository=repository, + entry=entry, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_entry_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + entry: Annotated[StrictStr, Field(description="entry id to delete")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """delete a stream object + + the current user must be author of the given stream object + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param entry: entry id to delete (required) + :type entry: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_entry_serialize( + repository=repository, + entry=entry, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_entry_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + entry: Annotated[StrictStr, Field(description="entry id to delete")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """delete a stream object + + the current user must be author of the given stream object + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param entry: entry id to delete (required) + :type entry: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_entry_serialize( + repository=repository, + entry=entry, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_entry_serialize( + self, + repository, + entry, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if entry is not None: + _path_params['entry'] = entry + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/stream/v1/delete/{repository}/{entry}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_property_values( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + var_property: Annotated[StrictStr, Field(description="The property to aggregate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get top values for a property + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param var_property: The property to aggregate (required) + :type var_property: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_property_values_serialize( + repository=repository, + var_property=var_property, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_property_values_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + var_property: Annotated[StrictStr, Field(description="The property to aggregate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get top values for a property + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param var_property: The property to aggregate (required) + :type var_property: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_property_values_serialize( + repository=repository, + var_property=var_property, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_property_values_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + var_property: Annotated[StrictStr, Field(description="The property to aggregate")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get top values for a property + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param var_property: The property to aggregate (required) + :type var_property: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_property_values_serialize( + repository=repository, + var_property=var_property, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_property_values_serialize( + self, + repository, + var_property, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if var_property is not None: + _path_params['property'] = var_property + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/stream/v1/properties/{repository}/{property}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def search1( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + status: Annotated[Optional[StrictStr], Field(description="Stream object status to search for")] = None, + query: Annotated[Optional[StrictStr], Field(description="generic text to search for (in title or description)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties, currently supported: created, priority, default: priority desc, created desc")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. 
Use multiple values to change the direction according to the given property at the same index")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="map with property + value to search")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> StreamList: + """Get the stream content for the current user with the given status. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param status: Stream object status to search for + :type status: str + :param query: generic text to search for (in title or description) + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties, currently supported: created, priority, default: priority desc, created desc + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param request_body: map with property + value to search + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search1_serialize( + repository=repository, + status=status, + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamList", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def search1_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + status: Annotated[Optional[StrictStr], Field(description="Stream object status to search for")] = None, + query: Annotated[Optional[StrictStr], Field(description="generic text to search for (in title or description)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties, currently supported: created, priority, default: priority desc, created desc")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="map with property + value to search")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[StreamList]: + """Get the stream content for the current user with the given status. + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param status: Stream object status to search for + :type status: str + :param query: generic text to search for (in title or description) + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties, currently supported: created, priority, default: priority desc, created desc + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param request_body: map with property + value to search + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._search1_serialize( + repository=repository, + status=status, + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamList", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def search1_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + status: Annotated[Optional[StrictStr], Field(description="Stream object status to search for")] = None, + query: Annotated[Optional[StrictStr], Field(description="generic text to search for (in title or description)")] = None, + max_items: Annotated[Optional[StrictInt], Field(description="maximum items per page")] = None, + skip_count: Annotated[Optional[StrictInt], Field(description="skip a number of items")] = None, + sort_properties: Annotated[Optional[List[StrictStr]], Field(description="sort properties, currently supported: created, priority, default: priority desc, created desc")] = None, + sort_ascending: Annotated[Optional[List[StrictBool]], Field(description="sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index")] = None, + request_body: Annotated[Optional[Dict[str, StrictStr]], Field(description="map with property + value to search")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get the stream content for the current user with the given status. 
+ + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param status: Stream object status to search for + :type status: str + :param query: generic text to search for (in title or description) + :type query: str + :param max_items: maximum items per page + :type max_items: int + :param skip_count: skip a number of items + :type skip_count: int + :param sort_properties: sort properties, currently supported: created, priority, default: priority desc, created desc + :type sort_properties: List[str] + :param sort_ascending: sort ascending, true if not set. Use multiple values to change the direction according to the given property at the same index + :type sort_ascending: List[bool] + :param request_body: map with property + value to search + :type request_body: Dict[str, str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._search1_serialize( + repository=repository, + status=status, + query=query, + max_items=max_items, + skip_count=skip_count, + sort_properties=sort_properties, + sort_ascending=sort_ascending, + request_body=request_body, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "StreamList", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _search1_serialize( + self, + repository, + status, + query, + max_items, + skip_count, + sort_properties, + sort_ascending, + request_body, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'sortProperties': 'multi', + 'sortAscending': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if status is not None: + + _query_params.append(('status', status)) + + if query is not None: + + _query_params.append(('query', query)) + + if max_items is not None: + + _query_params.append(('maxItems', max_items)) + + if skip_count is not None: + + _query_params.append(('skipCount', skip_count)) + + if sort_properties is not None: + + _query_params.append(('sortProperties', sort_properties)) + + if sort_ascending is not None: + + _query_params.append(('sortAscending', sort_ascending)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/stream/v1/search/{repository}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def update_entry( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + entry: Annotated[StrictStr, Field(description="entry id to update")], + authority: Annotated[StrictStr, Field(description="authority to set/change status")], + status: Annotated[StrictStr, Field(description="New status for this authority")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, 
Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """update status for a stream object and authority + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param entry: entry id to update (required) + :type entry: str + :param authority: authority to set/change status (required) + :type authority: str + :param status: New status for this authority (required) + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_entry_serialize( + repository=repository, + entry=entry, + authority=authority, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def update_entry_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + entry: Annotated[StrictStr, Field(description="entry id to update")], + authority: Annotated[StrictStr, Field(description="authority to set/change status")], + status: Annotated[StrictStr, Field(description="New status for this authority")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """update status for a stream object and authority + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param entry: entry id to update (required) + :type entry: str + :param authority: authority to set/change status (required) + :type authority: str + :param status: 
New status for this authority (required) + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_entry_serialize( + repository=repository, + entry=entry, + authority=authority, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def update_entry_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + entry: Annotated[StrictStr, Field(description="entry id to update")], + authority: Annotated[StrictStr, Field(description="authority to set/change status")], + status: Annotated[StrictStr, Field(description="New status for this authority")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """update status for a stream object and authority + + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param entry: entry id to update (required) + :type entry: str + :param authority: authority to set/change status (required) + :type authority: str + :param status: New status for this authority (required) + :type status: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._update_entry_serialize( + repository=repository, + entry=entry, + authority=authority, + status=status, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _update_entry_serialize( + self, + repository, + entry, + authority, + status, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if entry is not None: + _path_params['entry'] = entry + # process the query parameters + if authority is not None: + + _query_params.append(('authority', authority)) + + if status is not None: + + _query_params.append(('status', status)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/stream/v1/status/{repository}/{entry}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/toolv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/toolv1_api.py new file mode 100644 index 00000000..c3b067c6 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/toolv1_api.py @@ -0,0 +1,1932 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictBool, StrictStr +from typing import Dict, List, Optional +from typing_extensions import Annotated +from edu_sharing_client.models.node_entry import NodeEntry + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class TOOLV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def create_tool_defintition( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create a new tool definition object. + + Create a new tool definition object. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
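+
+        A minimal usage sketch (assumes a default-configured ApiClient; the
+        property values below are illustrative only)::
+
+            api = TOOLV1Api(ApiClient.get_default())
+            node_entry = api.create_tool_defintition(
+                repository='-home-',
+                request_body={'{http://www.alfresco.org/model/content/1.0}name': ['test']},
+                rename_if_exists=True,
+            )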
+ """ # noqa: E501 + + _param = self._create_tool_defintition_serialize( + repository=repository, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_tool_defintition_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create a new tool definition object. + + Create a new tool definition object. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_tool_defintition_serialize( + repository=repository, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_tool_defintition_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new tool definition object. + + Create a new tool definition object. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_tool_defintition_serialize( + repository=repository, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_tool_defintition_serialize( + self, + repository, + request_body, + rename_if_exists, + version_comment, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + if rename_if_exists is not None: + + _query_params.append(('renameIfExists', rename_if_exists)) + + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/tool/v1/tools/{repository}/tooldefinitions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_tool_instance( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + tool_definition: Annotated[StrictStr, Field(description="ID of parent node must have tool_definition aspect")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + 
_request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create a new tool Instance object. + + Create a new tool Instance object. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param tool_definition: ID of parent node must have tool_definition aspect (required) + :type tool_definition: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
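+
+        A minimal usage sketch (assumes a default-configured ApiClient; the
+        tool_definition id and property values are placeholders)::
+
+            api = TOOLV1Api(ApiClient.get_default())
+            node_entry = api.create_tool_instance(
+                repository='-home-',
+                tool_definition='<id of a node with the tool_definition aspect>',
+                request_body={'{http://www.alfresco.org/model/content/1.0}name': ['test']},
+            )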
+ """ # noqa: E501 + + _param = self._create_tool_instance_serialize( + repository=repository, + tool_definition=tool_definition, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_tool_instance_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + tool_definition: Annotated[StrictStr, Field(description="ID of parent node must have tool_definition aspect")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create a new tool Instance object. + + Create a new tool Instance object. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param tool_definition: ID of parent node must have tool_definition aspect (required) + :type tool_definition: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_tool_instance_serialize( + repository=repository, + tool_definition=tool_definition, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_tool_instance_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + tool_definition: Annotated[StrictStr, Field(description="ID of parent node must have tool_definition aspect")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new tool Instance object. + + Create a new tool Instance object. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param tool_definition: ID of parent node must have tool_definition aspect (required) + :type tool_definition: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_tool_instance_serialize( + repository=repository, + tool_definition=tool_definition, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_tool_instance_serialize( + self, + repository, + tool_definition, + request_body, + rename_if_exists, + version_comment, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if tool_definition is not None: + _path_params['toolDefinition'] = tool_definition + # process the query parameters + if rename_if_exists is not None: + + _query_params.append(('renameIfExists', rename_if_exists)) + + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/tool/v1/tools/{repository}/{toolDefinition}/toolinstances', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def create_tool_object( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + toolinstance: Annotated[StrictStr, Field(description="ID of parent node (a tool instance object)")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + 
Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Create a new tool object for a given tool instance. + + Create a new tool object for a given tool instance. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param toolinstance: ID of parent node (a tool instance object) (required) + :type toolinstance: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
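+
+        A minimal usage sketch (assumes a default-configured ApiClient; the
+        toolinstance id and property values are placeholders)::
+
+            api = TOOLV1Api(ApiClient.get_default())
+            node_entry = api.create_tool_object(
+                repository='-home-',
+                toolinstance='<id of an existing tool instance node>',
+                request_body={'{http://www.alfresco.org/model/content/1.0}name': ['test']},
+            )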
+ """ # noqa: E501 + + _param = self._create_tool_object_serialize( + repository=repository, + toolinstance=toolinstance, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def create_tool_object_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + toolinstance: Annotated[StrictStr, Field(description="ID of parent node (a tool instance object)")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Create a new tool object for a given tool instance. + + Create a new tool object for a given tool instance. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param toolinstance: ID of parent node (a tool instance object) (required) + :type toolinstance: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_tool_object_serialize( + repository=repository, + toolinstance=toolinstance, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def create_tool_object_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + toolinstance: Annotated[StrictStr, Field(description="ID of parent node (a tool instance object)")], + request_body: Annotated[Dict[str, List[StrictStr]], Field(description="properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]}")], + rename_if_exists: Annotated[Optional[StrictBool], Field(description="rename if the same node name exists")] = None, + version_comment: Annotated[Optional[StrictStr], Field(description="comment, leave empty = no inital version")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a new tool object for a given tool instance. + + Create a new tool object for a given tool instance. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param toolinstance: ID of parent node (a tool instance object) (required) + :type toolinstance: str + :param request_body: properties, example: {\"{http://www.alfresco.org/model/content/1.0}name\": [\"test\"]} (required) + :type request_body: Dict[str, List[str]] + :param rename_if_exists: rename if the same node name exists + :type rename_if_exists: bool + :param version_comment: comment, leave empty = no inital version + :type version_comment: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._create_tool_object_serialize( + repository=repository, + toolinstance=toolinstance, + request_body=request_body, + rename_if_exists=rename_if_exists, + version_comment=version_comment, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '409': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _create_tool_object_serialize( + self, + repository, + toolinstance, + request_body, + rename_if_exists, + version_comment, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if toolinstance is not None: + _path_params['toolinstance'] = toolinstance + # process the query parameters + if rename_if_exists is not None: + + _query_params.append(('renameIfExists', rename_if_exists)) + + if version_comment is not None: + + _query_params.append(('versionComment', version_comment)) + + # process the header parameters + # process the form parameters + # process the body parameter + if request_body is not None: + _body_params = request_body + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/tool/v1/tools/{repository}/{toolinstance}/toolobject', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_all_tool_definitions( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Get all ToolDefinitions. + + Get all ToolDefinitions. 
+ + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_all_tool_definitions_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_all_tool_definitions_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Get all ToolDefinitions. + + Get all ToolDefinitions. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_all_tool_definitions_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_all_tool_definitions_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all ToolDefinitions. + + Get all ToolDefinitions. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_all_tool_definitions_serialize( + repository=repository, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_all_tool_definitions_serialize( + self, + repository, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/tool/v1/tools/{repository}/tooldefinitions', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_instance( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + nodeid: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Get Instances of a ToolDefinition. + + Get Instances of a ToolDefinition. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param nodeid: ID of node (required) + :type nodeid: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_instance_serialize( + repository=repository, + nodeid=nodeid, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_instance_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + nodeid: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Get Instances of a ToolDefinition. + + Get Instances of a ToolDefinition. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param nodeid: ID of node (required) + :type nodeid: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_instance_serialize( + repository=repository, + nodeid=nodeid, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_instance_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + nodeid: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Instances of a ToolDefinition. + + Get Instances of a ToolDefinition. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param nodeid: ID of node (required) + :type nodeid: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_instance_serialize( + repository=repository, + nodeid=nodeid, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_instance_serialize( + self, + repository, + nodeid, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if nodeid is not None: + _path_params['nodeid'] = nodeid + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/tool/v1/tools/{repository}/{nodeid}/toolinstance', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_instances( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + tool_definition: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> NodeEntry: + """Get Instances of a ToolDefinition. + + Get Instances of a ToolDefinition. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param tool_definition: ID of node (required) + :type tool_definition: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_instances_serialize( + repository=repository, + tool_definition=tool_definition, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_instances_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + tool_definition: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[NodeEntry]: + """Get Instances of a ToolDefinition. + + Get Instances of a ToolDefinition. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param tool_definition: ID of node (required) + :type tool_definition: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_instances_serialize( + repository=repository, + tool_definition=tool_definition, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_instances_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + tool_definition: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Instances of a ToolDefinition. + + Get Instances of a ToolDefinition. + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param tool_definition: ID of node (required) + :type tool_definition: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_instances_serialize( + repository=repository, + tool_definition=tool_definition, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "NodeEntry", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_instances_serialize( + self, + repository, + tool_definition, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if tool_definition is not None: + _path_params['toolDefinition'] = tool_definition + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/tool/v1/tools/{repository}/{toolDefinition}/toolinstances', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/trackingv1_api.py b/edu_sharing_openapi/edu_sharing_client/api/trackingv1_api.py new file mode 100644 index 00000000..e7a1085e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/trackingv1_api.py @@ -0,0 +1,343 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictStr, field_validator +from typing import Optional +from typing_extensions import Annotated + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class TRACKINGV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def track_event( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + event: Annotated[StrictStr, Field(description="type of event to track")], + node: Annotated[Optional[StrictStr], Field(description="node id for which the event is tracked. For some event, this can be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Track a user interaction + + Currently limited to video / audio play interactions + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param event: type of event to track (required) + :type event: str + :param node: node id for which the event is tracked. For some event, this can be null + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._track_event_serialize( + repository=repository, + event=event, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def track_event_with_http_info( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + event: Annotated[StrictStr, Field(description="type of event to track")], + node: Annotated[Optional[StrictStr], Field(description="node id for which the event is tracked. 
For some event, this can be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Track a user interaction + + Currently limited to video / audio play interactions + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param event: type of event to track (required) + :type event: str + :param node: node id for which the event is tracked. For some event, this can be null + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._track_event_serialize( + repository=repository, + event=event, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def track_event_without_preload_content( + self, + repository: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + event: Annotated[StrictStr, Field(description="type of event to track")], + node: Annotated[Optional[StrictStr], Field(description="node id for which the event is tracked. 
For some event, this can be null")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Track a user interaction + + Currently limited to video / audio play interactions + + :param repository: ID of repository (or \"-home-\" for home repository) (required) + :type repository: str + :param event: type of event to track (required) + :type event: str + :param node: node id for which the event is tracked. For some event, this can be null + :type node: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._track_event_serialize( + repository=repository, + event=event, + node=node, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': None, + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _track_event_serialize( + self, + repository, + event, + node, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository is not None: + _path_params['repository'] = repository + if event is not None: + _path_params['event'] = event + # process the query parameters + if node is not None: + + _query_params.append(('node', node)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='PUT', + resource_path='/tracking/v1/tracking/{repository}/{event}', + 
path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api/usagev1_api.py b/edu_sharing_openapi/edu_sharing_client/api/usagev1_api.py new file mode 100644 index 00000000..d38e5ff4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api/usagev1_api.py @@ -0,0 +1,2032 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from pydantic import validate_call, Field, StrictFloat, StrictStr, StrictInt +from typing import Any, Dict, List, Optional, Tuple, Union +from typing_extensions import Annotated + +from pydantic import Field, StrictInt, StrictStr +from typing import Optional +from typing_extensions import Annotated +from edu_sharing_client.models.create_usage import CreateUsage +from edu_sharing_client.models.usage import Usage +from edu_sharing_client.models.usages import Usages + +from edu_sharing_client.api_client import ApiClient, RequestSerialized +from edu_sharing_client.api_response import ApiResponse +from edu_sharing_client.rest import RESTResponseType + + +class USAGEV1Api: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + + @validate_call + def delete_usage( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + usage_id: Annotated[StrictStr, Field(description="ID of usage")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Usages: + """Delete an usage of a node. + + + :param node_id: ID of node (required) + :type node_id: str + :param usage_id: ID of usage (required) + :type usage_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_usage_serialize( + node_id=node_id, + usage_id=usage_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def delete_usage_with_http_info( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + usage_id: Annotated[StrictStr, Field(description="ID of usage")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Usages]: + """Delete an usage of a node. + + + :param node_id: ID of node (required) + :type node_id: str + :param usage_id: ID of usage (required) + :type usage_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_usage_serialize( + node_id=node_id, + usage_id=usage_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def delete_usage_without_preload_content( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + usage_id: Annotated[StrictStr, Field(description="ID of usage")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Delete an usage of a node. + + + :param node_id: ID of node (required) + :type node_id: str + :param usage_id: ID of usage (required) + :type usage_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._delete_usage_serialize( + node_id=node_id, + usage_id=usage_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _delete_usage_serialize( + self, + node_id, + usage_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if node_id is not None: + _path_params['nodeId'] = node_id + if usage_id is not None: + _path_params['usageId'] = usage_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='DELETE', + resource_path='/usage/v1/usages/node/{nodeId}/{usageId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_usages( + self, + app_id: Annotated[StrictStr, Field(description="ID of application (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Usages: + """Get all usages of an application. + + Get all usages of an application. + + :param app_id: ID of application (or \"-home-\" for home repository) (required) + :type app_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_usages_serialize( + app_id=app_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_usages_with_http_info( + self, + app_id: Annotated[StrictStr, Field(description="ID of application (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Usages]: + """Get all usages of an application. + + Get all usages of an application. + + :param app_id: ID of application (or \"-home-\" for home repository) (required) + :type app_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_serialize( + app_id=app_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_usages_without_preload_content( + self, + app_id: Annotated[StrictStr, Field(description="ID of application (or \"-home-\" for home repository)")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all usages of an application. + + Get all usages of an application. + + :param app_id: ID of application (or \"-home-\" for home repository) (required) + :type app_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_serialize( + app_id=app_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_usages_serialize( + self, + app_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if app_id is not None: + _path_params['appId'] = app_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/usage/v1/usages/{appId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_usages1( + self, + repository_id: Annotated[StrictStr, Field(description="ID of repository")], + node_id: Annotated[StrictStr, Field(description="ID of node. Use -all- for getting usages of all nodes")], + var_from: Annotated[Optional[StrictInt], Field(description="from date")] = None, + to: Annotated[Optional[StrictInt], Field(description="to date")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """get_usages1 + + + :param repository_id: ID of repository (required) + :type repository_id: str + :param node_id: ID of node. Use -all- for getting usages of all nodes (required) + :type node_id: str + :param var_from: from date + :type var_from: int + :param to: to date + :type to: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_usages1_serialize( + repository_id=repository_id, + node_id=node_id, + var_from=var_from, + to=to, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_usages1_with_http_info( + self, + repository_id: Annotated[StrictStr, Field(description="ID of repository")], + node_id: Annotated[StrictStr, Field(description="ID of node. Use -all- for getting usages of all nodes")], + var_from: Annotated[Optional[StrictInt], Field(description="from date")] = None, + to: Annotated[Optional[StrictInt], Field(description="to date")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """get_usages1 + + + :param repository_id: ID of repository (required) + :type repository_id: str + :param node_id: ID of node. Use -all- for getting usages of all nodes (required) + :type node_id: str + :param var_from: from date + :type var_from: int + :param to: to date + :type to: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages1_serialize( + repository_id=repository_id, + node_id=node_id, + var_from=var_from, + to=to, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_usages1_without_preload_content( + self, + repository_id: Annotated[StrictStr, Field(description="ID of repository")], + node_id: Annotated[StrictStr, Field(description="ID of node. Use -all- for getting usages of all nodes")], + var_from: Annotated[Optional[StrictInt], Field(description="from date")] = None, + to: Annotated[Optional[StrictInt], Field(description="to date")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """get_usages1 + + + :param repository_id: ID of repository (required) + :type repository_id: str + :param node_id: ID of node. Use -all- for getting usages of all nodes (required) + :type node_id: str + :param var_from: from date + :type var_from: int + :param to: to date + :type to: int + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages1_serialize( + repository_id=repository_id, + node_id=node_id, + var_from=var_from, + to=to, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_usages1_serialize( + self, + repository_id, + node_id, + var_from, + to, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository_id is not None: + _path_params['repositoryId'] = repository_id + if node_id is not None: + _path_params['nodeId'] = node_id + # process the query parameters + if var_from is not None: + + _query_params.append(('from', var_from)) + + if to is not None: + + _query_params.append(('to', to)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/usage/v1/usages/repository/{repositoryId}/{nodeId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_usages_by_course( + self, + app_id: Annotated[StrictStr, Field(description="ID of application (or \"-home-\" for home repository)")], + course_id: Annotated[StrictStr, Field(description="ID of course")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Usages: + """Get all usages of an course. + + Get all usages of an course. + + :param app_id: ID of application (or \"-home-\" for home repository) (required) + :type app_id: str + :param course_id: ID of course (required) + :type course_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_usages_by_course_serialize( + app_id=app_id, + course_id=course_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_usages_by_course_with_http_info( + self, + app_id: Annotated[StrictStr, Field(description="ID of application (or \"-home-\" for home repository)")], + course_id: Annotated[StrictStr, Field(description="ID of course")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Usages]: + """Get all usages of an course. + + Get all usages of an course. + + :param app_id: ID of application (or \"-home-\" for home repository) (required) + :type app_id: str + :param course_id: ID of course (required) + :type course_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_by_course_serialize( + app_id=app_id, + course_id=course_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_usages_by_course_without_preload_content( + self, + app_id: Annotated[StrictStr, Field(description="ID of application (or \"-home-\" for home repository)")], + course_id: Annotated[StrictStr, Field(description="ID of course")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all usages of an course. + + Get all usages of an course. + + :param app_id: ID of application (or \"-home-\" for home repository) (required) + :type app_id: str + :param course_id: ID of course (required) + :type course_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_by_course_serialize( + app_id=app_id, + course_id=course_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_usages_by_course_serialize( + self, + app_id, + course_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if app_id is not None: + _path_params['appId'] = app_id + if course_id is not None: + _path_params['courseId'] = course_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/usage/v1/usages/course/{appId}/{courseId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_usages_by_node( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Usages: + """Get all usages of an node. + + Get all usages of an node. + + :param node_id: ID of node (required) + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. 
+ :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_usages_by_node_serialize( + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_usages_by_node_with_http_info( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Usages]: + """Get all usages of an node. + + Get all usages of an node. + + :param node_id: ID of node (required) + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
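+
+        Illustrative sketch only, not part of the generated code: it assumes the
+        enclosing usage API class has been instantiated as ``usage_api`` on top of
+        a configured ApiClient, and uses a placeholder node id.
+
+            # Returns an ApiResponse wrapper exposing status_code, headers and data.
+            resp = usage_api.get_usages_by_node_with_http_info(node_id="some-node-id")
+            print(resp.status_code)
+            usages = resp.data  # deserialized Usages model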
+ """ # noqa: E501 + + _param = self._get_usages_by_node_serialize( + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_usages_by_node_without_preload_content( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all usages of an node. + + Get all usages of an node. + + :param node_id: ID of node (required) + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_by_node_serialize( + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usages", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_usages_by_node_serialize( + self, + node_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if node_id is not None: + _path_params['nodeId'] = node_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/usage/v1/usages/node/{nodeId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_usages_by_node_collections( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> str: + """Get all collections where this node is used. + + + :param node_id: ID of node (required) + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_by_node_collections_serialize( + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_usages_by_node_collections_with_http_info( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[str]: + """Get all collections where this node is used. + + + :param node_id: ID of node (required) + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_by_node_collections_serialize( + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_usages_by_node_collections_without_preload_content( + self, + node_id: Annotated[StrictStr, Field(description="ID of node")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get all collections where this node is used. + + + :param node_id: ID of node (required) + :type node_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_usages_by_node_collections_serialize( + node_id=node_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "str", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_usages_by_node_collections_serialize( + self, + node_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if node_id is not None: + _path_params['nodeId'] = node_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/usage/v1/usages/node/{nodeId}/collections', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def set_usage( + self, + repository_id: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + create_usage: Annotated[CreateUsage, Field(description=" usage date")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> Usage: + """Set a usage for a node. app signature headers and authenticated user required. + + headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + + :param repository_id: ID of repository (or \"-home-\" for home repository) (required) + :type repository_id: str + :param create_usage: usage date (required) + :type create_usage: CreateUsage + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._set_usage_serialize( + repository_id=repository_id, + create_usage=create_usage, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usage", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def set_usage_with_http_info( + self, + repository_id: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + create_usage: Annotated[CreateUsage, Field(description=" usage date")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[Usage]: + """Set a usage for a node. app signature headers and authenticated user required. + + headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + + :param repository_id: ID of repository (or \"-home-\" for home repository) (required) + :type repository_id: str + :param create_usage: usage date (required) + :type create_usage: CreateUsage + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
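+
+        Illustrative sketch only, not part of the generated code: ``api_client``,
+        ``usage_api`` and ``create_usage`` are assumed to exist already (an
+        ApiClient, the enclosing usage API instance and a CreateUsage body).
+
+            # The app signature headers listed above must be present on every call.
+            api_client.set_default_header("X-Edu-App-Id", "example-app")
+            api_client.set_default_header("X-Edu-App-Sig", "<signature>")
+            api_client.set_default_header("X-Edu-App-Signed", "<signed payload>")
+            api_client.set_default_header("X-Edu-App-Ts", "<timestamp>")
+            resp = usage_api.set_usage_with_http_info("-home-", create_usage)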
+ """ # noqa: E501 + + _param = self._set_usage_serialize( + repository_id=repository_id, + create_usage=create_usage, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usage", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def set_usage_without_preload_content( + self, + repository_id: Annotated[StrictStr, Field(description="ID of repository (or \"-home-\" for home repository)")], + create_usage: Annotated[CreateUsage, Field(description=" usage date")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Set a usage for a node. app signature headers and authenticated user required. + + headers must be set: X-Edu-App-Id, X-Edu-App-Sig, X-Edu-App-Signed, X-Edu-App-Ts + + :param repository_id: ID of repository (or \"-home-\" for home repository) (required) + :type repository_id: str + :param create_usage: usage date (required) + :type create_usage: CreateUsage + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._set_usage_serialize( + repository_id=repository_id, + create_usage=create_usage, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "Usage", + '400': "ErrorResponse", + '401': "ErrorResponse", + '403': "ErrorResponse", + '404': "ErrorResponse", + '500': "ErrorResponse", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _set_usage_serialize( + self, + repository_id, + create_usage, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[str, Union[str, bytes]] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if repository_id is not None: + _path_params['repositoryId'] = repository_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if create_usage is not None: + _body_params = create_usage + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params['Content-Type'] = _content_type + else: + _default_content_type = ( + self.api_client.select_header_content_type( + [ + 'application/json' + ] + ) + ) + if _default_content_type is not None: + _header_params['Content-Type'] = _default_content_type + + # authentication setting + _auth_settings: List[str] = [ + ] + + return self.api_client.param_serialize( + method='POST', + resource_path='/usage/v1/usages/repository/{repositoryId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + diff --git a/edu_sharing_openapi/edu_sharing_client/api_client.py b/edu_sharing_openapi/edu_sharing_client/api_client.py new file mode 100644 index 00000000..6d078cfb --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api_client.py @@ -0,0 +1,788 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
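+
+    A minimal usage sketch (illustrative, not generated): it only wires a
+    Configuration to an ApiClient. The host shown is this client's built-in
+    default base path.
+
+        from edu_sharing_client.configuration import Configuration
+        from edu_sharing_client.api_client import ApiClient
+
+        config = Configuration(host="https://stable.demo.edu-sharing.net/edu-sharing/rest")
+        with ApiClient(configuration=config) as api_client:
+            api_client.set_default_header("Accept", "application/json")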
+""" # noqa: E501 + + +import datetime +from dateutil.parser import parse +from enum import Enum +import decimal +import json +import mimetypes +import os +import re +import tempfile + +from urllib.parse import quote +from typing import Tuple, Optional, List, Dict, Union +from pydantic import SecretStr + +from edu_sharing_client.configuration import Configuration +from edu_sharing_client.api_response import ApiResponse, T as ApiResponseT +import edu_sharing_client.models +from edu_sharing_client import rest +from edu_sharing_client.exceptions import ( + ApiValueError, + ApiException, + BadRequestException, + UnauthorizedException, + ForbiddenException, + NotFoundException, + ServiceException +) + +RequestSerialized = Tuple[str, str, Dict[str, str], Optional[str], List[str]] + +class ApiClient: + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + """ + + PRIMITIVE_TYPES = (float, bool, bytes, str, int) + NATIVE_TYPES_MAPPING = { + 'int': int, + 'long': int, # TODO remove as only py3 is supported? + 'float': float, + 'str': str, + 'bool': bool, + 'date': datetime.date, + 'datetime': datetime.datetime, + 'decimal': decimal.Decimal, + 'object': object, + } + _pool = None + + def __init__( + self, + configuration=None, + header_name=None, + header_value=None, + cookie=None + ) -> None: + # use default configuration if none is provided + if configuration is None: + configuration = Configuration.get_default() + self.configuration = configuration + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. + self.user_agent = 'OpenAPI-Generator/1.0.0/python' + self.client_side_validation = configuration.client_side_validation + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + pass + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + + _default = None + + @classmethod + def get_default(cls): + """Return new instance of ApiClient. + + This method returns newly created, based on default constructor, + object of ApiClient class or returns a copy of default + ApiClient. + + :return: The ApiClient object. + """ + if cls._default is None: + cls._default = ApiClient() + return cls._default + + @classmethod + def set_default(cls, default): + """Set default instance of ApiClient. + + It stores default ApiClient. + + :param default: object of ApiClient. 
+ """ + cls._default = default + + def param_serialize( + self, + method, + resource_path, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, auth_settings=None, + collection_formats=None, + _host=None, + _request_auth=None + ) -> RequestSerialized: + + """Builds the HTTP request params needed by the request. + :param method: Method to call. + :param resource_path: Path to method endpoint. + :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param files dict: key -> filename, value -> filepath, + for `multipart/form-data`. + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the authentication + in the spec for a single request. + :return: tuple of form (path, http_method, query_params, header_params, + body, post_params, files) + """ + + config = self.configuration + + # header parameters + header_params = header_params or {} + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict( + self.parameters_to_tuples(header_params,collection_formats) + ) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples( + path_params, + collection_formats + ) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # post parameters + if post_params or files: + post_params = post_params if post_params else [] + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples( + post_params, + collection_formats + ) + if files: + post_params.extend(self.files_parameters(files)) + + # auth setting + self.update_params_for_auth( + header_params, + query_params, + auth_settings, + resource_path, + method, + body, + request_auth=_request_auth + ) + + # body + if body: + body = self.sanitize_for_serialization(body) + + # request url + if _host is None or self.configuration.ignore_operation_servers: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = _host + resource_path + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + url_query = self.parameters_to_url_query( + query_params, + collection_formats + ) + url += "?" + url_query + + return method, url, header_params, body, post_params + + + def call_api( + self, + method, + url, + header_params=None, + body=None, + post_params=None, + _request_timeout=None + ) -> rest.RESTResponse: + """Makes the HTTP request (synchronous) + :param method: Method to call. + :param url: Path to method endpoint. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. 
+ :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param _request_timeout: timeout setting for this request. + :return: RESTResponse + """ + + try: + # perform request and return response + response_data = self.rest_client.request( + method, url, + headers=header_params, + body=body, post_params=post_params, + _request_timeout=_request_timeout + ) + + except ApiException as e: + raise e + + return response_data + + def response_deserialize( + self, + response_data: rest.RESTResponse, + response_types_map: Optional[Dict[str, ApiResponseT]]=None + ) -> ApiResponse[ApiResponseT]: + """Deserializes response into an object. + :param response_data: RESTResponse object to be deserialized. + :param response_types_map: dict of response types. + :return: ApiResponse + """ + + msg = "RESTResponse.read() must be called before passing it to response_deserialize()" + assert response_data.data is not None, msg + + response_type = response_types_map.get(str(response_data.status), None) + if not response_type and isinstance(response_data.status, int) and 100 <= response_data.status <= 599: + # if not found, look for '1XX', '2XX', etc. + response_type = response_types_map.get(str(response_data.status)[0] + "XX", None) + + # deserialize response data + response_text = None + return_data = None + try: + if response_type == "bytearray": + return_data = response_data.data + elif response_type == "file": + return_data = self.__deserialize_file(response_data) + elif response_type is not None: + match = None + content_type = response_data.getheader('content-type') + if content_type is not None: + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type) + encoding = match.group(1) if match else "utf-8" + response_text = response_data.data.decode(encoding) + return_data = self.deserialize(response_text, response_type, content_type) + finally: + if not 200 <= response_data.status <= 299: + raise ApiException.from_response( + http_resp=response_data, + body=response_text, + data=return_data, + ) + + return ApiResponse( + status_code = response_data.status, + data = return_data, + headers = response_data.getheaders(), + raw_data = response_data.data + ) + + def sanitize_for_serialization(self, obj): + """Builds a JSON POST object. + + If obj is None, return None. + If obj is SecretStr, return obj.get_secret_value() + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is decimal.Decimal return string representation. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. + If obj is OpenAPI model, return the properties dict. + + :param obj: The data to serialize. + :return: The serialized form of data. 
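+
+        Illustrative only, assuming ``api_client`` is an ApiClient instance:
+
+            api_client.sanitize_for_serialization(
+                {"when": datetime.date(2020, 7, 9), "count": 3}
+            )
+            # -> {'when': '2020-07-09', 'count': 3}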
+ """ + if obj is None: + return None + elif isinstance(obj, Enum): + return obj.value + elif isinstance(obj, SecretStr): + return obj.get_secret_value() + elif isinstance(obj, self.PRIMITIVE_TYPES): + return obj + elif isinstance(obj, list): + return [ + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ] + elif isinstance(obj, tuple): + return tuple( + self.sanitize_for_serialization(sub_obj) for sub_obj in obj + ) + elif isinstance(obj, (datetime.datetime, datetime.date)): + return obj.isoformat() + elif isinstance(obj, decimal.Decimal): + return str(obj) + + elif isinstance(obj, dict): + obj_dict = obj + else: + # Convert model obj to dict except + # attributes `openapi_types`, `attribute_map` + # and attributes which value is not None. + # Convert attribute name to json key in + # model definition for request. + if hasattr(obj, 'to_dict') and callable(getattr(obj, 'to_dict')): + obj_dict = obj.to_dict() + else: + obj_dict = obj.__dict__ + + return { + key: self.sanitize_for_serialization(val) + for key, val in obj_dict.items() + } + + def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: class literal for + deserialized object, or string of class name. + :param content_type: content type of response. + + :return: deserialized object. + """ + + # fetch data from response object + if content_type is None: + try: + data = json.loads(response_text) + except ValueError: + data = response_text + elif content_type.startswith("application/json"): + if response_text == "": + data = "" + else: + data = json.loads(response_text) + elif content_type.startswith("text/plain"): + data = response_text + else: + raise ApiException( + status=0, + reason="Unsupported content type: {0}".format(content_type) + ) + + return self.__deserialize(data, response_type) + + def __deserialize(self, data, klass): + """Deserializes dict, list, str into an object. + + :param data: dict, list or str. + :param klass: class literal, or string of class name. + + :return: object. + """ + if data is None: + return None + + if isinstance(klass, str): + if klass.startswith('List['): + m = re.match(r'List\[(.*)]', klass) + assert m is not None, "Malformed List type definition" + sub_kls = m.group(1) + return [self.__deserialize(sub_data, sub_kls) + for sub_data in data] + + if klass.startswith('Dict['): + m = re.match(r'Dict\[([^,]*), (.*)]', klass) + assert m is not None, "Malformed Dict type definition" + sub_kls = m.group(2) + return {k: self.__deserialize(v, sub_kls) + for k, v in data.items()} + + # convert str to class + if klass in self.NATIVE_TYPES_MAPPING: + klass = self.NATIVE_TYPES_MAPPING[klass] + else: + klass = getattr(edu_sharing_client.models, klass) + + if klass in self.PRIMITIVE_TYPES: + return self.__deserialize_primitive(data, klass) + elif klass == object: + return self.__deserialize_object(data) + elif klass == datetime.date: + return self.__deserialize_date(data) + elif klass == datetime.datetime: + return self.__deserialize_datetime(data) + elif klass == decimal.Decimal: + return decimal.Decimal(data) + elif issubclass(klass, Enum): + return self.__deserialize_enum(data, klass) + else: + return self.__deserialize_model(data, klass) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. 
+ + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + def parameters_to_url_query(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: URL query string (e.g. a=Hello%20World&b=123) + """ + new_params: List[Tuple[str, str]] = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: + if isinstance(v, bool): + v = str(v).lower() + if isinstance(v, (int, float)): + v = str(v) + if isinstance(v, dict): + v = json.dumps(v) + + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, str(value)) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(quote(str(value)) for value in v)) + ) + else: + new_params.append((k, quote(str(v)))) + + return "&".join(["=".join(map(str, item)) for item in new_params]) + + def files_parameters(self, files: Dict[str, Union[str, bytes]]): + """Builds form parameters. + + :param files: File parameters. + :return: Form parameters with files. + """ + params = [] + for k, v in files.items(): + if isinstance(v, str): + with open(v, 'rb') as f: + filename = os.path.basename(f.name) + filedata = f.read() + elif isinstance(v, bytes): + filename = k + filedata = v + else: + raise ValueError("Unsupported file value") + mimetype = ( + mimetypes.guess_type(filename)[0] + or 'application/octet-stream' + ) + params.append( + tuple([k, tuple([filename, filedata, mimetype])]) + ) + return params + + def select_header_accept(self, accepts: List[str]) -> Optional[str]: + """Returns `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return None + + for accept in accepts: + if re.search('json', accept, re.IGNORECASE): + return accept + + return accepts[0] + + def select_header_content_type(self, content_types): + """Returns `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :return: Content-Type (e.g. application/json). 
+ """ + if not content_types: + return None + + for content_type in content_types: + if re.search('json', content_type, re.IGNORECASE): + return content_type + + return content_types[0] + + def update_params_for_auth( + self, + headers, + queries, + auth_settings, + resource_path, + method, + body, + request_auth=None + ) -> None: + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param request_auth: if set, the provided settings will + override the token in the configuration. + """ + if not auth_settings: + return + + if request_auth: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + request_auth + ) + else: + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + self._apply_auth_params( + headers, + queries, + resource_path, + method, + body, + auth_setting + ) + + def _apply_auth_params( + self, + headers, + queries, + resource_path, + method, + body, + auth_setting + ) -> None: + """Updates the request parameters based on a single auth_setting + + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :resource_path: A string representation of the HTTP request resource path. + :method: A string representation of the HTTP request method. + :body: A object representing the body of the HTTP request. + The object type is the return value of sanitize_for_serialization(). + :param auth_setting: auth settings for the endpoint + """ + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + if auth_setting['type'] != 'http-signature': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + queries.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + def __deserialize_file(self, response): + """Deserializes body to file + + Saves response body into a file in a temporary folder, + using the filename from the `Content-Disposition` header if provided. + + handle file downloading + save response body into a tmp file and return the instance + + :param response: RESTResponse. + :return: file path. + """ + fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) + os.close(fd) + os.remove(path) + + content_disposition = response.getheader("Content-Disposition") + if content_disposition: + m = re.search( + r'filename=[\'"]?([^\'"\s]+)[\'"]?', + content_disposition + ) + assert m is not None, "Unexpected 'content-disposition' header value" + filename = m.group(1) + path = os.path.join(os.path.dirname(path), filename) + + with open(path, "wb") as f: + f.write(response.data) + + return path + + def __deserialize_primitive(self, data, klass): + """Deserializes string to primitive type. + + :param data: str. + :param klass: class literal. + + :return: int, long, float, str, bool. 
+ """ + try: + return klass(data) + except UnicodeEncodeError: + return str(data) + except TypeError: + return data + + def __deserialize_object(self, value): + """Return an original value. + + :return: object. + """ + return value + + def __deserialize_date(self, string): + """Deserializes string to date. + + :param string: str. + :return: date. + """ + try: + return parse(string).date() + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason="Failed to parse `{0}` as date object".format(string) + ) + + def __deserialize_datetime(self, string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :return: datetime. + """ + try: + return parse(string) + except ImportError: + return string + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as datetime object" + .format(string) + ) + ) + + def __deserialize_enum(self, data, klass): + """Deserializes primitive type to enum. + + :param data: primitive type. + :param klass: class literal. + :return: enum value. + """ + try: + return klass(data) + except ValueError: + raise rest.ApiException( + status=0, + reason=( + "Failed to parse `{0}` as `{1}`" + .format(data, klass) + ) + ) + + def __deserialize_model(self, data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :param klass: class literal. + :return: model object. + """ + + return klass.from_dict(data) diff --git a/edu_sharing_openapi/edu_sharing_client/api_response.py b/edu_sharing_openapi/edu_sharing_client/api_response.py new file mode 100644 index 00000000..9bc7c11f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/api_response.py @@ -0,0 +1,21 @@ +"""API response object.""" + +from __future__ import annotations +from typing import Optional, Generic, Mapping, TypeVar +from pydantic import Field, StrictInt, StrictBytes, BaseModel + +T = TypeVar("T") + +class ApiResponse(BaseModel, Generic[T]): + """ + API response object + """ + + status_code: StrictInt = Field(description="HTTP status code") + headers: Optional[Mapping[str, str]] = Field(None, description="HTTP headers") + data: T = Field(description="Deserialized data given the data type") + raw_data: StrictBytes = Field(description="Raw data (HTTP response body)") + + model_config = { + "arbitrary_types_allowed": True + } diff --git a/edu_sharing_openapi/edu_sharing_client/configuration.py b/edu_sharing_openapi/edu_sharing_client/configuration.py new file mode 100644 index 00000000..d3554b34 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/configuration.py @@ -0,0 +1,450 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import copy +import logging +from logging import FileHandler +import multiprocessing +import sys +from typing import Optional +import urllib3 + +import http.client as httplib + +JSON_SCHEMA_VALIDATION_KEYWORDS = { + 'multipleOf', 'maximum', 'exclusiveMaximum', + 'minimum', 'exclusiveMinimum', 'maxLength', + 'minLength', 'pattern', 'maxItems', 'minItems' +} + +class Configuration: + """This class contains various settings of the API client. + + :param host: Base url. + :param ignore_operation_servers + Boolean to ignore operation servers for the API client. 
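# Illustrative sketch of the ApiResponse wrapper defined in api_response.py:
# a typed container pairing the deserialized payload with the HTTP status,
# headers and raw body. The payload below is a placeholder, not a real
# repository response.
from edu_sharing_client.api_response import ApiResponse

resp = ApiResponse[dict](
    status_code=200,
    headers={"Content-Type": "application/json"},
    data={"themesUrl": "/edu-sharing/themes/default"},
    raw_data=b'{"themesUrl": "/edu-sharing/themes/default"}',
)
print(resp.status_code)        # 200
print(resp.data["themesUrl"])  # '/edu-sharing/themes/default'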
+ Config will use `host` as the base url regardless of the operation servers. + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer). + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication. + :param password: Password for HTTP basic authentication. + :param access_token: Access token. + :param server_index: Index to servers configuration. + :param server_variables: Mapping with string values to replace variables in + templated server configuration. The validation of enums is performed for + variables with defined enum values before. + :param server_operation_index: Mapping from operation ID to an index to server + configuration. + :param server_operation_variables: Mapping from operation ID to a mapping with + string values to replace variables in templated server configuration. + The validation of enums is performed for variables with defined enum + values before. + :param ssl_ca_cert: str - the path to a file of concatenated CA certificates + in PEM format. + :param retries: Number of retries for API requests. + + """ + + _default = None + + def __init__(self, host=None, + api_key=None, api_key_prefix=None, + username=None, password=None, + access_token=None, + server_index=None, server_variables=None, + server_operation_index=None, server_operation_variables=None, + ignore_operation_servers=False, + ssl_ca_cert=None, + retries=None, + *, + debug: Optional[bool] = None + ) -> None: + """Constructor + """ + self._base_path = "https://stable.demo.edu-sharing.net/edu-sharing/rest" if host is None else host + """Default Base url + """ + self.server_index = 0 if server_index is None and host is None else server_index + self.server_operation_index = server_operation_index or {} + """Default server index + """ + self.server_variables = server_variables or {} + self.server_operation_variables = server_operation_variables or {} + """Default server variables + """ + self.ignore_operation_servers = ignore_operation_servers + """Ignore operation servers + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. 
Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.access_token = access_token + """Access token + """ + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("edu_sharing_client") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler: Optional[FileHandler] = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + if debug is not None: + self.debug = debug + else: + self.__debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = ssl_ca_cert + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + self.tls_server_name = None + """SSL/TLS Server Name Indication (SNI) + Set this to the SNI value expected by the server. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. + """ + + self.proxy: Optional[str] = None + """Proxy URL + """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = retries + """Adding retries to override urllib3 default value 3 + """ + # Enable client side validation + self.client_side_validation = True + + self.socket_options = None + """Options to pass down to the underlying urllib3 socket + """ + + self.datetime_format = "%Y-%m-%dT%H:%M:%S.%f%z" + """datetime format + """ + + self.date_format = "%Y-%m-%d" + """date format + """ + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + def __setattr__(self, name, value): + object.__setattr__(self, name, value) + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = default + + @classmethod + def get_default_copy(cls): + """Deprecated. Please use `get_default` instead. + + Deprecated. Please use `get_default` instead. + + :return: The configuration object. + """ + return cls.get_default() + + @classmethod + def get_default(cls): + """Return the default configuration. 
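# Illustrative sketch of constructing a Configuration and registering it as
# the process-wide default returned by get_default() (which generated code
# typically falls back to when no configuration is passed explicitly). Host
# and credentials below are placeholders.
from edu_sharing_client.configuration import Configuration

conf = Configuration(
    host="https://repo.example.org/edu-sharing/rest",
    username="admin",
    password="changeme",
)
Configuration.set_default(conf)

assert Configuration.get_default() is conf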
+ + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration. + + :return: The configuration object. + """ + if cls._default is None: + cls._default = Configuration() + return cls._default + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.logger.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in self.logger.items(): + logger.setLevel(logging.DEBUG) + # turn on httplib debug + httplib.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in self.logger.items(): + logger.setLevel(logging.WARNING) + # turn off httplib debug + httplib.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier, alias=None): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :param alias: The alternative identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. 
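# Illustrative sketch of the logging and credential helpers above. The log
# path, the scheme name ("ticket") and the credential values are placeholders.
from edu_sharing_client.configuration import Configuration

conf = Configuration()
conf.debug = True                                  # DEBUG level plus http.client wire logging
conf.logger_file = "/tmp/edu_sharing_client.log"   # attaches a FileHandler to both loggers

conf.api_key["ticket"] = "TICKET_placeholder"
conf.api_key_prefix["ticket"] = "EDU-TICKET"
print(conf.get_api_key_with_prefix("ticket"))      # 'EDU-TICKET TICKET_placeholder'

conf.username, conf.password = "admin", "changeme"
print(conf.get_basic_auth_token())                 # 'Basic ...' header value built by urllib3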
+ """ + auth = {} + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. + """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: 1.1\n"\ + "SDK Package Version: 1.0.0".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + 'url': "https://stable.demo.edu-sharing.net/edu-sharing/rest", + 'description': "No description provided", + } + ] + + def get_host_from_settings(self, index, variables=None, servers=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server.get('variables', {}).items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url + + @property + def host(self): + """Return generated host.""" + return self.get_host_from_settings(self.server_index, variables=self.server_variables) + + @host.setter + def host(self, value): + """Fix base path.""" + self._base_path = value + self.server_index = None diff --git a/edu_sharing_openapi/edu_sharing_client/exceptions.py b/edu_sharing_openapi/edu_sharing_client/exceptions.py new file mode 100644 index 00000000..e8845a2a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/exceptions.py @@ -0,0 +1,199 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
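# Illustrative sketch of host resolution: with the defaults, the URL comes
# from get_host_settings()[0]; assigning to .host clears server_index so the
# property returns the overridden base path instead (the override URL below
# is a placeholder).
from edu_sharing_client.configuration import Configuration

conf = Configuration()
print(conf.host)               # 'https://stable.demo.edu-sharing.net/edu-sharing/rest'
print(conf.to_debug_report())  # OS / Python / API 1.1 / SDK 1.0.0 summary

conf.host = "https://repo.example.org/edu-sharing/rest"
print(conf.host)               # now the overridden base path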
+""" # noqa: E501 + +from typing import Any, Optional +from typing_extensions import Self + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None) -> None: + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiAttributeError(OpenApiException, AttributeError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Raised when an attribute reference or assignment fails. + + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiAttributeError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None) -> None: + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__( + self, + status=None, + reason=None, + http_resp=None, + *, + body: Optional[str] = None, + data: Optional[Any] = None, + ) -> None: + self.status = status + self.reason = reason + self.body = body + self.data = data + self.headers = None + + if http_resp: + if self.status is None: + self.status = http_resp.status + if self.reason is None: + self.reason = http_resp.reason + if self.body is None: + try: + self.body = http_resp.data.decode('utf-8') + except Exception: + pass + self.headers = http_resp.getheaders() + + @classmethod + def from_response( + cls, + *, + http_resp, + body: Optional[str], + data: Optional[Any], + ) -> Self: + if http_resp.status == 400: + raise BadRequestException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 401: + raise UnauthorizedException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 403: + raise ForbiddenException(http_resp=http_resp, body=body, data=data) + + if http_resp.status == 404: + raise NotFoundException(http_resp=http_resp, 
body=body, data=data) + + if 500 <= http_resp.status <= 599: + raise ServiceException(http_resp=http_resp, body=body, data=data) + raise ApiException(http_resp=http_resp, body=body, data=data) + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.data or self.body: + error_message += "HTTP response body: {0}\n".format(self.data or self.body) + + return error_message + + +class BadRequestException(ApiException): + pass + + +class NotFoundException(ApiException): + pass + + +class UnauthorizedException(ApiException): + pass + + +class ForbiddenException(ApiException): + pass + + +class ServiceException(ApiException): + pass + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/edu_sharing_openapi/edu_sharing_client/models/__init__.py b/edu_sharing_openapi/edu_sharing_client/models/__init__.py new file mode 100644 index 00000000..2a9ca17a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/__init__.py @@ -0,0 +1,292 @@ +# coding: utf-8 + +# flake8: noqa +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +# import models into model package +from edu_sharing_client.models.ace import ACE +from edu_sharing_client.models.acl import ACL +from edu_sharing_client.models.about import About +from edu_sharing_client.models.about_service import AboutService +from edu_sharing_client.models.abstract_entries import AbstractEntries +from edu_sharing_client.models.add_to_collection_event_dto import AddToCollectionEventDTO +from edu_sharing_client.models.admin import Admin +from edu_sharing_client.models.admin_statistics import AdminStatistics +from edu_sharing_client.models.application import Application +from edu_sharing_client.models.audience import Audience +from edu_sharing_client.models.authentication_token import AuthenticationToken +from edu_sharing_client.models.authority import Authority +from edu_sharing_client.models.authority_entries import AuthorityEntries +from edu_sharing_client.models.available_mds import AvailableMds +from edu_sharing_client.models.banner import Banner +from edu_sharing_client.models.cache_cluster import CacheCluster +from edu_sharing_client.models.cache_info import CacheInfo +from edu_sharing_client.models.cache_member import CacheMember +from edu_sharing_client.models.catalog import Catalog +from edu_sharing_client.models.collection import Collection +from edu_sharing_client.models.collection_counts import CollectionCounts +from edu_sharing_client.models.collection_dto import CollectionDTO +from edu_sharing_client.models.collection_entries import CollectionEntries +from edu_sharing_client.models.collection_entry import CollectionEntry +from edu_sharing_client.models.collection_options import CollectionOptions +from edu_sharing_client.models.collection_proposal_entries import CollectionProposalEntries +from edu_sharing_client.models.collection_reference import CollectionReference +from edu_sharing_client.models.collections import Collections +from 
edu_sharing_client.models.collections_result import CollectionsResult +from edu_sharing_client.models.comment import Comment +from edu_sharing_client.models.comment_event_dto import CommentEventDTO +from edu_sharing_client.models.comments import Comments +from edu_sharing_client.models.condition import Condition +from edu_sharing_client.models.config import Config +from edu_sharing_client.models.config_frontpage import ConfigFrontpage +from edu_sharing_client.models.config_privacy import ConfigPrivacy +from edu_sharing_client.models.config_publish import ConfigPublish +from edu_sharing_client.models.config_rating import ConfigRating +from edu_sharing_client.models.config_remote import ConfigRemote +from edu_sharing_client.models.config_theme_color import ConfigThemeColor +from edu_sharing_client.models.config_theme_colors import ConfigThemeColors +from edu_sharing_client.models.config_tutorial import ConfigTutorial +from edu_sharing_client.models.config_upload import ConfigUpload +from edu_sharing_client.models.config_workflow import ConfigWorkflow +from edu_sharing_client.models.config_workflow_list import ConfigWorkflowList +from edu_sharing_client.models.connector import Connector +from edu_sharing_client.models.connector_file_type import ConnectorFileType +from edu_sharing_client.models.connector_list import ConnectorList +from edu_sharing_client.models.content import Content +from edu_sharing_client.models.context_menu_entry import ContextMenuEntry +from edu_sharing_client.models.contributor import Contributor +from edu_sharing_client.models.counts import Counts +from edu_sharing_client.models.create import Create +from edu_sharing_client.models.create_usage import CreateUsage +from edu_sharing_client.models.delete_option import DeleteOption +from edu_sharing_client.models.dynamic_config import DynamicConfig +from edu_sharing_client.models.dynamic_registration_token import DynamicRegistrationToken +from edu_sharing_client.models.dynamic_registration_tokens import DynamicRegistrationTokens +from edu_sharing_client.models.element import Element +from edu_sharing_client.models.error_response import ErrorResponse +from edu_sharing_client.models.excel_result import ExcelResult +from edu_sharing_client.models.facet import Facet +from edu_sharing_client.models.feature_info import FeatureInfo +from edu_sharing_client.models.feedback_data import FeedbackData +from edu_sharing_client.models.feedback_result import FeedbackResult +from edu_sharing_client.models.filter import Filter +from edu_sharing_client.models.filter_entry import FilterEntry +from edu_sharing_client.models.font_icon import FontIcon +from edu_sharing_client.models.frontpage import Frontpage +from edu_sharing_client.models.general import General +from edu_sharing_client.models.geo import Geo +from edu_sharing_client.models.group import Group +from edu_sharing_client.models.group_entries import GroupEntries +from edu_sharing_client.models.group_entry import GroupEntry +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.models.group_signup_details import GroupSignupDetails +from edu_sharing_client.models.guest import Guest +from edu_sharing_client.models.handle_param import HandleParam +from edu_sharing_client.models.help_menu_options import HelpMenuOptions +from edu_sharing_client.models.home_folder_options import HomeFolderOptions +from edu_sharing_client.models.icon import Icon +from edu_sharing_client.models.image import Image +from edu_sharing_client.models.interface import Interface 
+from edu_sharing_client.models.invite_event_dto import InviteEventDTO +from edu_sharing_client.models.json_object import JSONObject +from edu_sharing_client.models.job import Job +from edu_sharing_client.models.job_builder import JobBuilder +from edu_sharing_client.models.job_data_map import JobDataMap +from edu_sharing_client.models.job_description import JobDescription +from edu_sharing_client.models.job_detail import JobDetail +from edu_sharing_client.models.job_detail_job_data_map import JobDetailJobDataMap +from edu_sharing_client.models.job_entry import JobEntry +from edu_sharing_client.models.job_field_description import JobFieldDescription +from edu_sharing_client.models.job_info import JobInfo +from edu_sharing_client.models.job_key import JobKey +from edu_sharing_client.models.key_value_pair import KeyValuePair +from edu_sharing_client.models.lti_platform_configuration import LTIPlatformConfiguration +from edu_sharing_client.models.lti_session import LTISession +from edu_sharing_client.models.lti_tool_configuration import LTIToolConfiguration +from edu_sharing_client.models.language import Language +from edu_sharing_client.models.level import Level +from edu_sharing_client.models.license import License +from edu_sharing_client.models.license_agreement import LicenseAgreement +from edu_sharing_client.models.license_agreement_node import LicenseAgreementNode +from edu_sharing_client.models.licenses import Licenses +from edu_sharing_client.models.location import Location +from edu_sharing_client.models.log_entry import LogEntry +from edu_sharing_client.models.logger_config_result import LoggerConfigResult +from edu_sharing_client.models.login import Login +from edu_sharing_client.models.login_credentials import LoginCredentials +from edu_sharing_client.models.logout_info import LogoutInfo +from edu_sharing_client.models.mainnav import Mainnav +from edu_sharing_client.models.manual_registration_data import ManualRegistrationData +from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult +from edu_sharing_client.models.mds import Mds +from edu_sharing_client.models.mds_column import MdsColumn +from edu_sharing_client.models.mds_entries import MdsEntries +from edu_sharing_client.models.mds_group import MdsGroup +from edu_sharing_client.models.mds_list import MdsList +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria +from edu_sharing_client.models.mds_sort import MdsSort +from edu_sharing_client.models.mds_sort_column import MdsSortColumn +from edu_sharing_client.models.mds_sort_default import MdsSortDefault +from edu_sharing_client.models.mds_subwidget import MdsSubwidget +from edu_sharing_client.models.mds_value import MdsValue +from edu_sharing_client.models.mds_view import MdsView +from edu_sharing_client.models.mds_widget import MdsWidget +from edu_sharing_client.models.mds_widget_condition import MdsWidgetCondition +from edu_sharing_client.models.mediacenter import Mediacenter +from edu_sharing_client.models.mediacenter_profile_extension import MediacenterProfileExtension +from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult +from edu_sharing_client.models.menu_entry import MenuEntry +from edu_sharing_client.models.message import Message +from edu_sharing_client.models.metadata_set_info import MetadataSetInfo +from edu_sharing_client.models.metadata_suggestion_event_dto import MetadataSuggestionEventDTO +from edu_sharing_client.models.node import Node +from 
edu_sharing_client.models.node_collection_proposal_count import NodeCollectionProposalCount +from edu_sharing_client.models.node_data import NodeData +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.node_entries import NodeEntries +from edu_sharing_client.models.node_entry import NodeEntry +from edu_sharing_client.models.node_issue_event_dto import NodeIssueEventDTO +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink +from edu_sharing_client.models.node_locked import NodeLocked +from edu_sharing_client.models.node_permission_entry import NodePermissionEntry +from edu_sharing_client.models.node_permissions import NodePermissions +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.node_relation import NodeRelation +from edu_sharing_client.models.node_remote import NodeRemote +from edu_sharing_client.models.node_share import NodeShare +from edu_sharing_client.models.node_stats import NodeStats +from edu_sharing_client.models.node_text import NodeText +from edu_sharing_client.models.node_version import NodeVersion +from edu_sharing_client.models.node_version_entries import NodeVersionEntries +from edu_sharing_client.models.node_version_entry import NodeVersionEntry +from edu_sharing_client.models.node_version_ref import NodeVersionRef +from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries +from edu_sharing_client.models.notification_config import NotificationConfig +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.notification_intervals import NotificationIntervals +from edu_sharing_client.models.notification_response_page import NotificationResponsePage +from edu_sharing_client.models.notify_entry import NotifyEntry +from edu_sharing_client.models.open_id_configuration import OpenIdConfiguration +from edu_sharing_client.models.open_id_registration_result import OpenIdRegistrationResult +from edu_sharing_client.models.organisations_import_result import OrganisationsImportResult +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.models.organization_entries import OrganizationEntries +from edu_sharing_client.models.pageable import Pageable +from edu_sharing_client.models.pagination import Pagination +from edu_sharing_client.models.parameters import Parameters +from edu_sharing_client.models.parent_entries import ParentEntries +from edu_sharing_client.models.person import Person +from edu_sharing_client.models.person_delete_options import PersonDeleteOptions +from edu_sharing_client.models.person_delete_result import PersonDeleteResult +from edu_sharing_client.models.person_report import PersonReport +from edu_sharing_client.models.plugin_info import PluginInfo +from edu_sharing_client.models.plugin_status import PluginStatus +from edu_sharing_client.models.preferences import Preferences +from edu_sharing_client.models.preview import Preview +from edu_sharing_client.models.profile import Profile +from edu_sharing_client.models.profile_settings import ProfileSettings +from edu_sharing_client.models.propose_for_collection_event_dto import ProposeForCollectionEventDTO +from edu_sharing_client.models.provider import Provider +from edu_sharing_client.models.query import Query +from edu_sharing_client.models.rating_data import RatingData +from edu_sharing_client.models.rating_details import RatingDetails +from edu_sharing_client.models.rating_event_dto import 
RatingEventDTO +from edu_sharing_client.models.rating_history import RatingHistory +from edu_sharing_client.models.reference_entries import ReferenceEntries +from edu_sharing_client.models.register import Register +from edu_sharing_client.models.register_exists import RegisterExists +from edu_sharing_client.models.register_information import RegisterInformation +from edu_sharing_client.models.registration_url import RegistrationUrl +from edu_sharing_client.models.relation_data import RelationData +from edu_sharing_client.models.remote import Remote +from edu_sharing_client.models.remote_auth_description import RemoteAuthDescription +from edu_sharing_client.models.rendering import Rendering +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry +from edu_sharing_client.models.rendering_gdpr import RenderingGdpr +from edu_sharing_client.models.repo import Repo +from edu_sharing_client.models.repo_entries import RepoEntries +from edu_sharing_client.models.repository_config import RepositoryConfig +from edu_sharing_client.models.repository_version_info import RepositoryVersionInfo +from edu_sharing_client.models.restore_result import RestoreResult +from edu_sharing_client.models.restore_results import RestoreResults +from edu_sharing_client.models.search_parameters import SearchParameters +from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets +from edu_sharing_client.models.search_result import SearchResult +from edu_sharing_client.models.search_result_elastic import SearchResultElastic +from edu_sharing_client.models.search_result_lrmi import SearchResultLrmi +from edu_sharing_client.models.search_result_node import SearchResultNode +from edu_sharing_client.models.search_v_card import SearchVCard +from edu_sharing_client.models.server_update_info import ServerUpdateInfo +from edu_sharing_client.models.service import Service +from edu_sharing_client.models.service_instance import ServiceInstance +from edu_sharing_client.models.service_version import ServiceVersion +from edu_sharing_client.models.services import Services +from edu_sharing_client.models.shared_folder_options import SharedFolderOptions +from edu_sharing_client.models.sharing_info import SharingInfo +from edu_sharing_client.models.simple_edit import SimpleEdit +from edu_sharing_client.models.simple_edit_global_groups import SimpleEditGlobalGroups +from edu_sharing_client.models.simple_edit_organization import SimpleEditOrganization +from edu_sharing_client.models.sort import Sort +from edu_sharing_client.models.statistic_entity import StatisticEntity +from edu_sharing_client.models.statistic_entry import StatisticEntry +from edu_sharing_client.models.statistics import Statistics +from edu_sharing_client.models.statistics_global import StatisticsGlobal +from edu_sharing_client.models.statistics_group import StatisticsGroup +from edu_sharing_client.models.statistics_key_group import StatisticsKeyGroup +from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup +from edu_sharing_client.models.statistics_user import StatisticsUser +from edu_sharing_client.models.stored_service import StoredService +from edu_sharing_client.models.stream import Stream +from edu_sharing_client.models.stream_entry import StreamEntry +from edu_sharing_client.models.stream_entry_input import StreamEntryInput +from edu_sharing_client.models.stream_list import StreamList +from edu_sharing_client.models.sub_group_item import SubGroupItem +from edu_sharing_client.models.suggest 
import Suggest +from edu_sharing_client.models.suggestion import Suggestion +from edu_sharing_client.models.suggestion_param import SuggestionParam +from edu_sharing_client.models.suggestions import Suggestions +from edu_sharing_client.models.tool import Tool +from edu_sharing_client.models.tools import Tools +from edu_sharing_client.models.tracking import Tracking +from edu_sharing_client.models.tracking_authority import TrackingAuthority +from edu_sharing_client.models.tracking_node import TrackingNode +from edu_sharing_client.models.upload_result import UploadResult +from edu_sharing_client.models.usage import Usage +from edu_sharing_client.models.usages import Usages +from edu_sharing_client.models.user import User +from edu_sharing_client.models.user_credential import UserCredential +from edu_sharing_client.models.user_data_dto import UserDataDTO +from edu_sharing_client.models.user_entries import UserEntries +from edu_sharing_client.models.user_entry import UserEntry +from edu_sharing_client.models.user_profile import UserProfile +from edu_sharing_client.models.user_profile_app_auth import UserProfileAppAuth +from edu_sharing_client.models.user_profile_edit import UserProfileEdit +from edu_sharing_client.models.user_quota import UserQuota +from edu_sharing_client.models.user_simple import UserSimple +from edu_sharing_client.models.user_stats import UserStats +from edu_sharing_client.models.user_status import UserStatus +from edu_sharing_client.models.value import Value +from edu_sharing_client.models.value_parameters import ValueParameters +from edu_sharing_client.models.values import Values +from edu_sharing_client.models.variables import Variables +from edu_sharing_client.models.version import Version +from edu_sharing_client.models.version_build import VersionBuild +from edu_sharing_client.models.version_git import VersionGit +from edu_sharing_client.models.version_git_commit import VersionGitCommit +from edu_sharing_client.models.version_maven import VersionMaven +from edu_sharing_client.models.version_project import VersionProject +from edu_sharing_client.models.version_timestamp import VersionTimestamp +from edu_sharing_client.models.website_information import WebsiteInformation +from edu_sharing_client.models.widget_data_dto import WidgetDataDTO +from edu_sharing_client.models.workflow_event_dto import WorkflowEventDTO +from edu_sharing_client.models.workflow_history import WorkflowHistory diff --git a/edu_sharing_openapi/edu_sharing_client/models/about.py b/edu_sharing_openapi/edu_sharing_client/models/about.py new file mode 100644 index 00000000..b1d7558e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/about.py @@ -0,0 +1,125 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
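# Illustrative note: models/__init__.py above re-exports every generated
# model, so call sites can import them from the package root rather than
# from the individual modules.
from edu_sharing_client.models import ACL, About, Node, SearchParameters

print(ACL.__module__)  # 'edu_sharing_client.models.acl'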
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.about_service import AboutService +from edu_sharing_client.models.feature_info import FeatureInfo +from edu_sharing_client.models.plugin_info import PluginInfo +from edu_sharing_client.models.service_version import ServiceVersion +from typing import Optional, Set +from typing_extensions import Self + +class About(BaseModel): + """ + About + """ # noqa: E501 + plugins: Optional[List[PluginInfo]] = None + features: Optional[List[FeatureInfo]] = None + themes_url: Optional[StrictStr] = Field(default=None, alias="themesUrl") + last_cache_update: Optional[StrictInt] = Field(default=None, alias="lastCacheUpdate") + version: ServiceVersion + services: List[AboutService] + __properties: ClassVar[List[str]] = ["plugins", "features", "themesUrl", "lastCacheUpdate", "version", "services"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of About from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in plugins (list) + _items = [] + if self.plugins: + for _item_plugins in self.plugins: + if _item_plugins: + _items.append(_item_plugins.to_dict()) + _dict['plugins'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in features (list) + _items = [] + if self.features: + for _item_features in self.features: + if _item_features: + _items.append(_item_features.to_dict()) + _dict['features'] = _items + # override the default output from pydantic by calling `to_dict()` of version + if self.version: + _dict['version'] = self.version.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in services (list) + _items = [] + if self.services: + for _item_services in self.services: + if _item_services: + _items.append(_item_services.to_dict()) + _dict['services'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of About from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "plugins": [PluginInfo.from_dict(_item) for _item in obj["plugins"]] if obj.get("plugins") is not None else None, + "features": [FeatureInfo.from_dict(_item) for _item in obj["features"]] if obj.get("features") is not None else None, + "themesUrl": obj.get("themesUrl"), + "lastCacheUpdate": obj.get("lastCacheUpdate"), + "version": ServiceVersion.from_dict(obj["version"]) if obj.get("version") is not None else None, + "services": [AboutService.from_dict(_item) for _item in obj["services"]] if obj.get("services") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/about_service.py b/edu_sharing_openapi/edu_sharing_client/models/about_service.py new file mode 100644 index 00000000..969946c9 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/about_service.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.service_instance import ServiceInstance +from typing import Optional, Set +from typing_extensions import Self + +class AboutService(BaseModel): + """ + AboutService + """ # noqa: E501 + name: StrictStr + instances: List[ServiceInstance] + __properties: ClassVar[List[str]] = ["name", "instances"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AboutService from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in instances (list) + _items = [] + if self.instances: + for _item_instances in self.instances: + if _item_instances: + _items.append(_item_instances.to_dict()) + _dict['instances'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AboutService from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "instances": [ServiceInstance.from_dict(_item) for _item in obj["instances"]] if obj.get("instances") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/abstract_entries.py b/edu_sharing_openapi/edu_sharing_client/models/abstract_entries.py new file mode 100644 index 00000000..ee92ea2f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/abstract_entries.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class AbstractEntries(BaseModel): + """ + AbstractEntries + """ # noqa: E501 + nodes: List[Dict[str, Any]] + pagination: Pagination + __properties: ClassVar[List[str]] = ["nodes", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AbstractEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AbstractEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodes": obj.get("nodes"), + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/ace.py b/edu_sharing_openapi/edu_sharing_client/models/ace.py new file mode 100644 index 00000000..295e0b39 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/ace.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.authority import Authority +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.models.user_profile import UserProfile +from typing import Optional, Set +from typing_extensions import Self + +class ACE(BaseModel): + """ + ACE + """ # noqa: E501 + editable: Optional[StrictBool] = None + authority: Authority + user: Optional[UserProfile] = None + group: Optional[GroupProfile] = None + permissions: List[StrictStr] + __properties: ClassVar[List[str]] = ["editable", "authority", "user", "group", "permissions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ACE from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of authority + if self.authority: + _dict['authority'] = self.authority.to_dict() + # override the default output from pydantic by calling `to_dict()` of user + if self.user: + _dict['user'] = self.user.to_dict() + # override the default output from pydantic by calling `to_dict()` of group + if self.group: + _dict['group'] = self.group.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ACE from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "editable": obj.get("editable"), + "authority": Authority.from_dict(obj["authority"]) if obj.get("authority") is not None else None, + "user": UserProfile.from_dict(obj["user"]) if obj.get("user") is not None else None, + "group": GroupProfile.from_dict(obj["group"]) if obj.get("group") is not None else None, + "permissions": obj.get("permissions") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/acl.py b/edu_sharing_openapi/edu_sharing_client/models/acl.py new file mode 100644 index 00000000..a7cd6d0f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/acl.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.ace import ACE +from typing import Optional, Set +from typing_extensions import Self + +class ACL(BaseModel): + """ + ACL + """ # noqa: E501 + inherited: StrictBool + permissions: List[ACE] + __properties: ClassVar[List[str]] = ["inherited", "permissions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ACL from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in permissions (list) + _items = [] + if self.permissions: + for _item_permissions in self.permissions: + if _item_permissions: + _items.append(_item_permissions.to_dict()) + _dict['permissions'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ACL from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "inherited": obj.get("inherited"), + "permissions": [ACE.from_dict(_item) for _item in obj["permissions"]] if obj.get("permissions") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/add_to_collection_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/add_to_collection_event_dto.py new file mode 100644 index 00000000..ddfe12b4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/add_to_collection_event_dto.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
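# Illustrative sketch of the to_dict()/from_dict() pattern shared by all the
# generated models. ACL is used because it can be built entirely from fields
# defined in this patch; the empty permission list keeps the nested
# ACE/Authority models out of the example.
from edu_sharing_client.models.acl import ACL

acl = ACL(inherited=False, permissions=[])
assert acl.to_dict() == {"inherited": False, "permissions": []}

roundtrip = ACL.from_dict(acl.to_dict())
assert roundtrip == acl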
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection_dto import CollectionDTO +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class AddToCollectionEventDTO(NotificationEventDTO): + """ + AddToCollectionEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + collection: Optional[CollectionDTO] = None + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "collection"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AddToCollectionEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + # override the default output from pydantic by calling `to_dict()` of collection + if self.collection: + _dict['collection'] = self.collection.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AddToCollectionEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "collection": CollectionDTO.from_dict(obj["collection"]) if obj.get("collection") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/admin.py b/edu_sharing_openapi/edu_sharing_client/models/admin.py new file mode 100644 index 00000000..035145f4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/admin.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.statistics import Statistics +from typing import Optional, Set +from typing_extensions import Self + +class Admin(BaseModel): + """ + Admin + """ # noqa: E501 + statistics: Optional[Statistics] = None + editor_type: Optional[StrictStr] = Field(default=None, alias="editorType") + __properties: ClassVar[List[str]] = ["statistics", "editorType"] + + @field_validator('editor_type') + def editor_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Textarea', 'Monaco']): + raise ValueError("must be one of enum values ('Textarea', 'Monaco')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Admin from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of statistics + if self.statistics: + _dict['statistics'] = self.statistics.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Admin from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "statistics": Statistics.from_dict(obj["statistics"]) if obj.get("statistics") is not None else None, + "editorType": obj.get("editorType") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/admin_statistics.py b/edu_sharing_openapi/edu_sharing_client/models/admin_statistics.py new file mode 100644 index 00000000..307347ed --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/admin_statistics.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from typing import Optional, Set +from typing_extensions import Self + +class AdminStatistics(BaseModel): + """ + AdminStatistics + """ # noqa: E501 + active_sessions: Optional[StrictInt] = Field(default=None, alias="activeSessions") + number_of_previews: Optional[StrictInt] = Field(default=None, alias="numberOfPreviews") + max_memory: Optional[StrictInt] = Field(default=None, alias="maxMemory") + allocated_memory: Optional[StrictInt] = Field(default=None, alias="allocatedMemory") + preview_cache_size: Optional[StrictInt] = Field(default=None, alias="previewCacheSize") + active_locks: Optional[List[Node]] = Field(default=None, alias="activeLocks") + __properties: ClassVar[List[str]] = ["activeSessions", "numberOfPreviews", "maxMemory", "allocatedMemory", "previewCacheSize", "activeLocks"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AdminStatistics from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in active_locks (list) + _items = [] + if self.active_locks: + for _item_active_locks in self.active_locks: + if _item_active_locks: + _items.append(_item_active_locks.to_dict()) + _dict['activeLocks'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AdminStatistics from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "activeSessions": obj.get("activeSessions"), + "numberOfPreviews": obj.get("numberOfPreviews"), + "maxMemory": obj.get("maxMemory"), + "allocatedMemory": obj.get("allocatedMemory"), + "previewCacheSize": obj.get("previewCacheSize"), + "activeLocks": [Node.from_dict(_item) for _item in obj["activeLocks"]] if obj.get("activeLocks") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/application.py b/edu_sharing_openapi/edu_sharing_client/models/application.py new file mode 100644 index 00000000..73d5b3bf --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/application.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Application(BaseModel): + """ + Application + """ # noqa: E501 + id: Optional[StrictStr] = None + title: Optional[StrictStr] = None + webserver_url: Optional[StrictStr] = Field(default=None, alias="webserverUrl") + client_base_url: Optional[StrictStr] = Field(default=None, alias="clientBaseUrl") + type: Optional[StrictStr] = None + subtype: Optional[StrictStr] = None + repository_type: Optional[StrictStr] = Field(default=None, alias="repositoryType") + xml: Optional[StrictStr] = None + file: Optional[StrictStr] = None + content_url: Optional[StrictStr] = Field(default=None, alias="contentUrl") + config_url: Optional[StrictStr] = Field(default=None, alias="configUrl") + __properties: ClassVar[List[str]] = ["id", "title", "webserverUrl", "clientBaseUrl", "type", "subtype", "repositoryType", "xml", "file", "contentUrl", "configUrl"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Application from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation 
of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Application from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "title": obj.get("title"), + "webserverUrl": obj.get("webserverUrl"), + "clientBaseUrl": obj.get("clientBaseUrl"), + "type": obj.get("type"), + "subtype": obj.get("subtype"), + "repositoryType": obj.get("repositoryType"), + "xml": obj.get("xml"), + "file": obj.get("file"), + "contentUrl": obj.get("contentUrl"), + "configUrl": obj.get("configUrl") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/audience.py b/edu_sharing_openapi/edu_sharing_client/models/audience.py new file mode 100644 index 00000000..2884a8f8 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/audience.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Audience(BaseModel): + """ + Audience + """ # noqa: E501 + name: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Audience from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Audience from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/authentication_token.py b/edu_sharing_openapi/edu_sharing_client/models/authentication_token.py new file mode 100644 index 00000000..0000ef3a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/authentication_token.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AuthenticationToken(BaseModel): + """ + AuthenticationToken + """ # noqa: E501 + user_id: Optional[StrictStr] = Field(default=None, alias="userId") + ticket: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["userId", "ticket"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuthenticationToken from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuthenticationToken from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "userId": obj.get("userId"), + "ticket": obj.get("ticket") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/authority.py b/edu_sharing_openapi/edu_sharing_client/models/authority.py new file mode 100644 index 00000000..d9fc676b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/authority.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Authority(BaseModel): + """ + Authority + """ # noqa: E501 + properties: Optional[Dict[str, List[StrictStr]]] = None + editable: Optional[StrictBool] = None + authority_name: StrictStr = Field(alias="authorityName") + authority_type: Optional[StrictStr] = Field(default=None, alias="authorityType") + __properties: ClassVar[List[str]] = ["properties", "editable", "authorityName", "authorityType"] + + @field_validator('authority_type') + def authority_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST']): + raise ValueError("must be one of enum values ('USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Authority from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Authority from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "properties": obj.get("properties"), + "editable": obj.get("editable"), + "authorityName": obj.get("authorityName"), + "authorityType": obj.get("authorityType") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/authority_entries.py b/edu_sharing_openapi/edu_sharing_client/models/authority_entries.py new file mode 100644 index 00000000..b076948b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/authority_entries.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.authority import Authority +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class AuthorityEntries(BaseModel): + """ + AuthorityEntries + """ # noqa: E501 + authorities: List[Authority] + pagination: Pagination + __properties: ClassVar[List[str]] = ["authorities", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AuthorityEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in authorities (list) + _items = [] + if self.authorities: + for _item_authorities in self.authorities: + if _item_authorities: + _items.append(_item_authorities.to_dict()) + _dict['authorities'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AuthorityEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "authorities": [Authority.from_dict(_item) for _item in obj["authorities"]] if obj.get("authorities") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/available_mds.py b/edu_sharing_openapi/edu_sharing_client/models/available_mds.py new file mode 100644 index 00000000..114a6fe1 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/available_mds.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class AvailableMds(BaseModel): + """ + AvailableMds + """ # noqa: E501 + repository: Optional[StrictStr] = None + mds: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["repository", "mds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of AvailableMds from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of AvailableMds from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repository": obj.get("repository"), + "mds": obj.get("mds") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/banner.py b/edu_sharing_openapi/edu_sharing_client/models/banner.py new file mode 100644 index 00000000..6826fb3b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/banner.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Banner(BaseModel): + """ + Banner + """ # noqa: E501 + url: Optional[StrictStr] = None + href: Optional[StrictStr] = None + components: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["url", "href", "components"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Banner from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Banner from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url"), + "href": obj.get("href"), + "components": obj.get("components") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/cache_cluster.py b/edu_sharing_openapi/edu_sharing_client/models/cache_cluster.py new file mode 100644 index 00000000..d0a5591d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/cache_cluster.py @@ -0,0 +1,120 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.cache_info import CacheInfo +from edu_sharing_client.models.cache_member import CacheMember +from typing import Optional, Set +from typing_extensions import Self + +class CacheCluster(BaseModel): + """ + CacheCluster + """ # noqa: E501 + instances: Optional[List[CacheMember]] = None + cache_infos: Optional[List[CacheInfo]] = Field(default=None, alias="cacheInfos") + local_member: Optional[StrictStr] = Field(default=None, alias="localMember") + free_memory: Optional[StrictInt] = Field(default=None, alias="freeMemory") + total_memory: Optional[StrictInt] = Field(default=None, alias="totalMemory") + max_memory: Optional[StrictInt] = Field(default=None, alias="maxMemory") + available_processors: Optional[StrictInt] = Field(default=None, alias="availableProcessors") + time_stamp: Optional[datetime] = Field(default=None, alias="timeStamp") + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + __properties: ClassVar[List[str]] = ["instances", "cacheInfos", "localMember", "freeMemory", "totalMemory", "maxMemory", "availableProcessors", "timeStamp", "groupName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CacheCluster from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in instances (list) + _items = [] + if self.instances: + for _item_instances in self.instances: + if _item_instances: + _items.append(_item_instances.to_dict()) + _dict['instances'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in cache_infos (list) + _items = [] + if self.cache_infos: + for _item_cache_infos in self.cache_infos: + if _item_cache_infos: + _items.append(_item_cache_infos.to_dict()) + _dict['cacheInfos'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CacheCluster from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "instances": [CacheMember.from_dict(_item) for _item in obj["instances"]] if obj.get("instances") is not None else None, + "cacheInfos": [CacheInfo.from_dict(_item) for _item in obj["cacheInfos"]] if obj.get("cacheInfos") is not None else None, + "localMember": obj.get("localMember"), + "freeMemory": obj.get("freeMemory"), + "totalMemory": obj.get("totalMemory"), + "maxMemory": obj.get("maxMemory"), + "availableProcessors": obj.get("availableProcessors"), + "timeStamp": obj.get("timeStamp"), + "groupName": obj.get("groupName") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/cache_info.py b/edu_sharing_openapi/edu_sharing_client/models/cache_info.py new file mode 100644 index 00000000..bad81edc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/cache_info.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CacheInfo(BaseModel): + """ + CacheInfo + """ # noqa: E501 + size: Optional[StrictInt] = None + statistic_hits: Optional[StrictInt] = Field(default=None, alias="statisticHits") + name: Optional[StrictStr] = None + backup_count: Optional[StrictInt] = Field(default=None, alias="backupCount") + backup_entry_count: Optional[StrictInt] = Field(default=None, alias="backupEntryCount") + backup_entry_memory_cost: Optional[StrictInt] = Field(default=None, alias="backupEntryMemoryCost") + heap_cost: Optional[StrictInt] = Field(default=None, alias="heapCost") + owned_entry_count: Optional[StrictInt] = Field(default=None, alias="ownedEntryCount") + get_owned_entry_memory_cost: Optional[StrictInt] = Field(default=None, alias="getOwnedEntryMemoryCost") + size_in_memory: Optional[StrictInt] = Field(default=None, alias="sizeInMemory") + member: Optional[StrictStr] = None + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + max_size: Optional[StrictInt] = Field(default=None, alias="maxSize") + __properties: ClassVar[List[str]] = ["size", "statisticHits", "name", "backupCount", "backupEntryCount", "backupEntryMemoryCost", "heapCost", "ownedEntryCount", "getOwnedEntryMemoryCost", "sizeInMemory", "member", "groupName", "maxSize"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CacheInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CacheInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "size": obj.get("size"), + "statisticHits": obj.get("statisticHits"), + "name": obj.get("name"), + "backupCount": obj.get("backupCount"), + "backupEntryCount": obj.get("backupEntryCount"), + "backupEntryMemoryCost": obj.get("backupEntryMemoryCost"), + "heapCost": obj.get("heapCost"), + "ownedEntryCount": obj.get("ownedEntryCount"), + "getOwnedEntryMemoryCost": obj.get("getOwnedEntryMemoryCost"), + "sizeInMemory": obj.get("sizeInMemory"), + "member": obj.get("member"), + "groupName": obj.get("groupName"), + "maxSize": obj.get("maxSize") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/cache_member.py b/edu_sharing_openapi/edu_sharing_client/models/cache_member.py new file mode 100644 index 00000000..8bef8fcc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/cache_member.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CacheMember(BaseModel): + """ + CacheMember + """ # noqa: E501 + name: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CacheMember from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CacheMember from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/catalog.py b/edu_sharing_openapi/edu_sharing_client/models/catalog.py new file mode 100644 index 00000000..e9b106a2 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/catalog.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Catalog(BaseModel): + """ + Catalog + """ # noqa: E501 + name: Optional[StrictStr] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Catalog from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Catalog from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "url": obj.get("url") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection.py b/edu_sharing_openapi/edu_sharing_client/models/collection.py new file mode 100644 index 00000000..5e68e7ef --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Collection(BaseModel): + """ + Collection + """ # noqa: E501 + scope: Optional[StrictStr] = None + author_freetext: Optional[StrictStr] = Field(default=None, alias="authorFreetext") + order_ascending: Optional[StrictBool] = Field(default=None, alias="orderAscending") + level0: StrictBool = Field(description="false") + title: StrictStr + description: Optional[StrictStr] = None + type: StrictStr + viewtype: StrictStr + order_mode: Optional[StrictStr] = Field(default=None, alias="orderMode") + x: Optional[StrictInt] = None + y: Optional[StrictInt] = None + z: Optional[StrictInt] = None + color: Optional[StrictStr] = None + from_user: StrictBool = Field(description="false", alias="fromUser") + pinned: Optional[StrictBool] = None + child_collections_count: Optional[StrictInt] = Field(default=None, alias="childCollectionsCount") + child_references_count: Optional[StrictInt] = Field(default=None, alias="childReferencesCount") + __properties: ClassVar[List[str]] = ["scope", "authorFreetext", "orderAscending", "level0", "title", "description", "type", "viewtype", "orderMode", "x", "y", "z", "color", "fromUser", "pinned", "childCollectionsCount", "childReferencesCount"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Collection from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Collection from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "scope": obj.get("scope"), + "authorFreetext": obj.get("authorFreetext"), + "orderAscending": obj.get("orderAscending"), + "level0": obj.get("level0"), + "title": obj.get("title"), + "description": obj.get("description"), + "type": obj.get("type"), + "viewtype": obj.get("viewtype"), + "orderMode": obj.get("orderMode"), + "x": obj.get("x"), + "y": obj.get("y"), + "z": obj.get("z"), + "color": obj.get("color"), + "fromUser": obj.get("fromUser"), + "pinned": obj.get("pinned"), + "childCollectionsCount": obj.get("childCollectionsCount"), + "childReferencesCount": obj.get("childReferencesCount") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection_counts.py b/edu_sharing_openapi/edu_sharing_client/models/collection_counts.py new file mode 100644 index 00000000..18356085 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection_counts.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.element import Element +from typing import Optional, Set +from typing_extensions import Self + +class CollectionCounts(BaseModel): + """ + CollectionCounts + """ # noqa: E501 + refs: Optional[List[Element]] = None + collections: Optional[List[Element]] = None + __properties: ClassVar[List[str]] = ["refs", "collections"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionCounts from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in refs (list) + _items = [] + if self.refs: + for _item_refs in self.refs: + if _item_refs: + _items.append(_item_refs.to_dict()) + _dict['refs'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in collections (list) + _items = [] + if self.collections: + for _item_collections in self.collections: + if _item_collections: + _items.append(_item_collections.to_dict()) + _dict['collections'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionCounts from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "refs": [Element.from_dict(_item) for _item in obj["refs"]] if obj.get("refs") is not None else None, + "collections": [Element.from_dict(_item) for _item in obj["collections"]] if obj.get("collections") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection_dto.py b/edu_sharing_openapi/edu_sharing_client/models/collection_dto.py new file mode 100644 index 00000000..83418d48 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection_dto.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CollectionDTO(BaseModel): + """ + CollectionDTO + """ # noqa: E501 + type: Optional[StrictStr] = None + aspects: Optional[List[StrictStr]] = None + properties: Optional[Dict[str, Dict[str, Any]]] = None + __properties: ClassVar[List[str]] = ["type", "aspects", "properties"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "type": obj.get("type"), + "aspects": obj.get("aspects"), + "properties": obj.get("properties") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection_entries.py b/edu_sharing_openapi/edu_sharing_client/models/collection_entries.py new file mode 100644 index 00000000..ef961ef7 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection_entries.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class CollectionEntries(BaseModel): + """ + CollectionEntries + """ # noqa: E501 + pagination: Optional[Pagination] = None + collections: List[Node] + __properties: ClassVar[List[str]] = ["pagination", "collections"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in collections (list) + _items = [] + if self.collections: + for _item_collections in self.collections: + if _item_collections: + _items.append(_item_collections.to_dict()) + _dict['collections'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "collections": [Node.from_dict(_item) for _item in obj["collections"]] if obj.get("collections") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection_entry.py b/edu_sharing_openapi/edu_sharing_client/models/collection_entry.py new file mode 100644 index 00000000..840a1069 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection_entry.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node import Node +from typing import Optional, Set +from typing_extensions import Self + +class CollectionEntry(BaseModel): + """ + CollectionEntry + """ # noqa: E501 + collection: Node + __properties: ClassVar[List[str]] = ["collection"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of collection + if self.collection: + _dict['collection'] = self.collection.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "collection": Node.from_dict(obj["collection"]) if obj.get("collection") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection_options.py b/edu_sharing_openapi/edu_sharing_client/models/collection_options.py new file mode 100644 index 00000000..b43f1205 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection_options.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CollectionOptions(BaseModel): + """ + CollectionOptions + """ # noqa: E501 + private_collections: Optional[StrictStr] = Field(default=None, alias="privateCollections") + public_collections: Optional[StrictStr] = Field(default=None, alias="publicCollections") + __properties: ClassVar[List[str]] = ["privateCollections", "publicCollections"] + + @field_validator('private_collections') + def private_collections_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + @field_validator('public_collections') + def public_collections_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "privateCollections": obj.get("privateCollections"), + "publicCollections": obj.get("publicCollections") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection_proposal_entries.py b/edu_sharing_openapi/edu_sharing_client/models/collection_proposal_entries.py new file mode 100644 index 00000000..8141492a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection_proposal_entries.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_collection_proposal_count import NodeCollectionProposalCount +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class CollectionProposalEntries(BaseModel): + """ + CollectionProposalEntries + """ # noqa: E501 + pagination: Optional[Pagination] = None + collections: List[NodeCollectionProposalCount] + __properties: ClassVar[List[str]] = ["pagination", "collections"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionProposalEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
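The CollectionOptions validators above restrict both fields to 'none', 'assign' and 'delete'. A short sketch of how that surfaces to callers (not part of the generated code; the chosen values are arbitrary):

from pydantic import ValidationError
from edu_sharing_client.models.collection_options import CollectionOptions

# Construction accepts either the JSON alias or the Python field name,
# because populate_by_name=True is set in the model config.
opts = CollectionOptions(privateCollections="assign", public_collections="none")
assert opts.to_dict() == {"privateCollections": "assign", "publicCollections": "none"}

# Values outside the declared enum are rejected by the field validators.
try:
    CollectionOptions(privateCollections="archive")
except ValidationError as exc:
    print(exc)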
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in collections (list) + _items = [] + if self.collections: + for _item_collections in self.collections: + if _item_collections: + _items.append(_item_collections.to_dict()) + _dict['collections'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionProposalEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "collections": [NodeCollectionProposalCount.from_dict(_item) for _item in obj["collections"]] if obj.get("collections") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collection_reference.py b/edu_sharing_openapi/edu_sharing_client/models/collection_reference.py new file mode 100644 index 00000000..ffcd3688 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collection_reference.py @@ -0,0 +1,231 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection import Collection +from edu_sharing_client.models.content import Content +from edu_sharing_client.models.contributor import Contributor +from edu_sharing_client.models.license import License +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.person import Person +from edu_sharing_client.models.preview import Preview +from edu_sharing_client.models.rating_details import RatingDetails +from edu_sharing_client.models.remote import Remote +from typing import Optional, Set +from typing_extensions import Self + +class CollectionReference(BaseModel): + """ + CollectionReference + """ # noqa: E501 + node_lti_deep_link: Optional[NodeLTIDeepLink] = Field(default=None, alias="nodeLTIDeepLink") + remote: Optional[Remote] = None + content: Optional[Content] = None + license: Optional[License] = None + is_directory: Optional[StrictBool] = Field(default=None, alias="isDirectory") + comment_count: Optional[StrictInt] = Field(default=None, alias="commentCount") + rating: Optional[RatingDetails] = None + used_in_collections: Optional[List[Node]] = Field(default=None, alias="usedInCollections") + relations: Optional[Dict[str, Node]] = None + contributors: Optional[List[Contributor]] = None + access_original: Optional[List[StrictStr]] = Field(default=None, alias="accessOriginal") + original_restricted_access: Optional[StrictBool] = 
Field(default=None, alias="originalRestrictedAccess") + ref: NodeRef + parent: Optional[NodeRef] = None + type: Optional[StrictStr] = None + aspects: Optional[List[StrictStr]] = None + name: StrictStr + title: Optional[StrictStr] = None + metadataset: Optional[StrictStr] = None + repository_type: Optional[StrictStr] = Field(default=None, alias="repositoryType") + created_at: datetime = Field(alias="createdAt") + created_by: Person = Field(alias="createdBy") + modified_at: Optional[datetime] = Field(default=None, alias="modifiedAt") + modified_by: Optional[Person] = Field(default=None, alias="modifiedBy") + access: List[StrictStr] + download_url: StrictStr = Field(alias="downloadUrl") + properties: Optional[Dict[str, List[StrictStr]]] = None + mimetype: Optional[StrictStr] = None + mediatype: Optional[StrictStr] = None + size: Optional[StrictStr] = None + preview: Optional[Preview] = None + icon_url: Optional[StrictStr] = Field(default=None, alias="iconURL") + collection: Collection + owner: Person + original_id: Optional[StrictStr] = Field(default=None, alias="originalId") + is_public: Optional[StrictBool] = Field(default=None, alias="isPublic") + __properties: ClassVar[List[str]] = ["nodeLTIDeepLink", "remote", "content", "license", "isDirectory", "commentCount", "rating", "usedInCollections", "relations", "contributors", "accessOriginal", "originalRestrictedAccess", "ref", "parent", "type", "aspects", "name", "title", "metadataset", "repositoryType", "createdAt", "createdBy", "modifiedAt", "modifiedBy", "access", "downloadUrl", "properties", "mimetype", "mediatype", "size", "preview", "iconURL", "collection", "owner", "originalId", "isPublic"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionReference from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node_lti_deep_link + if self.node_lti_deep_link: + _dict['nodeLTIDeepLink'] = self.node_lti_deep_link.to_dict() + # override the default output from pydantic by calling `to_dict()` of remote + if self.remote: + _dict['remote'] = self.remote.to_dict() + # override the default output from pydantic by calling `to_dict()` of content + if self.content: + _dict['content'] = self.content.to_dict() + # override the default output from pydantic by calling `to_dict()` of license + if self.license: + _dict['license'] = self.license.to_dict() + # override the default output from pydantic by calling `to_dict()` of rating + if self.rating: + _dict['rating'] = self.rating.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in used_in_collections (list) + _items = [] + if self.used_in_collections: + for _item_used_in_collections in self.used_in_collections: + if _item_used_in_collections: + _items.append(_item_used_in_collections.to_dict()) + _dict['usedInCollections'] = _items + # override the default output from pydantic by calling `to_dict()` of each value in relations (dict) + _field_dict = {} + if self.relations: + for _key_relations in self.relations: + if self.relations[_key_relations]: + _field_dict[_key_relations] = self.relations[_key_relations].to_dict() + _dict['relations'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each item in contributors (list) + _items = [] + if self.contributors: + for _item_contributors in self.contributors: + if _item_contributors: + _items.append(_item_contributors.to_dict()) + _dict['contributors'] = _items + # override the default output from pydantic by calling `to_dict()` of ref + if self.ref: + _dict['ref'] = self.ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of parent + if self.parent: + _dict['parent'] = self.parent.to_dict() + # override the default output from pydantic by calling `to_dict()` of created_by + if self.created_by: + _dict['createdBy'] = self.created_by.to_dict() + # override the default output from pydantic by calling `to_dict()` of modified_by + if self.modified_by: + _dict['modifiedBy'] = self.modified_by.to_dict() + # override the default output from pydantic by calling `to_dict()` of preview + if self.preview: + _dict['preview'] = self.preview.to_dict() + # override the default output from pydantic by calling `to_dict()` of collection + if self.collection: + _dict['collection'] = self.collection.to_dict() + # override the default output from pydantic by calling `to_dict()` of owner + if self.owner: + _dict['owner'] = self.owner.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionReference from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeLTIDeepLink": NodeLTIDeepLink.from_dict(obj["nodeLTIDeepLink"]) if obj.get("nodeLTIDeepLink") is not None else None, + "remote": Remote.from_dict(obj["remote"]) if obj.get("remote") is not None else None, + "content": Content.from_dict(obj["content"]) if obj.get("content") is not None else None, + "license": License.from_dict(obj["license"]) if obj.get("license") is not None else 
None, + "isDirectory": obj.get("isDirectory"), + "commentCount": obj.get("commentCount"), + "rating": RatingDetails.from_dict(obj["rating"]) if obj.get("rating") is not None else None, + "usedInCollections": [Node.from_dict(_item) for _item in obj["usedInCollections"]] if obj.get("usedInCollections") is not None else None, + "relations": dict( + (_k, Node.from_dict(_v)) + for _k, _v in obj["relations"].items() + ) + if obj.get("relations") is not None + else None, + "contributors": [Contributor.from_dict(_item) for _item in obj["contributors"]] if obj.get("contributors") is not None else None, + "accessOriginal": obj.get("accessOriginal"), + "originalRestrictedAccess": obj.get("originalRestrictedAccess"), + "ref": NodeRef.from_dict(obj["ref"]) if obj.get("ref") is not None else None, + "parent": NodeRef.from_dict(obj["parent"]) if obj.get("parent") is not None else None, + "type": obj.get("type"), + "aspects": obj.get("aspects"), + "name": obj.get("name"), + "title": obj.get("title"), + "metadataset": obj.get("metadataset"), + "repositoryType": obj.get("repositoryType"), + "createdAt": obj.get("createdAt"), + "createdBy": Person.from_dict(obj["createdBy"]) if obj.get("createdBy") is not None else None, + "modifiedAt": obj.get("modifiedAt"), + "modifiedBy": Person.from_dict(obj["modifiedBy"]) if obj.get("modifiedBy") is not None else None, + "access": obj.get("access"), + "downloadUrl": obj.get("downloadUrl"), + "properties": obj.get("properties"), + "mimetype": obj.get("mimetype"), + "mediatype": obj.get("mediatype"), + "size": obj.get("size"), + "preview": Preview.from_dict(obj["preview"]) if obj.get("preview") is not None else None, + "iconURL": obj.get("iconURL"), + "collection": Collection.from_dict(obj["collection"]) if obj.get("collection") is not None else None, + "owner": Person.from_dict(obj["owner"]) if obj.get("owner") is not None else None, + "originalId": obj.get("originalId"), + "isPublic": obj.get("isPublic") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collections.py b/edu_sharing_openapi/edu_sharing_client/models/collections.py new file mode 100644 index 00000000..11687164 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collections.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Collections(BaseModel): + """ + Collections + """ # noqa: E501 + colors: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["colors"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Collections from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Collections from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "colors": obj.get("colors") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/collections_result.py b/edu_sharing_openapi/edu_sharing_client/models/collections_result.py new file mode 100644 index 00000000..5029bebd --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/collections_result.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CollectionsResult(BaseModel): + """ + CollectionsResult + """ # noqa: E501 + count: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CollectionsResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CollectionsResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "count": obj.get("count") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/comment.py b/edu_sharing_openapi/edu_sharing_client/models/comment.py new file mode 100644 index 00000000..76755e51 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/comment.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.user_simple import UserSimple +from typing import Optional, Set +from typing_extensions import Self + +class Comment(BaseModel): + """ + Comment + """ # noqa: E501 + ref: Optional[NodeRef] = None + reply_to: Optional[NodeRef] = Field(default=None, alias="replyTo") + creator: Optional[UserSimple] = None + created: Optional[StrictInt] = None + comment: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["ref", "replyTo", "creator", "created", "comment"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Comment from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of ref + if self.ref: + _dict['ref'] = self.ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of reply_to + if self.reply_to: + _dict['replyTo'] = self.reply_to.to_dict() + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Comment from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "ref": NodeRef.from_dict(obj["ref"]) if obj.get("ref") is not None else None, + "replyTo": NodeRef.from_dict(obj["replyTo"]) if obj.get("replyTo") is not None else None, + "creator": UserSimple.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "created": obj.get("created"), + "comment": obj.get("comment") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/comment_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/comment_event_dto.py new file mode 100644 index 00000000..74e19437 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/comment_event_dto.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class CommentEventDTO(NotificationEventDTO): + """ + CommentEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + comment_content: Optional[StrictStr] = Field(default=None, alias="commentContent") + comment_reference: Optional[StrictStr] = Field(default=None, alias="commentReference") + event: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "commentContent", "commentReference", "event"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CommentEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CommentEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "commentContent": obj.get("commentContent"), + "commentReference": obj.get("commentReference"), + "event": obj.get("event") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/comments.py b/edu_sharing_openapi/edu_sharing_client/models/comments.py new file mode 100644 index 00000000..96c31f97 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/comments.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.comment import Comment +from typing import Optional, Set +from typing_extensions import Self + +class Comments(BaseModel): + """ + Comments + """ # noqa: E501 + comments: Optional[List[Comment]] = None + __properties: ClassVar[List[str]] = ["comments"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Comments from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in comments (list) + _items = [] + if self.comments: + for _item_comments in self.comments: + if _item_comments: + _items.append(_item_comments.to_dict()) + _dict['comments'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Comments from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "comments": [Comment.from_dict(_item) for _item in obj["comments"]] if obj.get("comments") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/condition.py b/edu_sharing_openapi/edu_sharing_client/models/condition.py new file mode 100644 index 00000000..587b906c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/condition.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Condition(BaseModel): + """ + Condition + """ # noqa: E501 + type: Optional[StrictStr] = None + negate: Optional[StrictBool] = None + value: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["type", "negate", "value"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['TOOLPERMISSION']): + raise ValueError("must be one of enum values ('TOOLPERMISSION')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Condition from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Condition from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "type": obj.get("type"), + "negate": obj.get("negate"), + "value": obj.get("value") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config.py b/edu_sharing_openapi/edu_sharing_client/models/config.py new file mode 100644 index 00000000..bec3ffa1 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.language import Language +from edu_sharing_client.models.values import Values +from typing import Optional, Set +from typing_extensions import Self + +class Config(BaseModel): + """ + Config + """ # noqa: E501 + current: Optional[Values] = None + var_global: Optional[Values] = Field(default=None, alias="global") + language: Optional[Language] = None + __properties: ClassVar[List[str]] = ["current", "global", "language"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Config from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of current + if self.current: + _dict['current'] = self.current.to_dict() + # override the default output from pydantic by calling `to_dict()` of var_global + if self.var_global: + _dict['global'] = self.var_global.to_dict() + # override the default output from pydantic by calling `to_dict()` of language + if self.language: + _dict['language'] = self.language.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Config from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "current": Values.from_dict(obj["current"]) if obj.get("current") is not None else None, + "global": Values.from_dict(obj["global"]) if obj.get("global") is not None else None, + "language": Language.from_dict(obj["language"]) if obj.get("language") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_frontpage.py b/edu_sharing_openapi/edu_sharing_client/models/config_frontpage.py new file mode 100644 index 00000000..945fe213 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_frontpage.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigFrontpage(BaseModel): + """ + ConfigFrontpage + """ # noqa: E501 + enabled: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["enabled"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigFrontpage from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigFrontpage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "enabled": obj.get("enabled") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_privacy.py b/edu_sharing_openapi/edu_sharing_client/models/config_privacy.py new file mode 100644 index 00000000..741187cb --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_privacy.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigPrivacy(BaseModel): + """ + ConfigPrivacy + """ # noqa: E501 + cookie_disclaimer: Optional[StrictBool] = Field(default=None, alias="cookieDisclaimer") + __properties: ClassVar[List[str]] = ["cookieDisclaimer"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigPrivacy from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigPrivacy from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cookieDisclaimer": obj.get("cookieDisclaimer") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_publish.py b/edu_sharing_openapi/edu_sharing_client/models/config_publish.py new file mode 100644 index 00000000..7bbe3e1c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_publish.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigPublish(BaseModel): + """ + ConfigPublish + """ # noqa: E501 + license_mandatory: Optional[StrictBool] = Field(default=None, alias="licenseMandatory") + author_mandatory: Optional[StrictBool] = Field(default=None, alias="authorMandatory") + __properties: ClassVar[List[str]] = ["licenseMandatory", "authorMandatory"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigPublish from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigPublish from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "licenseMandatory": obj.get("licenseMandatory"), + "authorMandatory": obj.get("authorMandatory") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_rating.py b/edu_sharing_openapi/edu_sharing_client/models/config_rating.py new file mode 100644 index 00000000..bbad25dd --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_rating.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
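ConfigPublish maps the camelCase wire names onto snake_case attributes via field aliases. A small sketch of reading a configuration payload (the payload itself is made up, not taken from a real repository):

from edu_sharing_client.models.config_publish import ConfigPublish

# The wire format uses the camelCase aliases ...
publish = ConfigPublish.from_dict({"licenseMandatory": True, "authorMandatory": False})

# ... while the Python attributes use snake_case.
assert publish.license_mandatory is True
assert publish.author_mandatory is False

# Serialising goes back to the aliased keys.
assert publish.to_dict() == {"licenseMandatory": True, "authorMandatory": False}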
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigRating(BaseModel): + """ + ConfigRating + """ # noqa: E501 + mode: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["mode"] + + @field_validator('mode') + def mode_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'likes', 'stars']): + raise ValueError("must be one of enum values ('none', 'likes', 'stars')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigRating from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigRating from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "mode": obj.get("mode") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_remote.py b/edu_sharing_openapi/edu_sharing_client/models/config_remote.py new file mode 100644 index 00000000..9d3f7de1 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_remote.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigRemote(BaseModel): + """ + ConfigRemote + """ # noqa: E501 + rocketchat: Optional[Dict[str, Any]] = None + __properties: ClassVar[List[str]] = ["rocketchat"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigRemote from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigRemote from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rocketchat": obj.get("rocketchat") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_theme_color.py b/edu_sharing_openapi/edu_sharing_client/models/config_theme_color.py new file mode 100644 index 00000000..9f956aa2 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_theme_color.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigThemeColor(BaseModel): + """ + ConfigThemeColor + """ # noqa: E501 + variable: Optional[StrictStr] = None + value: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["variable", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigThemeColor from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigThemeColor from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "variable": obj.get("variable"), + "value": obj.get("value") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_theme_colors.py b/edu_sharing_openapi/edu_sharing_client/models/config_theme_colors.py new file mode 100644 index 00000000..fef22b8e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_theme_colors.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.config_theme_color import ConfigThemeColor +from typing import Optional, Set +from typing_extensions import Self + +class ConfigThemeColors(BaseModel): + """ + ConfigThemeColors + """ # noqa: E501 + color: Optional[List[ConfigThemeColor]] = None + __properties: ClassVar[List[str]] = ["color"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigThemeColors from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in color (list) + _items = [] + if self.color: + for _item_color in self.color: + if _item_color: + _items.append(_item_color.to_dict()) + _dict['color'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigThemeColors from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "color": [ConfigThemeColor.from_dict(_item) for _item in obj["color"]] if obj.get("color") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_tutorial.py b/edu_sharing_openapi/edu_sharing_client/models/config_tutorial.py new file mode 100644 index 00000000..a7e06adb --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_tutorial.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigTutorial(BaseModel): + """ + ConfigTutorial + """ # noqa: E501 + enabled: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["enabled"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigTutorial from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigTutorial from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "enabled": obj.get("enabled") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_upload.py b/edu_sharing_openapi/edu_sharing_client/models/config_upload.py new file mode 100644 index 00000000..a090e61b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_upload.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigUpload(BaseModel): + """ + ConfigUpload + """ # noqa: E501 + post_dialog: Optional[StrictStr] = Field(default=None, alias="postDialog") + __properties: ClassVar[List[str]] = ["postDialog"] + + @field_validator('post_dialog') + def post_dialog_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['SimpleEdit', 'Mds']): + raise ValueError("must be one of enum values ('SimpleEdit', 'Mds')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigUpload from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigUpload from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "postDialog": obj.get("postDialog") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_workflow.py b/edu_sharing_openapi/edu_sharing_client/models/config_workflow.py new file mode 100644 index 00000000..4686330e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_workflow.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.config_workflow_list import ConfigWorkflowList +from typing import Optional, Set +from typing_extensions import Self + +class ConfigWorkflow(BaseModel): + """ + ConfigWorkflow + """ # noqa: E501 + default_receiver: Optional[StrictStr] = Field(default=None, alias="defaultReceiver") + default_status: Optional[StrictStr] = Field(default=None, alias="defaultStatus") + comment_required: Optional[StrictBool] = Field(default=None, alias="commentRequired") + workflows: Optional[List[ConfigWorkflowList]] = None + __properties: ClassVar[List[str]] = ["defaultReceiver", "defaultStatus", "commentRequired", "workflows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigWorkflow from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in workflows (list) + _items = [] + if self.workflows: + for _item_workflows in self.workflows: + if _item_workflows: + _items.append(_item_workflows.to_dict()) + _dict['workflows'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigWorkflow from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "defaultReceiver": obj.get("defaultReceiver"), + "defaultStatus": obj.get("defaultStatus"), + "commentRequired": obj.get("commentRequired"), + "workflows": [ConfigWorkflowList.from_dict(_item) for _item in obj["workflows"]] if obj.get("workflows") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/config_workflow_list.py b/edu_sharing_openapi/edu_sharing_client/models/config_workflow_list.py new file mode 100644 index 00000000..820a7fcc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/config_workflow_list.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConfigWorkflowList(BaseModel): + """ + ConfigWorkflowList + """ # noqa: E501 + id: Optional[StrictStr] = None + color: Optional[StrictStr] = None + has_receiver: Optional[StrictBool] = Field(default=None, alias="hasReceiver") + next: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["id", "color", "hasReceiver", "next"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConfigWorkflowList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConfigWorkflowList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "color": obj.get("color"), + "hasReceiver": obj.get("hasReceiver"), + "next": obj.get("next") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/connector.py b/edu_sharing_openapi/edu_sharing_client/models/connector.py new file mode 100644 index 00000000..de41e46b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/connector.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.connector_file_type import ConnectorFileType +from typing import Optional, Set +from typing_extensions import Self + +class Connector(BaseModel): + """ + Connector + """ # noqa: E501 + id: Optional[StrictStr] = None + icon: Optional[StrictStr] = None + show_new: StrictBool = Field(description="false", alias="showNew") + parameters: Optional[List[StrictStr]] = None + filetypes: Optional[List[ConnectorFileType]] = None + only_desktop: Optional[StrictBool] = Field(default=None, alias="onlyDesktop") + has_view_mode: Optional[StrictBool] = Field(default=None, alias="hasViewMode") + __properties: ClassVar[List[str]] = ["id", "icon", "showNew", "parameters", "filetypes", "onlyDesktop", "hasViewMode"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Connector from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in filetypes (list) + _items = [] + if self.filetypes: + for _item_filetypes in self.filetypes: + if _item_filetypes: + _items.append(_item_filetypes.to_dict()) + _dict['filetypes'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Connector from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "icon": obj.get("icon"), + "showNew": obj.get("showNew"), + "parameters": obj.get("parameters"), + "filetypes": [ConnectorFileType.from_dict(_item) for _item in obj["filetypes"]] if obj.get("filetypes") is not None else None, + "onlyDesktop": obj.get("onlyDesktop"), + "hasViewMode": obj.get("hasViewMode") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/connector_file_type.py b/edu_sharing_openapi/edu_sharing_client/models/connector_file_type.py new file mode 100644 index 00000000..46b59b6e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/connector_file_type.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ConnectorFileType(BaseModel): + """ + ConnectorFileType + """ # noqa: E501 + ccressourceversion: Optional[StrictStr] = None + ccressourcetype: Optional[StrictStr] = None + ccresourcesubtype: Optional[StrictStr] = None + editor_type: Optional[StrictStr] = Field(default=None, alias="editorType") + mimetype: Optional[StrictStr] = None + filetype: Optional[StrictStr] = None + creatable: Optional[StrictBool] = None + editable: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["ccressourceversion", "ccressourcetype", "ccresourcesubtype", "editorType", "mimetype", "filetype", "creatable", "editable"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConnectorFileType from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConnectorFileType from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "ccressourceversion": obj.get("ccressourceversion"), + "ccressourcetype": obj.get("ccressourcetype"), + "ccresourcesubtype": obj.get("ccresourcesubtype"), + "editorType": obj.get("editorType"), + "mimetype": obj.get("mimetype"), + "filetype": obj.get("filetype"), + "creatable": obj.get("creatable"), + "editable": obj.get("editable") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/connector_list.py b/edu_sharing_openapi/edu_sharing_client/models/connector_list.py new file mode 100644 index 00000000..6a6716d5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/connector_list.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.connector import Connector +from typing import Optional, Set +from typing_extensions import Self + +class ConnectorList(BaseModel): + """ + ConnectorList + """ # noqa: E501 + url: Optional[StrictStr] = None + connectors: Optional[List[Connector]] = None + __properties: ClassVar[List[str]] = ["url", "connectors"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ConnectorList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in connectors (list) + _items = [] + if self.connectors: + for _item_connectors in self.connectors: + if _item_connectors: + _items.append(_item_connectors.to_dict()) + _dict['connectors'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ConnectorList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url"), + "connectors": [Connector.from_dict(_item) for _item in obj["connectors"]] if obj.get("connectors") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/content.py b/edu_sharing_openapi/edu_sharing_client/models/content.py new file mode 100644 index 00000000..c5276e0e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/content.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Content(BaseModel): + """ + Content + """ # noqa: E501 + url: Optional[StrictStr] = None + hash: Optional[StrictStr] = None + version: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["url", "hash", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Content from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Content from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url"), + "hash": obj.get("hash"), + "version": obj.get("version") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/context_menu_entry.py b/edu_sharing_openapi/edu_sharing_client/models/context_menu_entry.py new file mode 100644 index 00000000..3fdace04 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/context_menu_entry.py @@ -0,0 +1,146 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ContextMenuEntry(BaseModel): + """ + ContextMenuEntry + """ # noqa: E501 + position: Optional[StrictInt] = None + icon: Optional[StrictStr] = None + name: Optional[StrictStr] = None + url: Optional[StrictStr] = None + is_disabled: Optional[StrictBool] = Field(default=None, alias="isDisabled") + open_in_new: Optional[StrictBool] = Field(default=None, alias="openInNew") + is_separate: Optional[StrictBool] = Field(default=None, alias="isSeparate") + is_separate_bottom: Optional[StrictBool] = Field(default=None, alias="isSeparateBottom") + only_desktop: Optional[StrictBool] = Field(default=None, alias="onlyDesktop") + only_web: Optional[StrictBool] = Field(default=None, alias="onlyWeb") + mode: Optional[StrictStr] = None + scopes: Optional[List[StrictStr]] = None + ajax: Optional[StrictBool] = None + group: Optional[StrictStr] = None + permission: Optional[StrictStr] = None + toolpermission: Optional[StrictStr] = None + is_directory: Optional[StrictBool] = Field(default=None, alias="isDirectory") + show_as_action: Optional[StrictBool] = Field(default=None, alias="showAsAction") + multiple: Optional[StrictBool] = None + change_strategy: Optional[StrictStr] = Field(default=None, alias="changeStrategy") + __properties: ClassVar[List[str]] = ["position", "icon", "name", "url", "isDisabled", "openInNew", "isSeparate", "isSeparateBottom", "onlyDesktop", "onlyWeb", "mode", "scopes", "ajax", "group", "permission", "toolpermission", "isDirectory", "showAsAction", "multiple", "changeStrategy"] + + @field_validator('scopes') + def scopes_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + for i in value: + if i not in set(['Render', 'Search', 'CollectionsReferences', 'CollectionsCollection', 'WorkspaceList', 'WorkspaceTree', 'Oer', 'CreateMenu']): + raise ValueError("each list item must be one of ('Render', 'Search', 'CollectionsReferences', 'CollectionsCollection', 'WorkspaceList', 'WorkspaceTree', 'Oer', 'CreateMenu')") + return value + + @field_validator('change_strategy') + def change_strategy_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['update', 'remove']): + raise ValueError("must be one of enum values ('update', 'remove')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ContextMenuEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ContextMenuEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "position": obj.get("position"), + "icon": obj.get("icon"), + "name": obj.get("name"), + "url": obj.get("url"), + "isDisabled": obj.get("isDisabled"), + "openInNew": obj.get("openInNew"), + "isSeparate": obj.get("isSeparate"), + "isSeparateBottom": obj.get("isSeparateBottom"), + "onlyDesktop": obj.get("onlyDesktop"), + "onlyWeb": obj.get("onlyWeb"), + "mode": obj.get("mode"), + "scopes": obj.get("scopes"), + "ajax": obj.get("ajax"), + "group": obj.get("group"), + "permission": obj.get("permission"), + "toolpermission": obj.get("toolpermission"), + "isDirectory": obj.get("isDirectory"), + "showAsAction": obj.get("showAsAction"), + "multiple": obj.get("multiple"), + "changeStrategy": obj.get("changeStrategy") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/contributor.py b/edu_sharing_openapi/edu_sharing_client/models/contributor.py new file mode 100644 index 00000000..62ae31c3 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/contributor.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Contributor(BaseModel): + """ + Contributor + """ # noqa: E501 + var_property: Optional[StrictStr] = Field(default=None, alias="property") + firstname: Optional[StrictStr] = None + lastname: Optional[StrictStr] = None + email: Optional[StrictStr] = None + vcard: Optional[StrictStr] = None + org: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["property", "firstname", "lastname", "email", "vcard", "org"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Contributor from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Contributor from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "property": obj.get("property"), + "firstname": obj.get("firstname"), + "lastname": obj.get("lastname"), + "email": obj.get("email"), + "vcard": obj.get("vcard"), + "org": obj.get("org") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/counts.py b/edu_sharing_openapi/edu_sharing_client/models/counts.py new file mode 100644 index 00000000..3b911a40 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/counts.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.element import Element +from typing import Optional, Set +from typing_extensions import Self + +class Counts(BaseModel): + """ + Counts + """ # noqa: E501 + elements: Optional[List[Element]] = None + __properties: ClassVar[List[str]] = ["elements"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Counts from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in elements (list) + _items = [] + if self.elements: + for _item_elements in self.elements: + if _item_elements: + _items.append(_item_elements.to_dict()) + _dict['elements'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Counts from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "elements": [Element.from_dict(_item) for _item in obj["elements"]] if obj.get("elements") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/create.py b/edu_sharing_openapi/edu_sharing_client/models/create.py new file mode 100644 index 00000000..db7e4f08 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/create.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Create(BaseModel): + """ + Create + """ # noqa: E501 + only_metadata: Optional[StrictBool] = Field(default=None, alias="onlyMetadata") + __properties: ClassVar[List[str]] = ["onlyMetadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Create from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Create from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "onlyMetadata": obj.get("onlyMetadata") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/create_usage.py b/edu_sharing_openapi/edu_sharing_client/models/create_usage.py new file mode 100644 index 00000000..341f1e9f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/create_usage.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class CreateUsage(BaseModel): + """ + CreateUsage + """ # noqa: E501 + app_id: Optional[StrictStr] = Field(default=None, alias="appId") + course_id: Optional[StrictStr] = Field(default=None, alias="courseId") + resource_id: Optional[StrictStr] = Field(default=None, alias="resourceId") + node_id: Optional[StrictStr] = Field(default=None, alias="nodeId") + node_version: Optional[StrictStr] = Field(default=None, alias="nodeVersion") + __properties: ClassVar[List[str]] = ["appId", "courseId", "resourceId", "nodeId", "nodeVersion"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of CreateUsage from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of CreateUsage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "appId": obj.get("appId"), + "courseId": obj.get("courseId"), + "resourceId": obj.get("resourceId"), + "nodeId": obj.get("nodeId"), + "nodeVersion": obj.get("nodeVersion") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/delete_option.py b/edu_sharing_openapi/edu_sharing_client/models/delete_option.py new file mode 100644 index 00000000..94cadf62 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/delete_option.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DeleteOption(BaseModel): + """ + DeleteOption + """ # noqa: E501 + delete: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["delete"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DeleteOption from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DeleteOption from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "delete": obj.get("delete") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/dynamic_config.py b/edu_sharing_openapi/edu_sharing_client/models/dynamic_config.py new file mode 100644 index 00000000..c78d6d80 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/dynamic_config.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DynamicConfig(BaseModel): + """ + DynamicConfig + """ # noqa: E501 + node_id: Optional[StrictStr] = Field(default=None, alias="nodeId") + value: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["nodeId", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DynamicConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DynamicConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeId": obj.get("nodeId"), + "value": obj.get("value") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_token.py b/edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_token.py new file mode 100644 index 00000000..57742e22 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_token.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DynamicRegistrationToken(BaseModel): + """ + DynamicRegistrationToken + """ # noqa: E501 + token: Optional[StrictStr] = None + url: Optional[StrictStr] = None + registered_app_id: Optional[StrictStr] = Field(default=None, alias="registeredAppId") + ts_created: Optional[StrictInt] = Field(default=None, alias="tsCreated") + ts_expiry: Optional[StrictInt] = Field(default=None, alias="tsExpiry") + valid: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["token", "url", "registeredAppId", "tsCreated", "tsExpiry", "valid"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DynamicRegistrationToken from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DynamicRegistrationToken from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "token": obj.get("token"), + "url": obj.get("url"), + "registeredAppId": obj.get("registeredAppId"), + "tsCreated": obj.get("tsCreated"), + "tsExpiry": obj.get("tsExpiry"), + "valid": obj.get("valid") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_tokens.py b/edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_tokens.py new file mode 100644 index 00000000..377a844f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/dynamic_registration_tokens.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.dynamic_registration_token import DynamicRegistrationToken +from typing import Optional, Set +from typing_extensions import Self + +class DynamicRegistrationTokens(BaseModel): + """ + DynamicRegistrationTokens + """ # noqa: E501 + registration_links: Optional[List[DynamicRegistrationToken]] = Field(default=None, alias="registrationLinks") + __properties: ClassVar[List[str]] = ["registrationLinks"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DynamicRegistrationTokens from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in registration_links (list) + _items = [] + if self.registration_links: + for _item_registration_links in self.registration_links: + if _item_registration_links: + _items.append(_item_registration_links.to_dict()) + _dict['registrationLinks'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DynamicRegistrationTokens from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "registrationLinks": [DynamicRegistrationToken.from_dict(_item) for _item in obj["registrationLinks"]] if obj.get("registrationLinks") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/element.py b/edu_sharing_openapi/edu_sharing_client/models/element.py new file mode 100644 index 00000000..4d28a9ba --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/element.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Element(BaseModel): + """ + Element + """ # noqa: E501 + id: Optional[StrictStr] = None + name: Optional[StrictStr] = None + type: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["id", "name", "type"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Element from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Element from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "name": obj.get("name"), + "type": obj.get("type") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/error_response.py b/edu_sharing_openapi/edu_sharing_client/models/error_response.py new file mode 100644 index 00000000..a74dc5ad --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/error_response.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ErrorResponse(BaseModel): + """ + ErrorResponse + """ # noqa: E501 + stacktrace: Optional[StrictStr] = None + details: Optional[Dict[str, Dict[str, Any]]] = None + error: StrictStr + message: StrictStr + log_level: Optional[StrictStr] = Field(default=None, alias="logLevel") + stacktrace_array: List[StrictStr] = Field(alias="stacktraceArray") + __properties: ClassVar[List[str]] = ["stacktrace", "details", "error", "message", "logLevel", "stacktraceArray"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ErrorResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ErrorResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "stacktrace": obj.get("stacktrace"), + "details": obj.get("details"), + "error": obj.get("error"), + "message": obj.get("message"), + "logLevel": obj.get("logLevel"), + "stacktraceArray": obj.get("stacktraceArray") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/excel_result.py b/edu_sharing_openapi/edu_sharing_client/models/excel_result.py new file mode 100644 index 00000000..25e6a730 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/excel_result.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ExcelResult(BaseModel): + """ + ExcelResult + """ # noqa: E501 + rows: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["rows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ExcelResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ExcelResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rows": obj.get("rows") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/facet.py b/edu_sharing_openapi/edu_sharing_client/models/facet.py new file mode 100644 index 00000000..2a1ecc42 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/facet.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.value import Value +from typing import Optional, Set +from typing_extensions import Self + +class Facet(BaseModel): + """ + Facet + """ # noqa: E501 + var_property: StrictStr = Field(alias="property") + values: List[Value] + sum_other_doc_count: Optional[StrictInt] = Field(default=None, alias="sumOtherDocCount") + __properties: ClassVar[List[str]] = ["property", "values", "sumOtherDocCount"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Facet from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in values (list) + _items = [] + if self.values: + for _item_values in self.values: + if _item_values: + _items.append(_item_values.to_dict()) + _dict['values'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Facet from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "property": obj.get("property"), + "values": [Value.from_dict(_item) for _item in obj["values"]] if obj.get("values") is not None else None, + "sumOtherDocCount": obj.get("sumOtherDocCount") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/feature_info.py b/edu_sharing_openapi/edu_sharing_client/models/feature_info.py new file mode 100644 index 00000000..8f689600 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/feature_info.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class FeatureInfo(BaseModel): + """ + FeatureInfo + """ # noqa: E501 + id: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["id"] + + @field_validator('id') + def id_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['handleService', 'doiService']): + raise ValueError("must be one of enum values ('handleService', 'doiService')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FeatureInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FeatureInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/feedback_data.py b/edu_sharing_openapi/edu_sharing_client/models/feedback_data.py new file mode 100644 index 00000000..dc40a4d5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/feedback_data.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class FeedbackData(BaseModel): + """ + FeedbackData + """ # noqa: E501 + authority: Optional[StrictStr] = None + data: Optional[Dict[str, List[StrictStr]]] = None + created_at: Optional[datetime] = Field(default=None, alias="createdAt") + modified_at: Optional[datetime] = Field(default=None, alias="modifiedAt") + __properties: ClassVar[List[str]] = ["authority", "data", "createdAt", "modifiedAt"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FeedbackData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FeedbackData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "authority": obj.get("authority"), + "data": obj.get("data"), + "createdAt": obj.get("createdAt"), + "modifiedAt": obj.get("modifiedAt") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/feedback_result.py b/edu_sharing_openapi/edu_sharing_client/models/feedback_result.py new file mode 100644 index 00000000..39ed1edb --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/feedback_result.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class FeedbackResult(BaseModel): + """ + FeedbackResult + """ # noqa: E501 + node_id: Optional[StrictStr] = Field(default=None, alias="nodeId") + was_updated: Optional[StrictBool] = Field(default=None, alias="wasUpdated") + __properties: ClassVar[List[str]] = ["nodeId", "wasUpdated"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FeedbackResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FeedbackResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeId": obj.get("nodeId"), + "wasUpdated": obj.get("wasUpdated") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/filter.py b/edu_sharing_openapi/edu_sharing_client/models/filter.py new file mode 100644 index 00000000..613caeb0 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/filter.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.filter_entry import FilterEntry +from typing import Optional, Set +from typing_extensions import Self + +class Filter(BaseModel): + """ + Filter + """ # noqa: E501 + entries: List[FilterEntry] + __properties: ClassVar[List[str]] = ["entries"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Filter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in entries (list) + _items = [] + if self.entries: + for _item_entries in self.entries: + if _item_entries: + _items.append(_item_entries.to_dict()) + _dict['entries'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Filter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "entries": [FilterEntry.from_dict(_item) for _item in obj["entries"]] if obj.get("entries") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/filter_entry.py b/edu_sharing_openapi/edu_sharing_client/models/filter_entry.py new file mode 100644 index 00000000..853ffeaf --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/filter_entry.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class FilterEntry(BaseModel): + """ + FilterEntry + """ # noqa: E501 + var_property: StrictStr = Field(alias="property") + values: List[StrictStr] + __properties: ClassVar[List[str]] = ["property", "values"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FilterEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FilterEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "property": obj.get("property"), + "values": obj.get("values") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/font_icon.py b/edu_sharing_openapi/edu_sharing_client/models/font_icon.py new file mode 100644 index 00000000..1b9e1a5b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/font_icon.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class FontIcon(BaseModel): + """ + FontIcon + """ # noqa: E501 + original: Optional[StrictStr] = None + replace: Optional[StrictStr] = None + css_class: Optional[StrictStr] = Field(default=None, alias="cssClass") + __properties: ClassVar[List[str]] = ["original", "replace", "cssClass"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of FontIcon from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of FontIcon from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "original": obj.get("original"), + "replace": obj.get("replace"), + "cssClass": obj.get("cssClass") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/frontpage.py b/edu_sharing_openapi/edu_sharing_client/models/frontpage.py new file mode 100644 index 00000000..475f515d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/frontpage.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.query import Query +from typing import Optional, Set +from typing_extensions import Self + +class Frontpage(BaseModel): + """ + Frontpage + """ # noqa: E501 + total_count: Optional[StrictInt] = Field(default=None, alias="totalCount") + display_count: Optional[StrictInt] = Field(default=None, alias="displayCount") + mode: Optional[StrictStr] = None + timespan: Optional[StrictInt] = None + timespan_all: Optional[StrictBool] = Field(default=None, alias="timespanAll") + queries: Optional[List[Query]] = None + collection: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["totalCount", "displayCount", "mode", "timespan", "timespanAll", "queries", "collection"] + + @field_validator('mode') + def mode_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['collection', 'rating', 'views', 'downloads']): + raise ValueError("must be one of enum values ('collection', 'rating', 'views', 'downloads')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Frontpage from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in queries (list) + _items = [] + if self.queries: + for _item_queries in self.queries: + if _item_queries: + _items.append(_item_queries.to_dict()) + _dict['queries'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Frontpage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "totalCount": obj.get("totalCount"), + "displayCount": obj.get("displayCount"), + "mode": obj.get("mode"), + "timespan": obj.get("timespan"), + "timespanAll": obj.get("timespanAll"), + "queries": [Query.from_dict(_item) for _item in obj["queries"]] if obj.get("queries") is not None else None, + "collection": obj.get("collection") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/general.py b/edu_sharing_openapi/edu_sharing_client/models/general.py new file mode 100644 index 00000000..a379033b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/general.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class General(BaseModel): + """ + General + """ # noqa: E501 + referenced_in_name: Optional[StrictStr] = Field(default=None, alias="referencedInName") + referenced_in_type: Optional[StrictStr] = Field(default=None, alias="referencedInType") + referenced_in_instance: Optional[StrictStr] = Field(default=None, alias="referencedInInstance") + __properties: ClassVar[List[str]] = ["referencedInName", "referencedInType", "referencedInInstance"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of General from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of General from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "referencedInName": obj.get("referencedInName"), + "referencedInType": obj.get("referencedInType"), + "referencedInInstance": obj.get("referencedInInstance") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/geo.py b/edu_sharing_openapi/edu_sharing_client/models/geo.py new file mode 100644 index 00000000..4988f8da --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/geo.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class Geo(BaseModel): + """ + Geo + """ # noqa: E501 + longitude: Optional[Union[StrictFloat, StrictInt]] = None + latitude: Optional[Union[StrictFloat, StrictInt]] = None + address_country: Optional[StrictStr] = Field(default=None, alias="addressCountry") + __properties: ClassVar[List[str]] = ["longitude", "latitude", "addressCountry"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Geo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Geo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "longitude": obj.get("longitude"), + "latitude": obj.get("latitude"), + "addressCountry": obj.get("addressCountry") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/group.py b/edu_sharing_openapi/edu_sharing_client/models/group.py new file mode 100644 index 00000000..d66aff02 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/group.py @@ -0,0 +1,141 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.organization import Organization +from typing import Optional, Set +from typing_extensions import Self + +class Group(BaseModel): + """ + Group + """ # noqa: E501 + properties: Optional[Dict[str, List[StrictStr]]] = None + editable: Optional[StrictBool] = None + signup_method: Optional[StrictStr] = Field(default=None, alias="signupMethod") + ref: Optional[NodeRef] = None + aspects: Optional[List[StrictStr]] = None + organizations: Optional[List[Organization]] = None + authority_name: StrictStr = Field(alias="authorityName") + authority_type: Optional[StrictStr] = Field(default=None, alias="authorityType") + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + profile: Optional[GroupProfile] = None + __properties: ClassVar[List[str]] = ["properties", "editable", "signupMethod", "ref", "aspects", "organizations", "authorityName", "authorityType", "groupName", "profile"] + + @field_validator('signup_method') + def signup_method_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['simple', 'password', 'list']): + raise ValueError("must be one of enum values ('simple', 'password', 'list')") + return value + + @field_validator('authority_type') + def authority_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST']): + raise ValueError("must be one of enum values ('USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def 
from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Group from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of ref + if self.ref: + _dict['ref'] = self.ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in organizations (list) + _items = [] + if self.organizations: + for _item_organizations in self.organizations: + if _item_organizations: + _items.append(_item_organizations.to_dict()) + _dict['organizations'] = _items + # override the default output from pydantic by calling `to_dict()` of profile + if self.profile: + _dict['profile'] = self.profile.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Group from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "properties": obj.get("properties"), + "editable": obj.get("editable"), + "signupMethod": obj.get("signupMethod"), + "ref": NodeRef.from_dict(obj["ref"]) if obj.get("ref") is not None else None, + "aspects": obj.get("aspects"), + "organizations": [Organization.from_dict(_item) for _item in obj["organizations"]] if obj.get("organizations") is not None else None, + "authorityName": obj.get("authorityName"), + "authorityType": obj.get("authorityType"), + "groupName": obj.get("groupName"), + "profile": GroupProfile.from_dict(obj["profile"]) if obj.get("profile") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/group_entries.py b/edu_sharing_openapi/edu_sharing_client/models/group_entries.py new file mode 100644 index 00000000..e39dbbc4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/group_entries.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.group import Group +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class GroupEntries(BaseModel): + """ + GroupEntries + """ # noqa: E501 + groups: List[Group] + pagination: Pagination + __properties: ClassVar[List[str]] = ["groups", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GroupEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in groups (list) + _items = [] + if self.groups: + for _item_groups in self.groups: + if _item_groups: + _items.append(_item_groups.to_dict()) + _dict['groups'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GroupEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "groups": [Group.from_dict(_item) for _item in obj["groups"]] if obj.get("groups") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/group_entry.py b/edu_sharing_openapi/edu_sharing_client/models/group_entry.py new file mode 100644 index 00000000..cdeb702a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/group_entry.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.group import Group +from typing import Optional, Set +from typing_extensions import Self + +class GroupEntry(BaseModel): + """ + GroupEntry + """ # noqa: E501 + group: Group + __properties: ClassVar[List[str]] = ["group"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GroupEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of group + if self.group: + _dict['group'] = self.group.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GroupEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "group": Group.from_dict(obj["group"]) if obj.get("group") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/group_profile.py b/edu_sharing_openapi/edu_sharing_client/models/group_profile.py new file mode 100644 index 00000000..e1905b31 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/group_profile.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GroupProfile(BaseModel): + """ + GroupProfile + """ # noqa: E501 + group_email: Optional[StrictStr] = Field(default=None, alias="groupEmail") + display_name: Optional[StrictStr] = Field(default=None, alias="displayName") + group_type: Optional[StrictStr] = Field(default=None, alias="groupType") + scope_type: Optional[StrictStr] = Field(default=None, alias="scopeType") + __properties: ClassVar[List[str]] = ["groupEmail", "displayName", "groupType", "scopeType"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GroupProfile from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GroupProfile from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "groupEmail": obj.get("groupEmail"), + "displayName": obj.get("displayName"), + "groupType": obj.get("groupType"), + "scopeType": obj.get("scopeType") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/group_signup_details.py b/edu_sharing_openapi/edu_sharing_client/models/group_signup_details.py new file mode 100644 index 00000000..8abc3268 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/group_signup_details.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class GroupSignupDetails(BaseModel): + """ + GroupSignupDetails + """ # noqa: E501 + signup_method: Optional[StrictStr] = Field(default=None, alias="signupMethod") + signup_password: Optional[StrictStr] = Field(default=None, alias="signupPassword") + __properties: ClassVar[List[str]] = ["signupMethod", "signupPassword"] + + @field_validator('signup_method') + def signup_method_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['simple', 'password', 'list']): + raise ValueError("must be one of enum values ('simple', 'password', 'list')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GroupSignupDetails from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GroupSignupDetails from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "signupMethod": obj.get("signupMethod"), + "signupPassword": obj.get("signupPassword") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/guest.py b/edu_sharing_openapi/edu_sharing_client/models/guest.py new file mode 100644 index 00000000..a703bbb5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/guest.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Guest(BaseModel): + """ + Guest + """ # noqa: E501 + enabled: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["enabled"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Guest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Guest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "enabled": obj.get("enabled") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/handle_param.py b/edu_sharing_openapi/edu_sharing_client/models/handle_param.py new file mode 100644 index 00000000..f085e258 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/handle_param.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class HandleParam(BaseModel): + """ + HandleParam + """ # noqa: E501 + handle_service: Optional[StrictStr] = Field(default=None, alias="handleService") + doi_service: Optional[StrictStr] = Field(default=None, alias="doiService") + __properties: ClassVar[List[str]] = ["handleService", "doiService"] + + @field_validator('handle_service') + def handle_service_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['distinct', 'update']): + raise ValueError("must be one of enum values ('distinct', 'update')") + return value + + @field_validator('doi_service') + def doi_service_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['distinct', 'update']): + raise ValueError("must be one of enum values ('distinct', 'update')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of HandleParam from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of HandleParam from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "handleService": obj.get("handleService"), + "doiService": obj.get("doiService") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/help_menu_options.py b/edu_sharing_openapi/edu_sharing_client/models/help_menu_options.py new file mode 100644 index 00000000..189b005c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/help_menu_options.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class HelpMenuOptions(BaseModel): + """ + HelpMenuOptions + """ # noqa: E501 + key: Optional[StrictStr] = None + icon: Optional[StrictStr] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["key", "icon", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of HelpMenuOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of HelpMenuOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "icon": obj.get("icon"), + "url": obj.get("url") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/home_folder_options.py b/edu_sharing_openapi/edu_sharing_client/models/home_folder_options.py new file mode 100644 index 00000000..152957fa --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/home_folder_options.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class HomeFolderOptions(BaseModel): + """ + HomeFolderOptions + """ # noqa: E501 + folders: Optional[StrictStr] = None + private_files: Optional[StrictStr] = Field(default=None, alias="privateFiles") + cc_files: Optional[StrictStr] = Field(default=None, alias="ccFiles") + keep_folder_structure: Optional[StrictBool] = Field(default=None, alias="keepFolderStructure") + __properties: ClassVar[List[str]] = ["folders", "privateFiles", "ccFiles", "keepFolderStructure"] + + @field_validator('folders') + def folders_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + @field_validator('private_files') + def private_files_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + @field_validator('cc_files') + def cc_files_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of HomeFolderOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of HomeFolderOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "folders": obj.get("folders"), + "privateFiles": obj.get("privateFiles"), + "ccFiles": obj.get("ccFiles"), + "keepFolderStructure": obj.get("keepFolderStructure") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/icon.py b/edu_sharing_openapi/edu_sharing_client/models/icon.py new file mode 100644 index 00000000..013610fc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/icon.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Icon(BaseModel): + """ + Icon + """ # noqa: E501 + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Icon from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Icon from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/image.py b/edu_sharing_openapi/edu_sharing_client/models/image.py new file mode 100644 index 00000000..2d21a143 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/image.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Image(BaseModel): + """ + Image + """ # noqa: E501 + src: Optional[StrictStr] = None + replace: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["src", "replace"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Image from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Image from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "src": obj.get("src"), + "replace": obj.get("replace") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/interface.py b/edu_sharing_openapi/edu_sharing_client/models/interface.py new file mode 100644 index 00000000..7077a44b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/interface.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Interface(BaseModel): + """ + Interface + """ # noqa: E501 + url: Optional[StrictStr] = None + set: Optional[StrictStr] = None + metadata_prefix: Optional[StrictStr] = Field(default=None, alias="metadataPrefix") + documentation: Optional[StrictStr] = None + format: Optional[StrictStr] = None + type: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["url", "set", "metadataPrefix", "documentation", "format", "type"] + + @field_validator('format') + def format_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Json', 'XML', 'Text']): + raise ValueError("must be one of enum values ('Json', 'XML', 'Text')") + return value + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Search', 'Sitemap', 'Statistics', 'OAI', 'Generic_Api']): + raise ValueError("must be one of enum values ('Search', 'Sitemap', 'Statistics', 'OAI', 'Generic_Api')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Interface from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Interface from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url"), + "set": obj.get("set"), + "metadataPrefix": obj.get("metadataPrefix"), + "documentation": obj.get("documentation"), + "format": obj.get("format"), + "type": obj.get("type") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/invite_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/invite_event_dto.py new file mode 100644 index 00000000..686b4050 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/invite_event_dto.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class InviteEventDTO(NotificationEventDTO): + """ + InviteEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + name: Optional[StrictStr] = None + type: Optional[StrictStr] = None + user_comment: Optional[StrictStr] = Field(default=None, alias="userComment") + permissions: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "name", "type", "userComment", "permissions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of InviteEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of InviteEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "name": obj.get("name"), + "type": obj.get("type"), + "userComment": obj.get("userComment"), + "permissions": obj.get("permissions") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job.py b/edu_sharing_openapi/edu_sharing_client/models/job.py new file mode 100644 index 00000000..e5e043bc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Job(BaseModel): + """ + Job + """ # noqa: E501 + id: StrictStr + status: StrictStr + __properties: ClassVar[List[str]] = ["id", "status"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Job from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Job from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "status": obj.get("status") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_builder.py b/edu_sharing_openapi/edu_sharing_client/models/job_builder.py new file mode 100644 index 00000000..b424ac6f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_builder.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class JobBuilder(BaseModel): + """ + JobBuilder + """ # noqa: E501 + job_data: Optional[JobBuilder] = Field(default=None, alias="jobData") + __properties: ClassVar[List[str]] = ["jobData"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobBuilder from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of job_data + if self.job_data: + _dict['jobData'] = self.job_data.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobBuilder from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "jobData": JobBuilder.from_dict(obj["jobData"]) if obj.get("jobData") is not None else None + }) + return _obj + +# TODO: Rewrite to not use raise_errors +JobBuilder.model_rebuild(raise_errors=False) + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_data_map.py b/edu_sharing_openapi/edu_sharing_client/models/job_data_map.py new file mode 100644 index 00000000..6f669a4c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_data_map.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class JobDataMap(BaseModel): + """ + JobDataMap + """ # noqa: E501 + dirty: Optional[StrictBool] = None + allows_transient_data: Optional[StrictBool] = Field(default=None, alias="allowsTransientData") + keys: Optional[List[StrictStr]] = None + wrapped_map: Optional[Dict[str, Dict[str, Any]]] = Field(default=None, alias="wrappedMap") + empty: Optional[StrictBool] = None + additional_properties: Dict[str, Any] = {} + __properties: ClassVar[List[str]] = ["dirty", "allowsTransientData", "keys", "wrappedMap", "empty"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobDataMap from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + * Fields in `self.additional_properties` are added to the output dict. 
+ """ + excluded_fields: Set[str] = set([ + "additional_properties", + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # puts key-value pairs in additional_properties in the top level + if self.additional_properties is not None: + for _key, _value in self.additional_properties.items(): + _dict[_key] = _value + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobDataMap from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "dirty": obj.get("dirty"), + "allowsTransientData": obj.get("allowsTransientData"), + "keys": obj.get("keys"), + "wrappedMap": obj.get("wrappedMap"), + "empty": obj.get("empty") + }) + # store additional fields in additional_properties + for _key in obj.keys(): + if _key not in cls.__properties: + _obj.additional_properties[_key] = obj.get(_key) + + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_description.py b/edu_sharing_openapi/edu_sharing_client/models/job_description.py new file mode 100644 index 00000000..ea39776e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_description.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.job_field_description import JobFieldDescription +from typing import Optional, Set +from typing_extensions import Self + +class JobDescription(BaseModel): + """ + JobDescription + """ # noqa: E501 + name: Optional[StrictStr] = None + description: Optional[StrictStr] = None + params: Optional[List[JobFieldDescription]] = None + tags: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["name", "description", "params", "tags"] + + @field_validator('tags') + def tags_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + for i in value: + if i not in set(['DeletePersonJob']): + raise ValueError("each list item must be one of ('DeletePersonJob')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobDescription from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
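The additional_properties handling in JobDataMap above is the main difference from a plain pydantic model: keys that are not declared in __properties survive a from_dict/to_dict round trip instead of being dropped. A small sketch, assuming the edu_sharing_client package from this patch is on the import path; "customFlag" is just an example of an undeclared key:

    from edu_sharing_client.models.job_data_map import JobDataMap

    raw = {"dirty": False, "keys": ["import"], "customFlag": True}  # "customFlag" is not a declared property
    jdm = JobDataMap.from_dict(raw)

    # Undeclared keys are kept in additional_properties ...
    assert jdm.additional_properties == {"customFlag": True}
    # ... and re-emitted at the top level by to_dict(); None-valued declared fields are omitted.
    assert jdm.to_dict() == {"dirty": False, "keys": ["import"], "customFlag": True}
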
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in params (list) + _items = [] + if self.params: + for _item_params in self.params: + if _item_params: + _items.append(_item_params.to_dict()) + _dict['params'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobDescription from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "description": obj.get("description"), + "params": [JobFieldDescription.from_dict(_item) for _item in obj["params"]] if obj.get("params") is not None else None, + "tags": obj.get("tags") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_detail.py b/edu_sharing_openapi/edu_sharing_client/models/job_detail.py new file mode 100644 index 00000000..554c4061 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_detail.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.job_builder import JobBuilder +from edu_sharing_client.models.job_detail_job_data_map import JobDetailJobDataMap +from edu_sharing_client.models.job_key import JobKey +from typing import Optional, Set +from typing_extensions import Self + +class JobDetail(BaseModel): + """ + JobDetail + """ # noqa: E501 + key: Optional[JobKey] = None + job_data_map: Optional[JobDetailJobDataMap] = Field(default=None, alias="jobDataMap") + durable: Optional[StrictBool] = None + persist_job_data_after_execution: Optional[StrictBool] = Field(default=None, alias="persistJobDataAfterExecution") + concurrent_exection_disallowed: Optional[StrictBool] = Field(default=None, alias="concurrentExectionDisallowed") + job_builder: Optional[JobBuilder] = Field(default=None, alias="jobBuilder") + description: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["key", "jobDataMap", "durable", "persistJobDataAfterExecution", "concurrentExectionDisallowed", "jobBuilder", "description"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an 
instance of JobDetail from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of key + if self.key: + _dict['key'] = self.key.to_dict() + # override the default output from pydantic by calling `to_dict()` of job_data_map + if self.job_data_map: + _dict['jobDataMap'] = self.job_data_map.to_dict() + # override the default output from pydantic by calling `to_dict()` of job_builder + if self.job_builder: + _dict['jobBuilder'] = self.job_builder.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobDetail from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": JobKey.from_dict(obj["key"]) if obj.get("key") is not None else None, + "jobDataMap": JobDetailJobDataMap.from_dict(obj["jobDataMap"]) if obj.get("jobDataMap") is not None else None, + "durable": obj.get("durable"), + "persistJobDataAfterExecution": obj.get("persistJobDataAfterExecution"), + "concurrentExectionDisallowed": obj.get("concurrentExectionDisallowed"), + "jobBuilder": JobBuilder.from_dict(obj["jobBuilder"]) if obj.get("jobBuilder") is not None else None, + "description": obj.get("description") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_detail_job_data_map.py b/edu_sharing_openapi/edu_sharing_client/models/job_detail_job_data_map.py new file mode 100644 index 00000000..e374a0e1 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_detail_job_data_map.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class JobDetailJobDataMap(BaseModel): + """ + JobDetailJobDataMap + """ # noqa: E501 + dirty: Optional[StrictBool] = None + allows_transient_data: Optional[StrictBool] = Field(default=None, alias="allowsTransientData") + keys: Optional[List[StrictStr]] = None + wrapped_map: Optional[Dict[str, Dict[str, Any]]] = Field(default=None, alias="wrappedMap") + empty: Optional[StrictBool] = None + additional_properties: Dict[str, Any] = {} + __properties: ClassVar[List[str]] = ["dirty", "allowsTransientData", "keys", "wrappedMap", "empty"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobDetailJobDataMap from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + * Fields in `self.additional_properties` are added to the output dict. + """ + excluded_fields: Set[str] = set([ + "additional_properties", + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # puts key-value pairs in additional_properties in the top level + if self.additional_properties is not None: + for _key, _value in self.additional_properties.items(): + _dict[_key] = _value + + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobDetailJobDataMap from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "dirty": obj.get("dirty"), + "allowsTransientData": obj.get("allowsTransientData"), + "keys": obj.get("keys"), + "wrappedMap": obj.get("wrappedMap"), + "empty": obj.get("empty") + }) + # store additional fields in additional_properties + for _key in obj.keys(): + if _key not in cls.__properties: + _obj.additional_properties[_key] = obj.get(_key) + + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_entry.py b/edu_sharing_openapi/edu_sharing_client/models/job_entry.py new file mode 100644 index 00000000..edb9adf9 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_entry.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.job import Job +from typing import Optional, Set +from typing_extensions import Self + +class JobEntry(BaseModel): + """ + JobEntry + """ # noqa: E501 + data: Job + __properties: ClassVar[List[str]] = ["data"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of data + if self.data: + _dict['data'] = self.data.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "data": Job.from_dict(obj["data"]) if obj.get("data") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_field_description.py b/edu_sharing_openapi/edu_sharing_client/models/job_field_description.py new file mode 100644 index 00000000..0ec491c3 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_field_description.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class JobFieldDescription(BaseModel): + """ + JobFieldDescription + """ # noqa: E501 + name: Optional[StrictStr] = None + description: Optional[StrictStr] = None + file: Optional[StrictBool] = None + sample_value: Optional[StrictStr] = Field(default=None, alias="sampleValue") + is_array: Optional[StrictBool] = Field(default=None, alias="isArray") + array: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["name", "description", "file", "sampleValue", "isArray", "array"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobFieldDescription from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobFieldDescription from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "description": obj.get("description"), + "file": obj.get("file"), + "sampleValue": obj.get("sampleValue"), + "isArray": obj.get("isArray"), + "array": obj.get("array") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_info.py b/edu_sharing_openapi/edu_sharing_client/models/job_info.py new file mode 100644 index 00000000..f731c220 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_info.py @@ -0,0 +1,133 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.job_detail import JobDetail +from edu_sharing_client.models.job_detail_job_data_map import JobDetailJobDataMap +from edu_sharing_client.models.level import Level +from edu_sharing_client.models.log_entry import LogEntry +from typing import Optional, Set +from typing_extensions import Self + +class JobInfo(BaseModel): + """ + JobInfo + """ # noqa: E501 + job_data_map: Optional[JobDetailJobDataMap] = Field(default=None, alias="jobDataMap") + job_name: Optional[StrictStr] = Field(default=None, alias="jobName") + job_group: Optional[StrictStr] = Field(default=None, alias="jobGroup") + start_time: Optional[StrictInt] = Field(default=None, alias="startTime") + finish_time: Optional[StrictInt] = Field(default=None, alias="finishTime") + status: Optional[StrictStr] = None + worst_level: Optional[Level] = Field(default=None, alias="worstLevel") + log: Optional[List[LogEntry]] = None + job_detail: Optional[JobDetail] = Field(default=None, alias="jobDetail") + __properties: ClassVar[List[str]] = ["jobDataMap", "jobName", "jobGroup", "startTime", "finishTime", "status", "worstLevel", "log", "jobDetail"] + + @field_validator('status') + def status_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Running', 'Failed', 'Aborted', 'Finished']): + raise ValueError("must be one of enum values ('Running', 'Failed', 'Aborted', 'Finished')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of job_data_map + if self.job_data_map: + _dict['jobDataMap'] = self.job_data_map.to_dict() + # override the default output from pydantic by calling `to_dict()` of worst_level + if self.worst_level: + _dict['worstLevel'] = self.worst_level.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in log (list) + _items = [] + if self.log: + for _item_log in self.log: + if _item_log: + _items.append(_item_log.to_dict()) + _dict['log'] = _items + # override the default output from pydantic by calling `to_dict()` of job_detail + if self.job_detail: + _dict['jobDetail'] = self.job_detail.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "jobDataMap": JobDetailJobDataMap.from_dict(obj["jobDataMap"]) if obj.get("jobDataMap") is not None else None, + "jobName": obj.get("jobName"), + "jobGroup": obj.get("jobGroup"), + "startTime": obj.get("startTime"), + "finishTime": obj.get("finishTime"), + "status": obj.get("status"), + "worstLevel": Level.from_dict(obj["worstLevel"]) if obj.get("worstLevel") is not None else None, + "log": [LogEntry.from_dict(_item) for _item in obj["log"]] if obj.get("log") is not None else None, + "jobDetail": JobDetail.from_dict(obj["jobDetail"]) if obj.get("jobDetail") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/job_key.py b/edu_sharing_openapi/edu_sharing_client/models/job_key.py new file mode 100644 index 00000000..065cd06f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/job_key.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class JobKey(BaseModel): + """ + JobKey + """ # noqa: E501 + name: Optional[StrictStr] = None + group: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["name", "group"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JobKey from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JobKey from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "group": obj.get("group") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/json_object.py b/edu_sharing_openapi/edu_sharing_client/models/json_object.py new file mode 100644 index 00000000..6066cc51 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/json_object.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class JSONObject(BaseModel): + """ + JSONObject + """ # noqa: E501 + empty: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["empty"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of JSONObject from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of JSONObject from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "empty": obj.get("empty") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/key_value_pair.py b/edu_sharing_openapi/edu_sharing_client/models/key_value_pair.py new file mode 100644 index 00000000..78823686 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/key_value_pair.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class KeyValuePair(BaseModel): + """ + KeyValuePair + """ # noqa: E501 + key: Optional[StrictStr] = None + value: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["key", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of KeyValuePair from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of KeyValuePair from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "value": obj.get("value") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/language.py b/edu_sharing_openapi/edu_sharing_client/models/language.py new file mode 100644 index 00000000..9f1619c0 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/language.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Language(BaseModel): + """ + Language + """ # noqa: E501 + var_global: Optional[Dict[str, StrictStr]] = Field(default=None, alias="global") + current: Optional[Dict[str, StrictStr]] = None + current_language: Optional[StrictStr] = Field(default=None, alias="currentLanguage") + __properties: ClassVar[List[str]] = ["global", "current", "currentLanguage"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Language from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Language from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "global": obj.get("global"), + "current": obj.get("current"), + "currentLanguage": obj.get("currentLanguage") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/level.py b/edu_sharing_openapi/edu_sharing_client/models/level.py new file mode 100644 index 00000000..9ad65c25 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/level.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Level(BaseModel): + """ + Level + """ # noqa: E501 + syslog_equivalent: Optional[StrictInt] = Field(default=None, alias="syslogEquivalent") + version2_level: Optional[Level] = Field(default=None, alias="version2Level") + __properties: ClassVar[List[str]] = ["syslogEquivalent", "version2Level"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Level from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of version2_level + if self.version2_level: + _dict['version2Level'] = self.version2_level.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Level from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "syslogEquivalent": obj.get("syslogEquivalent"), + "version2Level": Level.from_dict(obj["version2Level"]) if obj.get("version2Level") is not None else None + }) + return _obj + +# TODO: Rewrite to not use raise_errors +Level.model_rebuild(raise_errors=False) + diff --git a/edu_sharing_openapi/edu_sharing_client/models/license.py b/edu_sharing_openapi/edu_sharing_client/models/license.py new file mode 100644 index 00000000..6f471859 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/license.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class License(BaseModel): + """ + License + """ # noqa: E501 + icon: Optional[StrictStr] = None + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["icon", "url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of License from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of License from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "icon": obj.get("icon"), + "url": obj.get("url") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/license_agreement.py b/edu_sharing_openapi/edu_sharing_client/models/license_agreement.py new file mode 100644 index 00000000..3a0b8839 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/license_agreement.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.license_agreement_node import LicenseAgreementNode +from typing import Optional, Set +from typing_extensions import Self + +class LicenseAgreement(BaseModel): + """ + LicenseAgreement + """ # noqa: E501 + node_id: Optional[List[LicenseAgreementNode]] = Field(default=None, alias="nodeId") + __properties: ClassVar[List[str]] = ["nodeId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LicenseAgreement from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in node_id (list) + _items = [] + if self.node_id: + for _item_node_id in self.node_id: + if _item_node_id: + _items.append(_item_node_id.to_dict()) + _dict['nodeId'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LicenseAgreement from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeId": [LicenseAgreementNode.from_dict(_item) for _item in obj["nodeId"]] if obj.get("nodeId") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/license_agreement_node.py b/edu_sharing_openapi/edu_sharing_client/models/license_agreement_node.py new file mode 100644 index 00000000..a329816c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/license_agreement_node.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class LicenseAgreementNode(BaseModel): + """ + LicenseAgreementNode + """ # noqa: E501 + language: Optional[StrictStr] = None + value: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["language", "value"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LicenseAgreementNode from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LicenseAgreementNode from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "language": obj.get("language"), + "value": obj.get("value") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/licenses.py b/edu_sharing_openapi/edu_sharing_client/models/licenses.py new file mode 100644 index 00000000..0a6f70c3 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/licenses.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Licenses(BaseModel): + """ + Licenses + """ # noqa: E501 + repository: Optional[Dict[str, StrictStr]] = None + services: Optional[Dict[str, Dict[str, StrictStr]]] = None + __properties: ClassVar[List[str]] = ["repository", "services"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Licenses from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Licenses from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repository": obj.get("repository"), + "services": obj.get("services") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/location.py b/edu_sharing_openapi/edu_sharing_client/models/location.py new file mode 100644 index 00000000..393b32b3 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/location.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.geo import Geo +from typing import Optional, Set +from typing_extensions import Self + +class Location(BaseModel): + """ + Location + """ # noqa: E501 + geo: Optional[Geo] = None + __properties: ClassVar[List[str]] = ["geo"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Location from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of geo + if self.geo: + _dict['geo'] = self.geo.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Location from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "geo": Geo.from_dict(obj["geo"]) if obj.get("geo") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/log_entry.py b/edu_sharing_openapi/edu_sharing_client/models/log_entry.py new file mode 100644 index 00000000..2c52d38d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/log_entry.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.level import Level +from typing import Optional, Set +from typing_extensions import Self + +class LogEntry(BaseModel): + """ + LogEntry + """ # noqa: E501 + class_name: Optional[StrictStr] = Field(default=None, alias="className") + level: Optional[Level] = None + var_date: Optional[StrictInt] = Field(default=None, alias="date") + message: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["className", "level", "date", "message"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LogEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of level + if self.level: + _dict['level'] = self.level.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LogEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "className": obj.get("className"), + "level": Level.from_dict(obj["level"]) if obj.get("level") is not None else None, + "date": obj.get("date"), + "message": obj.get("message") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/logger_config_result.py b/edu_sharing_openapi/edu_sharing_client/models/logger_config_result.py new file mode 100644 index 00000000..0ac5dbb5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/logger_config_result.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class LoggerConfigResult(BaseModel): + """ + LoggerConfigResult + """ # noqa: E501 + name: Optional[StrictStr] = None + level: Optional[StrictStr] = None + appender: Optional[List[StrictStr]] = None + config: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["name", "level", "appender", "config"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LoggerConfigResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LoggerConfigResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "level": obj.get("level"), + "appender": obj.get("appender"), + "config": obj.get("config") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/login.py b/edu_sharing_openapi/edu_sharing_client/models/login.py new file mode 100644 index 00000000..27d2b438 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/login.py @@ -0,0 +1,124 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.lti_session import LTISession +from edu_sharing_client.models.remote_auth_description import RemoteAuthDescription +from typing import Optional, Set +from typing_extensions import Self + +class Login(BaseModel): + """ + Login + """ # noqa: E501 + remote_authentications: Optional[Dict[str, RemoteAuthDescription]] = Field(default=None, alias="remoteAuthentications") + is_valid_login: StrictBool = Field(alias="isValidLogin") + is_admin: StrictBool = Field(alias="isAdmin") + lti_session: Optional[LTISession] = Field(default=None, alias="ltiSession") + current_scope: StrictStr = Field(alias="currentScope") + user_home: Optional[StrictStr] = Field(default=None, alias="userHome") + session_timeout: StrictInt = Field(alias="sessionTimeout") + tool_permissions: Optional[List[StrictStr]] = Field(default=None, alias="toolPermissions") + status_code: Optional[StrictStr] = Field(default=None, alias="statusCode") + authority_name: Optional[StrictStr] = Field(default=None, alias="authorityName") + is_guest: StrictBool = Field(alias="isGuest") + __properties: ClassVar[List[str]] = ["remoteAuthentications", "isValidLogin", "isAdmin", "ltiSession", "currentScope", "userHome", "sessionTimeout", "toolPermissions", "statusCode", "authorityName", "isGuest"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Login from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in remote_authentications (dict) + _field_dict = {} + if self.remote_authentications: + for _key_remote_authentications in self.remote_authentications: + if self.remote_authentications[_key_remote_authentications]: + _field_dict[_key_remote_authentications] = self.remote_authentications[_key_remote_authentications].to_dict() + _dict['remoteAuthentications'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of lti_session + if self.lti_session: + _dict['ltiSession'] = self.lti_session.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Login from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "remoteAuthentications": dict( + (_k, RemoteAuthDescription.from_dict(_v)) + for _k, _v in obj["remoteAuthentications"].items() + ) + if obj.get("remoteAuthentications") is not None + else None, + "isValidLogin": obj.get("isValidLogin"), + "isAdmin": obj.get("isAdmin"), + "ltiSession": LTISession.from_dict(obj["ltiSession"]) if obj.get("ltiSession") is not None else None, + "currentScope": obj.get("currentScope"), + "userHome": obj.get("userHome"), + "sessionTimeout": obj.get("sessionTimeout"), + "toolPermissions": obj.get("toolPermissions"), + "statusCode": obj.get("statusCode"), + "authorityName": obj.get("authorityName"), + "isGuest": obj.get("isGuest") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/login_credentials.py b/edu_sharing_openapi/edu_sharing_client/models/login_credentials.py new file mode 100644 index 00000000..1478b3ba --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/login_credentials.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class LoginCredentials(BaseModel): + """ + LoginCredentials + """ # noqa: E501 + user_name: StrictStr = Field(alias="userName") + password: StrictStr + scope: StrictStr + __properties: ClassVar[List[str]] = ["userName", "password", "scope"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LoginCredentials from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LoginCredentials from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "userName": obj.get("userName"), + "password": obj.get("password"), + "scope": obj.get("scope") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/logout_info.py b/edu_sharing_openapi/edu_sharing_client/models/logout_info.py new file mode 100644 index 00000000..7cc2dc85 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/logout_info.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class LogoutInfo(BaseModel): + """ + LogoutInfo + """ # noqa: E501 + url: Optional[StrictStr] = None + destroy_session: Optional[StrictBool] = Field(default=None, alias="destroySession") + ajax: Optional[StrictBool] = None + next: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["url", "destroySession", "ajax", "next"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LogoutInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LogoutInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url"), + "destroySession": obj.get("destroySession"), + "ajax": obj.get("ajax"), + "next": obj.get("next") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/lti_platform_configuration.py b/edu_sharing_openapi/edu_sharing_client/models/lti_platform_configuration.py new file mode 100644 index 00000000..dbff8bca --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/lti_platform_configuration.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.message import Message +from typing import Optional, Set +from typing_extensions import Self + +class LTIPlatformConfiguration(BaseModel): + """ + LTIPlatformConfiguration + """ # noqa: E501 + product_family_code: Optional[StrictStr] = None + version: Optional[StrictStr] = None + messages_supported: Optional[List[Message]] = None + variables: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["product_family_code", "version", "messages_supported", "variables"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LTIPlatformConfiguration from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in messages_supported (list) + _items = [] + if self.messages_supported: + for _item_messages_supported in self.messages_supported: + if _item_messages_supported: + _items.append(_item_messages_supported.to_dict()) + _dict['messages_supported'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LTIPlatformConfiguration from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "product_family_code": obj.get("product_family_code"), + "version": obj.get("version"), + "messages_supported": [Message.from_dict(_item) for _item in obj["messages_supported"]] if obj.get("messages_supported") is not None else None, + "variables": obj.get("variables") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/lti_session.py b/edu_sharing_openapi/edu_sharing_client/models/lti_session.py new file mode 100644 index 00000000..d27519a4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/lti_session.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from typing import Optional, Set +from typing_extensions import Self + +class LTISession(BaseModel): + """ + LTISession + """ # noqa: E501 + accept_multiple: Optional[StrictBool] = Field(default=None, alias="acceptMultiple") + deeplink_return_url: Optional[StrictStr] = Field(default=None, alias="deeplinkReturnUrl") + accept_types: Optional[List[StrictStr]] = Field(default=None, alias="acceptTypes") + accept_presentation_document_targets: Optional[List[StrictStr]] = Field(default=None, alias="acceptPresentationDocumentTargets") + can_confirm: Optional[StrictBool] = Field(default=None, alias="canConfirm") + title: Optional[StrictStr] = None + text: Optional[StrictStr] = None + custom_content_node: Optional[Node] = Field(default=None, alias="customContentNode") + __properties: ClassVar[List[str]] = ["acceptMultiple", "deeplinkReturnUrl", "acceptTypes", "acceptPresentationDocumentTargets", "canConfirm", "title", "text", "customContentNode"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LTISession from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of custom_content_node + if self.custom_content_node: + _dict['customContentNode'] = self.custom_content_node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LTISession from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "acceptMultiple": obj.get("acceptMultiple"), + "deeplinkReturnUrl": obj.get("deeplinkReturnUrl"), + "acceptTypes": obj.get("acceptTypes"), + "acceptPresentationDocumentTargets": obj.get("acceptPresentationDocumentTargets"), + "canConfirm": obj.get("canConfirm"), + "title": obj.get("title"), + "text": obj.get("text"), + "customContentNode": Node.from_dict(obj["customContentNode"]) if obj.get("customContentNode") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/lti_tool_configuration.py b/edu_sharing_openapi/edu_sharing_client/models/lti_tool_configuration.py new file mode 100644 index 00000000..c750907a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/lti_tool_configuration.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class LTIToolConfiguration(BaseModel): + """ + LTIToolConfiguration + """ # noqa: E501 + version: Optional[StrictStr] = None + deployment_id: Optional[StrictStr] = None + target_link_uri: Optional[StrictStr] = None + domain: Optional[StrictStr] = None + description: Optional[StrictStr] = None + claims: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["version", "deployment_id", "target_link_uri", "domain", "description", "claims"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of LTIToolConfiguration from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of LTIToolConfiguration from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "version": obj.get("version"), + "deployment_id": obj.get("deployment_id"), + "target_link_uri": obj.get("target_link_uri"), + "domain": obj.get("domain"), + "description": obj.get("description"), + "claims": obj.get("claims") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mainnav.py b/edu_sharing_openapi/edu_sharing_client/models/mainnav.py new file mode 100644 index 00000000..b664550e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mainnav.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.icon import Icon +from typing import Optional, Set +from typing_extensions import Self + +class Mainnav(BaseModel): + """ + Mainnav + """ # noqa: E501 + icon: Optional[Icon] = None + main_menu_style: Optional[StrictStr] = Field(default=None, alias="mainMenuStyle") + __properties: ClassVar[List[str]] = ["icon", "mainMenuStyle"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Mainnav from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of icon + if self.icon: + _dict['icon'] = self.icon.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Mainnav from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "icon": Icon.from_dict(obj["icon"]) if obj.get("icon") is not None else None, + "mainMenuStyle": obj.get("mainMenuStyle") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/manual_registration_data.py b/edu_sharing_openapi/edu_sharing_client/models/manual_registration_data.py new file mode 100644 index 00000000..265010d6 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/manual_registration_data.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ManualRegistrationData(BaseModel): + """ + ManualRegistrationData + """ # noqa: E501 + tool_name: Optional[StrictStr] = Field(default=None, alias="toolName") + tool_url: Optional[StrictStr] = Field(default=None, alias="toolUrl") + tool_description: Optional[StrictStr] = Field(default=None, alias="toolDescription") + keyset_url: Optional[StrictStr] = Field(default=None, alias="keysetUrl") + login_initiation_url: Optional[StrictStr] = Field(default=None, alias="loginInitiationUrl") + redirection_urls: Optional[List[StrictStr]] = Field(default=None, alias="redirectionUrls") + custom_parameters: Optional[List[StrictStr]] = Field(default=None, description="JSON Object where each value is a string. Custom parameters to be included in each launch to this tool. If a custom parameter is also defined at the message level, the message level value takes precedence. The value of the custom parameters may be substitution parameters as described in the LTI Core [LTI-13] specification. ", alias="customParameters") + logo_url: Optional[StrictStr] = Field(default=None, alias="logoUrl") + target_link_uri: StrictStr = Field(description="The default target link uri to use unless defined otherwise in the message or link definition", alias="targetLinkUri") + target_link_uri_deep_link: Optional[StrictStr] = Field(default=None, description="The target link uri to use for DeepLing Message", alias="targetLinkUriDeepLink") + client_name: StrictStr = Field(description="Name of the Tool to be presented to the End-User. Localized representations may be included as described in Section 2.1 of the [OIDC-Reg] specification. 
", alias="clientName") + __properties: ClassVar[List[str]] = ["toolName", "toolUrl", "toolDescription", "keysetUrl", "loginInitiationUrl", "redirectionUrls", "customParameters", "logoUrl", "targetLinkUri", "targetLinkUriDeepLink", "clientName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ManualRegistrationData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ManualRegistrationData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "toolName": obj.get("toolName"), + "toolUrl": obj.get("toolUrl"), + "toolDescription": obj.get("toolDescription"), + "keysetUrl": obj.get("keysetUrl"), + "loginInitiationUrl": obj.get("loginInitiationUrl"), + "redirectionUrls": obj.get("redirectionUrls"), + "customParameters": obj.get("customParameters"), + "logoUrl": obj.get("logoUrl"), + "targetLinkUri": obj.get("targetLinkUri"), + "targetLinkUriDeepLink": obj.get("targetLinkUriDeepLink"), + "clientName": obj.get("clientName") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mc_org_connect_result.py b/edu_sharing_openapi/edu_sharing_client/models/mc_org_connect_result.py new file mode 100644 index 00000000..d0df47db --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mc_org_connect_result.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class McOrgConnectResult(BaseModel): + """ + McOrgConnectResult + """ # noqa: E501 + rows: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["rows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of McOrgConnectResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of McOrgConnectResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rows": obj.get("rows") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds.py b/edu_sharing_openapi/edu_sharing_client/models/mds.py new file mode 100644 index 00000000..854e930d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds.py @@ -0,0 +1,143 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.create import Create +from edu_sharing_client.models.mds_group import MdsGroup +from edu_sharing_client.models.mds_list import MdsList +from edu_sharing_client.models.mds_sort import MdsSort +from edu_sharing_client.models.mds_view import MdsView +from edu_sharing_client.models.mds_widget import MdsWidget +from typing import Optional, Set +from typing_extensions import Self + +class Mds(BaseModel): + """ + Mds + """ # noqa: E501 + name: StrictStr + create: Optional[Create] = None + widgets: List[MdsWidget] + views: List[MdsView] + groups: List[MdsGroup] + lists: List[MdsList] + sorts: List[MdsSort] + __properties: ClassVar[List[str]] = ["name", "create", "widgets", "views", "groups", "lists", "sorts"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Mds from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of create + if self.create: + _dict['create'] = self.create.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in widgets (list) + _items = [] + if self.widgets: + for _item_widgets in self.widgets: + if _item_widgets: + _items.append(_item_widgets.to_dict()) + _dict['widgets'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in views (list) + _items = [] + if self.views: + for _item_views in self.views: + if _item_views: + _items.append(_item_views.to_dict()) + _dict['views'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in groups (list) + _items = [] + if self.groups: + for _item_groups in self.groups: + if _item_groups: + _items.append(_item_groups.to_dict()) + _dict['groups'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in lists (list) + _items = [] + if self.lists: + for _item_lists in self.lists: + if _item_lists: + _items.append(_item_lists.to_dict()) + _dict['lists'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in sorts (list) + _items = [] + if self.sorts: + for _item_sorts in self.sorts: + if _item_sorts: + _items.append(_item_sorts.to_dict()) + _dict['sorts'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Mds from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "create": Create.from_dict(obj["create"]) if obj.get("create") is not None else None, + "widgets": [MdsWidget.from_dict(_item) for _item in obj["widgets"]] if obj.get("widgets") is not None else None, + "views": [MdsView.from_dict(_item) for _item in obj["views"]] if obj.get("views") is not None else None, + "groups": [MdsGroup.from_dict(_item) for _item in obj["groups"]] if obj.get("groups") is not None else None, + "lists": [MdsList.from_dict(_item) for _item in obj["lists"]] if obj.get("lists") is not None else None, + "sorts": [MdsSort.from_dict(_item) for _item in obj["sorts"]] if obj.get("sorts") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_column.py b/edu_sharing_openapi/edu_sharing_client/models/mds_column.py new file mode 100644 index 00000000..563a650c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_column.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MdsColumn(BaseModel): + """ + MdsColumn + """ # noqa: E501 + id: Optional[StrictStr] = None + format: Optional[StrictStr] = None + show_default: Optional[StrictBool] = Field(default=None, alias="showDefault") + __properties: ClassVar[List[str]] = ["id", "format", "showDefault"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsColumn from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsColumn from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "format": obj.get("format"), + "showDefault": obj.get("showDefault") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_entries.py b/edu_sharing_openapi/edu_sharing_client/models/mds_entries.py new file mode 100644 index 00000000..19dad3f9 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_entries.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.metadata_set_info import MetadataSetInfo +from typing import Optional, Set +from typing_extensions import Self + +class MdsEntries(BaseModel): + """ + MdsEntries + """ # noqa: E501 + metadatasets: List[MetadataSetInfo] + __properties: ClassVar[List[str]] = ["metadatasets"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in metadatasets (list) + _items = [] + if self.metadatasets: + for _item_metadatasets in self.metadatasets: + if _item_metadatasets: + _items.append(_item_metadatasets.to_dict()) + _dict['metadatasets'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "metadatasets": [MetadataSetInfo.from_dict(_item) for _item in obj["metadatasets"]] if obj.get("metadatasets") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_group.py b/edu_sharing_openapi/edu_sharing_client/models/mds_group.py new file mode 100644 index 00000000..f489346f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_group.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MdsGroup(BaseModel): + """ + MdsGroup + """ # noqa: E501 + rendering: Optional[StrictStr] = None + id: Optional[StrictStr] = None + views: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["rendering", "id", "views"] + + @field_validator('rendering') + def rendering_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['legacy', 'angular']): + raise ValueError("must be one of enum values ('legacy', 'angular')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsGroup from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsGroup from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rendering": obj.get("rendering"), + "id": obj.get("id"), + "views": obj.get("views") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_list.py b/edu_sharing_openapi/edu_sharing_client/models/mds_list.py new file mode 100644 index 00000000..9b2251cc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_list.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.mds_column import MdsColumn +from typing import Optional, Set +from typing_extensions import Self + +class MdsList(BaseModel): + """ + MdsList + """ # noqa: E501 + id: Optional[StrictStr] = None + columns: Optional[List[MdsColumn]] = None + __properties: ClassVar[List[str]] = ["id", "columns"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in columns (list) + _items = [] + if self.columns: + for _item_columns in self.columns: + if _item_columns: + _items.append(_item_columns.to_dict()) + _dict['columns'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "columns": [MdsColumn.from_dict(_item) for _item in obj["columns"]] if obj.get("columns") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_query_criteria.py b/edu_sharing_openapi/edu_sharing_client/models/mds_query_criteria.py new file mode 100644 index 00000000..d12777b2 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_query_criteria.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class MdsQueryCriteria(BaseModel): + """ + MdsQueryCriteria + """ # noqa: E501 + var_property: StrictStr = Field(alias="property") + values: List[StrictStr] + __properties: ClassVar[List[str]] = ["property", "values"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsQueryCriteria from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsQueryCriteria from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "property": obj.get("property"), + "values": obj.get("values") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_sort.py b/edu_sharing_openapi/edu_sharing_client/models/mds_sort.py new file mode 100644 index 00000000..73de7ed4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_sort.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.mds_sort_column import MdsSortColumn +from edu_sharing_client.models.mds_sort_default import MdsSortDefault +from typing import Optional, Set +from typing_extensions import Self + +class MdsSort(BaseModel): + """ + MdsSort + """ # noqa: E501 + id: StrictStr + columns: Optional[List[MdsSortColumn]] = None + default: Optional[MdsSortDefault] = None + __properties: ClassVar[List[str]] = ["id", "columns", "default"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsSort from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in columns (list) + _items = [] + if self.columns: + for _item_columns in self.columns: + if _item_columns: + _items.append(_item_columns.to_dict()) + _dict['columns'] = _items + # override the default output from pydantic by calling `to_dict()` of default + if self.default: + _dict['default'] = self.default.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsSort from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "columns": [MdsSortColumn.from_dict(_item) for _item in obj["columns"]] if obj.get("columns") is not None else None, + "default": MdsSortDefault.from_dict(obj["default"]) if obj.get("default") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_sort_column.py b/edu_sharing_openapi/edu_sharing_client/models/mds_sort_column.py new file mode 100644 index 00000000..40c2202c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_sort_column.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MdsSortColumn(BaseModel): + """ + MdsSortColumn + """ # noqa: E501 + id: StrictStr + mode: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["id", "mode"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsSortColumn from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsSortColumn from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "mode": obj.get("mode") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_sort_default.py b/edu_sharing_openapi/edu_sharing_client/models/mds_sort_default.py new file mode 100644 index 00000000..d4690465 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_sort_default.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class MdsSortDefault(BaseModel): + """ + MdsSortDefault + """ # noqa: E501 + sort_by: StrictStr = Field(alias="sortBy") + sort_ascending: StrictBool = Field(alias="sortAscending") + __properties: ClassVar[List[str]] = ["sortBy", "sortAscending"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsSortDefault from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsSortDefault from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "sortBy": obj.get("sortBy"), + "sortAscending": obj.get("sortAscending") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_subwidget.py b/edu_sharing_openapi/edu_sharing_client/models/mds_subwidget.py new file mode 100644 index 00000000..328c20df --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_subwidget.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MdsSubwidget(BaseModel): + """ + MdsSubwidget + """ # noqa: E501 + id: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["id"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsSubwidget from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsSubwidget from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_value.py b/edu_sharing_openapi/edu_sharing_client/models/mds_value.py new file mode 100644 index 00000000..f700a765 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_value.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MdsValue(BaseModel): + """ + MdsValue + """ # noqa: E501 + id: StrictStr + caption: Optional[StrictStr] = None + description: Optional[StrictStr] = None + parent: Optional[StrictStr] = None + url: Optional[StrictStr] = None + alternative_ids: Optional[List[StrictStr]] = Field(default=None, alias="alternativeIds") + __properties: ClassVar[List[str]] = ["id", "caption", "description", "parent", "url", "alternativeIds"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsValue from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsValue from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "caption": obj.get("caption"), + "description": obj.get("description"), + "parent": obj.get("parent"), + "url": obj.get("url"), + "alternativeIds": obj.get("alternativeIds") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_view.py b/edu_sharing_openapi/edu_sharing_client/models/mds_view.py new file mode 100644 index 00000000..02c5845f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_view.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MdsView(BaseModel): + """ + MdsView + """ # noqa: E501 + id: Optional[StrictStr] = None + caption: Optional[StrictStr] = None + icon: Optional[StrictStr] = None + html: Optional[StrictStr] = None + rel: Optional[StrictStr] = None + hide_if_empty: Optional[StrictBool] = Field(default=None, alias="hideIfEmpty") + is_extended: Optional[StrictBool] = Field(default=None, alias="isExtended") + __properties: ClassVar[List[str]] = ["id", "caption", "icon", "html", "rel", "hideIfEmpty", "isExtended"] + + @field_validator('rel') + def rel_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['suggestions']): + raise ValueError("must be one of enum values ('suggestions')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsView from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsView from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "caption": obj.get("caption"), + "icon": obj.get("icon"), + "html": obj.get("html"), + "rel": obj.get("rel"), + "hideIfEmpty": obj.get("hideIfEmpty"), + "isExtended": obj.get("isExtended") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_widget.py b/edu_sharing_openapi/edu_sharing_client/models/mds_widget.py new file mode 100644 index 00000000..d8a1a79e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_widget.py @@ -0,0 +1,223 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.mds_subwidget import MdsSubwidget +from edu_sharing_client.models.mds_value import MdsValue +from edu_sharing_client.models.mds_widget_condition import MdsWidgetCondition +from typing import Optional, Set +from typing_extensions import Self + +class MdsWidget(BaseModel): + """ + MdsWidget + """ # noqa: E501 + ids: Optional[Dict[str, StrictStr]] = None + link: Optional[StrictStr] = None + configuration: Optional[StrictStr] = None + format: Optional[StrictStr] = None + allow_valuespace_suggestions: Optional[StrictBool] = Field(default=None, alias="allowValuespaceSuggestions") + count_defaultvalue_as_filter: Optional[StrictBool] = Field(default=None, description="When true, a set defaultvalue will still trigger the search to show an active filter. When false (default), the defaultvalue will be shown as if no filter is active", alias="countDefaultvalueAsFilter") + condition: Optional[MdsWidgetCondition] = None + maxlength: Optional[StrictInt] = None + interaction_type: Optional[StrictStr] = Field(default=None, alias="interactionType") + filter_mode: Optional[StrictStr] = Field(default=None, alias="filterMode") + expandable: Optional[StrictStr] = None + subwidgets: Optional[List[MdsSubwidget]] = None + required: Optional[StrictStr] = None + id: Optional[StrictStr] = None + caption: Optional[StrictStr] = None + bottom_caption: Optional[StrictStr] = Field(default=None, alias="bottomCaption") + icon: Optional[StrictStr] = None + type: Optional[StrictStr] = None + template: Optional[StrictStr] = None + has_values: Optional[StrictBool] = Field(default=None, alias="hasValues") + values: Optional[List[MdsValue]] = None + placeholder: Optional[StrictStr] = None + unit: Optional[StrictStr] = None + min: Optional[StrictInt] = None + max: Optional[StrictInt] = None + default_min: Optional[StrictInt] = Field(default=None, alias="defaultMin") + default_max: Optional[StrictInt] = Field(default=None, alias="defaultMax") + step: Optional[StrictInt] = None + is_required: Optional[StrictStr] = Field(default=None, alias="isRequired") + allowempty: Optional[StrictBool] = None + defaultvalue: Optional[StrictStr] = None + is_extended: Optional[StrictBool] = Field(default=None, alias="isExtended") + is_searchable: Optional[StrictBool] = Field(default=None, alias="isSearchable") + hide_if_empty: Optional[StrictBool] = Field(default=None, alias="hideIfEmpty") + __properties: ClassVar[List[str]] = ["ids", "link", "configuration", "format", "allowValuespaceSuggestions", "countDefaultvalueAsFilter", "condition", "maxlength", "interactionType", "filterMode", "expandable", "subwidgets", "required", "id", "caption", "bottomCaption", "icon", "type", "template", "hasValues", "values", "placeholder", "unit", "min", "max", "defaultMin", "defaultMax", "step", "isRequired", "allowempty", "defaultvalue", "isExtended", "isSearchable", "hideIfEmpty"] + + @field_validator('interaction_type') + def interaction_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Input', 'None']): + raise ValueError("must be one of enum values ('Input', 'None')") + return value + + @field_validator('filter_mode') + def filter_mode_validate_enum(cls, value): + """Validates the enum""" + if 
value is None: + return value + + if value not in set(['disabled', 'auto', 'always']): + raise ValueError("must be one of enum values ('disabled', 'auto', 'always')") + return value + + @field_validator('expandable') + def expandable_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['disabled', 'expanded', 'collapsed']): + raise ValueError("must be one of enum values ('disabled', 'expanded', 'collapsed')") + return value + + @field_validator('required') + def required_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['mandatory', 'mandatoryForPublish', 'recommended', 'optional', 'ignore']): + raise ValueError("must be one of enum values ('mandatory', 'mandatoryForPublish', 'recommended', 'optional', 'ignore')") + return value + + @field_validator('is_required') + def is_required_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['mandatory', 'mandatoryForPublish', 'recommended', 'optional', 'ignore']): + raise ValueError("must be one of enum values ('mandatory', 'mandatoryForPublish', 'recommended', 'optional', 'ignore')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsWidget from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of condition + if self.condition: + _dict['condition'] = self.condition.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in subwidgets (list) + _items = [] + if self.subwidgets: + for _item_subwidgets in self.subwidgets: + if _item_subwidgets: + _items.append(_item_subwidgets.to_dict()) + _dict['subwidgets'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in values (list) + _items = [] + if self.values: + for _item_values in self.values: + if _item_values: + _items.append(_item_values.to_dict()) + _dict['values'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsWidget from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "ids": obj.get("ids"), + "link": obj.get("link"), + "configuration": obj.get("configuration"), + "format": obj.get("format"), + "allowValuespaceSuggestions": obj.get("allowValuespaceSuggestions"), + "countDefaultvalueAsFilter": obj.get("countDefaultvalueAsFilter"), + "condition": MdsWidgetCondition.from_dict(obj["condition"]) if obj.get("condition") is not None else None, + "maxlength": obj.get("maxlength"), + "interactionType": obj.get("interactionType"), + "filterMode": obj.get("filterMode"), + "expandable": obj.get("expandable"), + "subwidgets": [MdsSubwidget.from_dict(_item) for _item in obj["subwidgets"]] if obj.get("subwidgets") is not None else None, + "required": obj.get("required"), + "id": obj.get("id"), + "caption": obj.get("caption"), + "bottomCaption": obj.get("bottomCaption"), + "icon": obj.get("icon"), + "type": obj.get("type"), + "template": obj.get("template"), + "hasValues": obj.get("hasValues"), + "values": [MdsValue.from_dict(_item) for _item in obj["values"]] if obj.get("values") is not None else None, + "placeholder": obj.get("placeholder"), + "unit": obj.get("unit"), + "min": obj.get("min"), + "max": obj.get("max"), + "defaultMin": obj.get("defaultMin"), + "defaultMax": obj.get("defaultMax"), + "step": obj.get("step"), + "isRequired": obj.get("isRequired"), + "allowempty": obj.get("allowempty"), + "defaultvalue": obj.get("defaultvalue"), + "isExtended": obj.get("isExtended"), + "isSearchable": obj.get("isSearchable"), + "hideIfEmpty": obj.get("hideIfEmpty") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mds_widget_condition.py b/edu_sharing_openapi/edu_sharing_client/models/mds_widget_condition.py new file mode 100644 index 00000000..7fbf3abf --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mds_widget_condition.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MdsWidgetCondition(BaseModel): + """ + MdsWidgetCondition + """ # noqa: E501 + type: StrictStr + value: StrictStr + negate: StrictBool + dynamic: StrictBool + pattern: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["type", "value", "negate", "dynamic", "pattern"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value not in set(['PROPERTY', 'TOOLPERMISSION']): + raise ValueError("must be one of enum values ('PROPERTY', 'TOOLPERMISSION')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MdsWidgetCondition from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MdsWidgetCondition from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "type": obj.get("type"), + "value": obj.get("value"), + "negate": obj.get("negate"), + "dynamic": obj.get("dynamic"), + "pattern": obj.get("pattern") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mediacenter.py b/edu_sharing_openapi/edu_sharing_client/models/mediacenter.py new file mode 100644 index 00000000..b9221a6b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mediacenter.py @@ -0,0 +1,143 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.organization import Organization +from typing import Optional, Set +from typing_extensions import Self + +class Mediacenter(BaseModel): + """ + Mediacenter + """ # noqa: E501 + properties: Optional[Dict[str, List[StrictStr]]] = None + editable: Optional[StrictBool] = None + signup_method: Optional[StrictStr] = Field(default=None, alias="signupMethod") + ref: Optional[NodeRef] = None + aspects: Optional[List[StrictStr]] = None + organizations: Optional[List[Organization]] = None + authority_name: StrictStr = Field(alias="authorityName") + authority_type: Optional[StrictStr] = Field(default=None, alias="authorityType") + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + profile: Optional[GroupProfile] = None + administration_access: Optional[StrictBool] = Field(default=None, alias="administrationAccess") + __properties: ClassVar[List[str]] = ["properties", "editable", "signupMethod", "ref", "aspects", "organizations", "authorityName", "authorityType", "groupName", "profile", "administrationAccess"] + + @field_validator('signup_method') + def signup_method_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['simple', 'password', 'list']): + raise ValueError("must be one of enum values ('simple', 'password', 'list')") + return value + + @field_validator('authority_type') + def authority_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST']): + raise ValueError("must be one of enum values ('USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Mediacenter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of ref + if self.ref: + _dict['ref'] = self.ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in organizations (list) + _items = [] + if self.organizations: + for _item_organizations in self.organizations: + if _item_organizations: + _items.append(_item_organizations.to_dict()) + _dict['organizations'] = _items + # override the default output from pydantic by calling `to_dict()` of profile + if self.profile: + _dict['profile'] = self.profile.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Mediacenter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "properties": obj.get("properties"), + "editable": obj.get("editable"), + "signupMethod": obj.get("signupMethod"), + "ref": NodeRef.from_dict(obj["ref"]) if obj.get("ref") is not None else None, + "aspects": obj.get("aspects"), + "organizations": [Organization.from_dict(_item) for _item in obj["organizations"]] if obj.get("organizations") is not None else None, + "authorityName": obj.get("authorityName"), + "authorityType": obj.get("authorityType"), + "groupName": obj.get("groupName"), + "profile": GroupProfile.from_dict(obj["profile"]) if obj.get("profile") is not None else None, + "administrationAccess": obj.get("administrationAccess") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mediacenter_profile_extension.py b/edu_sharing_openapi/edu_sharing_client/models/mediacenter_profile_extension.py new file mode 100644 index 00000000..da1ca80d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mediacenter_profile_extension.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.catalog import Catalog +from typing import Optional, Set +from typing_extensions import Self + +class MediacenterProfileExtension(BaseModel): + """ + MediacenterProfileExtension + """ # noqa: E501 + id: Optional[StrictStr] = None + location: Optional[StrictStr] = None + district_abbreviation: Optional[StrictStr] = Field(default=None, alias="districtAbbreviation") + main_url: Optional[StrictStr] = Field(default=None, alias="mainUrl") + catalogs: Optional[List[Catalog]] = None + content_status: Optional[StrictStr] = Field(default=None, alias="contentStatus") + __properties: ClassVar[List[str]] = ["id", "location", "districtAbbreviation", "mainUrl", "catalogs", "contentStatus"] + + @field_validator('content_status') + def content_status_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Activated', 'Deactivated']): + raise ValueError("must be one of enum values ('Activated', 'Deactivated')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MediacenterProfileExtension from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in catalogs (list) + _items = [] + if self.catalogs: + for _item_catalogs in self.catalogs: + if _item_catalogs: + _items.append(_item_catalogs.to_dict()) + _dict['catalogs'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MediacenterProfileExtension from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "location": obj.get("location"), + "districtAbbreviation": obj.get("districtAbbreviation"), + "mainUrl": obj.get("mainUrl"), + "catalogs": [Catalog.from_dict(_item) for _item in obj["catalogs"]] if obj.get("catalogs") is not None else None, + "contentStatus": obj.get("contentStatus") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/mediacenters_import_result.py b/edu_sharing_openapi/edu_sharing_client/models/mediacenters_import_result.py new file mode 100644 index 00000000..c8337590 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/mediacenters_import_result.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MediacentersImportResult(BaseModel): + """ + MediacentersImportResult + """ # noqa: E501 + rows: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["rows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MediacentersImportResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MediacentersImportResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rows": obj.get("rows") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/menu_entry.py b/edu_sharing_openapi/edu_sharing_client/models/menu_entry.py new file mode 100644 index 00000000..8283eeae --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/menu_entry.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class MenuEntry(BaseModel): + """ + MenuEntry + """ # noqa: E501 + position: Optional[StrictInt] = None + icon: Optional[StrictStr] = None + name: Optional[StrictStr] = None + url: Optional[StrictStr] = None + is_disabled: Optional[StrictBool] = Field(default=None, alias="isDisabled") + open_in_new: Optional[StrictBool] = Field(default=None, alias="openInNew") + is_separate: Optional[StrictBool] = Field(default=None, alias="isSeparate") + is_separate_bottom: Optional[StrictBool] = Field(default=None, alias="isSeparateBottom") + only_desktop: Optional[StrictBool] = Field(default=None, alias="onlyDesktop") + only_web: Optional[StrictBool] = Field(default=None, alias="onlyWeb") + path: Optional[StrictStr] = None + scope: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["position", "icon", "name", "url", "isDisabled", "openInNew", "isSeparate", "isSeparateBottom", "onlyDesktop", "onlyWeb", "path", "scope"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MenuEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MenuEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "position": obj.get("position"), + "icon": obj.get("icon"), + "name": obj.get("name"), + "url": obj.get("url"), + "isDisabled": obj.get("isDisabled"), + "openInNew": obj.get("openInNew"), + "isSeparate": obj.get("isSeparate"), + "isSeparateBottom": obj.get("isSeparateBottom"), + "onlyDesktop": obj.get("onlyDesktop"), + "onlyWeb": obj.get("onlyWeb"), + "path": obj.get("path"), + "scope": obj.get("scope") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/message.py b/edu_sharing_openapi/edu_sharing_client/models/message.py new file mode 100644 index 00000000..dca093a3 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/message.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Message(BaseModel): + """ + Message + """ # noqa: E501 + type: Optional[StrictStr] = None + placements: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["type", "placements"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Message from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Message from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "type": obj.get("type"), + "placements": obj.get("placements") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/metadata_set_info.py b/edu_sharing_openapi/edu_sharing_client/models/metadata_set_info.py new file mode 100644 index 00000000..d8344ee7 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/metadata_set_info.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class MetadataSetInfo(BaseModel): + """ + MetadataSetInfo + """ # noqa: E501 + id: StrictStr + name: StrictStr + __properties: ClassVar[List[str]] = ["id", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MetadataSetInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MetadataSetInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "name": obj.get("name") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/metadata_suggestion_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/metadata_suggestion_event_dto.py new file mode 100644 index 00000000..1fa7815d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/metadata_suggestion_event_dto.py @@ -0,0 +1,119 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from edu_sharing_client.models.widget_data_dto import WidgetDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class MetadataSuggestionEventDTO(NotificationEventDTO): + """ + MetadataSuggestionEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + caption_id: Optional[StrictStr] = Field(default=None, alias="captionId") + caption: Optional[StrictStr] = None + parent_id: Optional[StrictStr] = Field(default=None, alias="parentId") + parent_caption: Optional[StrictStr] = Field(default=None, alias="parentCaption") + widget: Optional[WidgetDataDTO] = None + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "captionId", "caption", "parentId", "parentCaption", "widget"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of MetadataSuggestionEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
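+
+        Hypothetical round trip (keys and values are invented; it assumes the
+        inherited notification fields are optional, as in the generated base
+        class):
+
+            raw = {"caption": "Suggested keyword", "captionId": "cclom:general_keyword"}
+            dto = MetadataSuggestionEventDTO.from_dict(raw)
+            dto.to_dict()   # only the fields present in 'raw' come back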
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + # override the default output from pydantic by calling `to_dict()` of widget + if self.widget: + _dict['widget'] = self.widget.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of MetadataSuggestionEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "captionId": obj.get("captionId"), + "caption": obj.get("caption"), + "parentId": obj.get("parentId"), + "parentCaption": obj.get("parentCaption"), + "widget": WidgetDataDTO.from_dict(obj["widget"]) if obj.get("widget") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node.py b/edu_sharing_openapi/edu_sharing_client/models/node.py new file mode 100644 index 00000000..6cbed548 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node.py @@ -0,0 +1,226 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection import Collection +from edu_sharing_client.models.content import Content +from edu_sharing_client.models.contributor import Contributor +from edu_sharing_client.models.license import License +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.person import Person +from edu_sharing_client.models.preview import Preview +from edu_sharing_client.models.rating_details import RatingDetails +from edu_sharing_client.models.remote import Remote +from typing import Optional, Set +from typing_extensions import Self + +class Node(BaseModel): + """ + Node + """ # noqa: E501 + node_lti_deep_link: Optional[NodeLTIDeepLink] = Field(default=None, alias="nodeLTIDeepLink") + remote: Optional[Remote] = None + content: Optional[Content] = None + license: Optional[License] = None + is_directory: Optional[StrictBool] = Field(default=None, alias="isDirectory") + comment_count: Optional[StrictInt] = Field(default=None, alias="commentCount") + rating: Optional[RatingDetails] = None + used_in_collections: Optional[List[Node]] = Field(default=None, alias="usedInCollections") + relations: Optional[Dict[str, Node]] = None + contributors: Optional[List[Contributor]] = None + ref: NodeRef + parent: Optional[NodeRef] = None + type: Optional[StrictStr] = None + aspects: Optional[List[StrictStr]] = None + name: StrictStr + title: Optional[StrictStr] = None + metadataset: Optional[StrictStr] = None + repository_type: Optional[StrictStr] = Field(default=None, alias="repositoryType") + created_at: datetime = Field(alias="createdAt") + created_by: Person = Field(alias="createdBy") + modified_at: Optional[datetime] = Field(default=None, alias="modifiedAt") + modified_by: Optional[Person] = Field(default=None, alias="modifiedBy") + access: List[StrictStr] + download_url: StrictStr = Field(alias="downloadUrl") + properties: Optional[Dict[str, List[StrictStr]]] = None + mimetype: Optional[StrictStr] = None + mediatype: Optional[StrictStr] = None + size: Optional[StrictStr] = None + preview: Optional[Preview] = None + icon_url: Optional[StrictStr] = Field(default=None, alias="iconURL") + collection: Collection + owner: Person + is_public: Optional[StrictBool] = Field(default=None, alias="isPublic") + __properties: ClassVar[List[str]] = ["nodeLTIDeepLink", "remote", "content", "license", "isDirectory", "commentCount", "rating", "usedInCollections", "relations", "contributors", "ref", "parent", "type", "aspects", "name", "title", "metadataset", "repositoryType", "createdAt", "createdBy", "modifiedAt", "modifiedBy", "access", "downloadUrl", "properties", "mimetype", "mediatype", "size", "preview", "iconURL", "collection", "owner", "isPublic"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return 
json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Node from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node_lti_deep_link + if self.node_lti_deep_link: + _dict['nodeLTIDeepLink'] = self.node_lti_deep_link.to_dict() + # override the default output from pydantic by calling `to_dict()` of remote + if self.remote: + _dict['remote'] = self.remote.to_dict() + # override the default output from pydantic by calling `to_dict()` of content + if self.content: + _dict['content'] = self.content.to_dict() + # override the default output from pydantic by calling `to_dict()` of license + if self.license: + _dict['license'] = self.license.to_dict() + # override the default output from pydantic by calling `to_dict()` of rating + if self.rating: + _dict['rating'] = self.rating.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in used_in_collections (list) + _items = [] + if self.used_in_collections: + for _item_used_in_collections in self.used_in_collections: + if _item_used_in_collections: + _items.append(_item_used_in_collections.to_dict()) + _dict['usedInCollections'] = _items + # override the default output from pydantic by calling `to_dict()` of each value in relations (dict) + _field_dict = {} + if self.relations: + for _key_relations in self.relations: + if self.relations[_key_relations]: + _field_dict[_key_relations] = self.relations[_key_relations].to_dict() + _dict['relations'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each item in contributors (list) + _items = [] + if self.contributors: + for _item_contributors in self.contributors: + if _item_contributors: + _items.append(_item_contributors.to_dict()) + _dict['contributors'] = _items + # override the default output from pydantic by calling `to_dict()` of ref + if self.ref: + _dict['ref'] = self.ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of parent + if self.parent: + _dict['parent'] = self.parent.to_dict() + # override the default output from pydantic by calling `to_dict()` of created_by + if self.created_by: + _dict['createdBy'] = self.created_by.to_dict() + # override the default output from pydantic by calling `to_dict()` of modified_by + if self.modified_by: + _dict['modifiedBy'] = self.modified_by.to_dict() + # override the default output from pydantic by calling `to_dict()` of preview + if self.preview: + _dict['preview'] = self.preview.to_dict() + # override the default output from pydantic by calling `to_dict()` of collection + if self.collection: + _dict['collection'] = self.collection.to_dict() + # override the default output from pydantic by calling `to_dict()` of owner + if self.owner: + _dict['owner'] = self.owner.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance 
of Node from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeLTIDeepLink": NodeLTIDeepLink.from_dict(obj["nodeLTIDeepLink"]) if obj.get("nodeLTIDeepLink") is not None else None, + "remote": Remote.from_dict(obj["remote"]) if obj.get("remote") is not None else None, + "content": Content.from_dict(obj["content"]) if obj.get("content") is not None else None, + "license": License.from_dict(obj["license"]) if obj.get("license") is not None else None, + "isDirectory": obj.get("isDirectory"), + "commentCount": obj.get("commentCount"), + "rating": RatingDetails.from_dict(obj["rating"]) if obj.get("rating") is not None else None, + "usedInCollections": [Node.from_dict(_item) for _item in obj["usedInCollections"]] if obj.get("usedInCollections") is not None else None, + "relations": dict( + (_k, Node.from_dict(_v)) + for _k, _v in obj["relations"].items() + ) + if obj.get("relations") is not None + else None, + "contributors": [Contributor.from_dict(_item) for _item in obj["contributors"]] if obj.get("contributors") is not None else None, + "ref": NodeRef.from_dict(obj["ref"]) if obj.get("ref") is not None else None, + "parent": NodeRef.from_dict(obj["parent"]) if obj.get("parent") is not None else None, + "type": obj.get("type"), + "aspects": obj.get("aspects"), + "name": obj.get("name"), + "title": obj.get("title"), + "metadataset": obj.get("metadataset"), + "repositoryType": obj.get("repositoryType"), + "createdAt": obj.get("createdAt"), + "createdBy": Person.from_dict(obj["createdBy"]) if obj.get("createdBy") is not None else None, + "modifiedAt": obj.get("modifiedAt"), + "modifiedBy": Person.from_dict(obj["modifiedBy"]) if obj.get("modifiedBy") is not None else None, + "access": obj.get("access"), + "downloadUrl": obj.get("downloadUrl"), + "properties": obj.get("properties"), + "mimetype": obj.get("mimetype"), + "mediatype": obj.get("mediatype"), + "size": obj.get("size"), + "preview": Preview.from_dict(obj["preview"]) if obj.get("preview") is not None else None, + "iconURL": obj.get("iconURL"), + "collection": Collection.from_dict(obj["collection"]) if obj.get("collection") is not None else None, + "owner": Person.from_dict(obj["owner"]) if obj.get("owner") is not None else None, + "isPublic": obj.get("isPublic") + }) + return _obj + +# TODO: Rewrite to not use raise_errors +Node.model_rebuild(raise_errors=False) + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_collection_proposal_count.py b/edu_sharing_openapi/edu_sharing_client/models/node_collection_proposal_count.py new file mode 100644 index 00000000..d4718b95 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_collection_proposal_count.py @@ -0,0 +1,229 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection import Collection +from edu_sharing_client.models.content import Content +from edu_sharing_client.models.contributor import Contributor +from edu_sharing_client.models.license import License +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.person import Person +from edu_sharing_client.models.preview import Preview +from edu_sharing_client.models.rating_details import RatingDetails +from edu_sharing_client.models.remote import Remote +from typing import Optional, Set +from typing_extensions import Self + +class NodeCollectionProposalCount(BaseModel): + """ + NodeCollectionProposalCount + """ # noqa: E501 + node_lti_deep_link: Optional[NodeLTIDeepLink] = Field(default=None, alias="nodeLTIDeepLink") + remote: Optional[Remote] = None + content: Optional[Content] = None + license: Optional[License] = None + is_directory: Optional[StrictBool] = Field(default=None, alias="isDirectory") + comment_count: Optional[StrictInt] = Field(default=None, alias="commentCount") + rating: Optional[RatingDetails] = None + used_in_collections: Optional[List[Node]] = Field(default=None, alias="usedInCollections") + relations: Optional[Dict[str, Node]] = None + contributors: Optional[List[Contributor]] = None + proposal_counts: Optional[Dict[str, StrictInt]] = Field(default=None, alias="proposalCounts") + proposal_count: Optional[Dict[str, StrictInt]] = Field(default=None, alias="proposalCount") + ref: NodeRef + parent: Optional[NodeRef] = None + type: Optional[StrictStr] = None + aspects: Optional[List[StrictStr]] = None + name: StrictStr + title: Optional[StrictStr] = None + metadataset: Optional[StrictStr] = None + repository_type: Optional[StrictStr] = Field(default=None, alias="repositoryType") + created_at: datetime = Field(alias="createdAt") + created_by: Person = Field(alias="createdBy") + modified_at: Optional[datetime] = Field(default=None, alias="modifiedAt") + modified_by: Optional[Person] = Field(default=None, alias="modifiedBy") + access: List[StrictStr] + download_url: StrictStr = Field(alias="downloadUrl") + properties: Optional[Dict[str, List[StrictStr]]] = None + mimetype: Optional[StrictStr] = None + mediatype: Optional[StrictStr] = None + size: Optional[StrictStr] = None + preview: Optional[Preview] = None + icon_url: Optional[StrictStr] = Field(default=None, alias="iconURL") + collection: Collection + owner: Person + is_public: Optional[StrictBool] = Field(default=None, alias="isPublic") + __properties: ClassVar[List[str]] = ["nodeLTIDeepLink", "remote", "content", "license", "isDirectory", "commentCount", "rating", "usedInCollections", "relations", "contributors", "proposalCounts", "proposalCount", "ref", "parent", "type", "aspects", "name", "title", "metadataset", "repositoryType", "createdAt", "createdBy", "modifiedAt", "modifiedBy", "access", "downloadUrl", "properties", "mimetype", "mediatype", "size", "preview", "iconURL", "collection", "owner", "isPublic"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + 
"""Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeCollectionProposalCount from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node_lti_deep_link + if self.node_lti_deep_link: + _dict['nodeLTIDeepLink'] = self.node_lti_deep_link.to_dict() + # override the default output from pydantic by calling `to_dict()` of remote + if self.remote: + _dict['remote'] = self.remote.to_dict() + # override the default output from pydantic by calling `to_dict()` of content + if self.content: + _dict['content'] = self.content.to_dict() + # override the default output from pydantic by calling `to_dict()` of license + if self.license: + _dict['license'] = self.license.to_dict() + # override the default output from pydantic by calling `to_dict()` of rating + if self.rating: + _dict['rating'] = self.rating.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in used_in_collections (list) + _items = [] + if self.used_in_collections: + for _item_used_in_collections in self.used_in_collections: + if _item_used_in_collections: + _items.append(_item_used_in_collections.to_dict()) + _dict['usedInCollections'] = _items + # override the default output from pydantic by calling `to_dict()` of each value in relations (dict) + _field_dict = {} + if self.relations: + for _key_relations in self.relations: + if self.relations[_key_relations]: + _field_dict[_key_relations] = self.relations[_key_relations].to_dict() + _dict['relations'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each item in contributors (list) + _items = [] + if self.contributors: + for _item_contributors in self.contributors: + if _item_contributors: + _items.append(_item_contributors.to_dict()) + _dict['contributors'] = _items + # override the default output from pydantic by calling `to_dict()` of ref + if self.ref: + _dict['ref'] = self.ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of parent + if self.parent: + _dict['parent'] = self.parent.to_dict() + # override the default output from pydantic by calling `to_dict()` of created_by + if self.created_by: + _dict['createdBy'] = self.created_by.to_dict() + # override the default output from pydantic by calling `to_dict()` of modified_by + if self.modified_by: + _dict['modifiedBy'] = self.modified_by.to_dict() + # override the default output from pydantic by calling `to_dict()` of preview + if self.preview: + _dict['preview'] = self.preview.to_dict() + # override the default output from pydantic by calling `to_dict()` of collection 
+ if self.collection: + _dict['collection'] = self.collection.to_dict() + # override the default output from pydantic by calling `to_dict()` of owner + if self.owner: + _dict['owner'] = self.owner.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeCollectionProposalCount from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeLTIDeepLink": NodeLTIDeepLink.from_dict(obj["nodeLTIDeepLink"]) if obj.get("nodeLTIDeepLink") is not None else None, + "remote": Remote.from_dict(obj["remote"]) if obj.get("remote") is not None else None, + "content": Content.from_dict(obj["content"]) if obj.get("content") is not None else None, + "license": License.from_dict(obj["license"]) if obj.get("license") is not None else None, + "isDirectory": obj.get("isDirectory"), + "commentCount": obj.get("commentCount"), + "rating": RatingDetails.from_dict(obj["rating"]) if obj.get("rating") is not None else None, + "usedInCollections": [Node.from_dict(_item) for _item in obj["usedInCollections"]] if obj.get("usedInCollections") is not None else None, + "relations": dict( + (_k, Node.from_dict(_v)) + for _k, _v in obj["relations"].items() + ) + if obj.get("relations") is not None + else None, + "contributors": [Contributor.from_dict(_item) for _item in obj["contributors"]] if obj.get("contributors") is not None else None, + "proposalCounts": obj.get("proposalCounts"), + "proposalCount": obj.get("proposalCount"), + "ref": NodeRef.from_dict(obj["ref"]) if obj.get("ref") is not None else None, + "parent": NodeRef.from_dict(obj["parent"]) if obj.get("parent") is not None else None, + "type": obj.get("type"), + "aspects": obj.get("aspects"), + "name": obj.get("name"), + "title": obj.get("title"), + "metadataset": obj.get("metadataset"), + "repositoryType": obj.get("repositoryType"), + "createdAt": obj.get("createdAt"), + "createdBy": Person.from_dict(obj["createdBy"]) if obj.get("createdBy") is not None else None, + "modifiedAt": obj.get("modifiedAt"), + "modifiedBy": Person.from_dict(obj["modifiedBy"]) if obj.get("modifiedBy") is not None else None, + "access": obj.get("access"), + "downloadUrl": obj.get("downloadUrl"), + "properties": obj.get("properties"), + "mimetype": obj.get("mimetype"), + "mediatype": obj.get("mediatype"), + "size": obj.get("size"), + "preview": Preview.from_dict(obj["preview"]) if obj.get("preview") is not None else None, + "iconURL": obj.get("iconURL"), + "collection": Collection.from_dict(obj["collection"]) if obj.get("collection") is not None else None, + "owner": Person.from_dict(obj["owner"]) if obj.get("owner") is not None else None, + "isPublic": obj.get("isPublic") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_data.py b/edu_sharing_openapi/edu_sharing_client/models/node_data.py new file mode 100644 index 00000000..98eca010 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_data.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeData(BaseModel): + """ + NodeData + """ # noqa: E501 + timestamp: Optional[StrictStr] = None + counts: Optional[Dict[str, StrictInt]] = None + __properties: ClassVar[List[str]] = ["timestamp", "counts"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "counts": obj.get("counts") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_data_dto.py b/edu_sharing_openapi/edu_sharing_client/models/node_data_dto.py new file mode 100644 index 00000000..6750c1bf --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_data_dto.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeDataDTO(BaseModel): + """ + NodeDataDTO + """ # noqa: E501 + type: Optional[StrictStr] = None + aspects: Optional[List[StrictStr]] = None + properties: Optional[Dict[str, Dict[str, Any]]] = None + __properties: ClassVar[List[str]] = ["type", "aspects", "properties"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeDataDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeDataDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "type": obj.get("type"), + "aspects": obj.get("aspects"), + "properties": obj.get("properties") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_entries.py b/edu_sharing_openapi/edu_sharing_client/models/node_entries.py new file mode 100644 index 00000000..0db0278d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_entries.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class NodeEntries(BaseModel): + """ + NodeEntries + """ # noqa: E501 + nodes: List[Node] + pagination: Pagination + __properties: ClassVar[List[str]] = ["nodes", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in nodes (list) + _items = [] + if self.nodes: + for _item_nodes in self.nodes: + if _item_nodes: + _items.append(_item_nodes.to_dict()) + _dict['nodes'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodes": [Node.from_dict(_item) for _item in obj["nodes"]] if obj.get("nodes") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_entry.py b/edu_sharing_openapi/edu_sharing_client/models/node_entry.py new file mode 100644 index 00000000..c166b82d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_entry.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node import Node +from typing import Optional, Set +from typing_extensions import Self + +class NodeEntry(BaseModel): + """ + NodeEntry + """ # noqa: E501 + node: Node + __properties: ClassVar[List[str]] = ["node"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "node": Node.from_dict(obj["node"]) if obj.get("node") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_issue_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/node_issue_event_dto.py new file mode 100644 index 00000000..f6d9572d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_issue_event_dto.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class NodeIssueEventDTO(NotificationEventDTO): + """ + NodeIssueEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + reason: Optional[StrictStr] = None + user_comment: Optional[StrictStr] = Field(default=None, alias="userComment") + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "reason", "userComment"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeIssueEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeIssueEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "reason": obj.get("reason"), + "userComment": obj.get("userComment") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_locked.py b/edu_sharing_openapi/edu_sharing_client/models/node_locked.py new file mode 100644 index 00000000..25a7f1ac --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_locked.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class NodeLocked(BaseModel): + """ + NodeLocked + """ # noqa: E501 + is_locked: StrictBool = Field(alias="isLocked") + __properties: ClassVar[List[str]] = ["isLocked"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeLocked from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeLocked from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "isLocked": obj.get("isLocked") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_lti_deep_link.py b/edu_sharing_openapi/edu_sharing_client/models/node_lti_deep_link.py new file mode 100644 index 00000000..ac9fe564 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_lti_deep_link.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeLTIDeepLink(BaseModel): + """ + NodeLTIDeepLink + """ # noqa: E501 + lti_deep_link_return_url: Optional[StrictStr] = Field(default=None, alias="ltiDeepLinkReturnUrl") + jwt_deep_link_response: Optional[StrictStr] = Field(default=None, alias="jwtDeepLinkResponse") + __properties: ClassVar[List[str]] = ["ltiDeepLinkReturnUrl", "jwtDeepLinkResponse"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeLTIDeepLink from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeLTIDeepLink from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "ltiDeepLinkReturnUrl": obj.get("ltiDeepLinkReturnUrl"), + "jwtDeepLinkResponse": obj.get("jwtDeepLinkResponse") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_permission_entry.py b/edu_sharing_openapi/edu_sharing_client/models/node_permission_entry.py new file mode 100644 index 00000000..82db884d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_permission_entry.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node_permissions import NodePermissions +from typing import Optional, Set +from typing_extensions import Self + +class NodePermissionEntry(BaseModel): + """ + NodePermissionEntry + """ # noqa: E501 + permissions: NodePermissions + __properties: ClassVar[List[str]] = ["permissions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodePermissionEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of permissions + if self.permissions: + _dict['permissions'] = self.permissions.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodePermissionEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "permissions": NodePermissions.from_dict(obj["permissions"]) if obj.get("permissions") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_permissions.py b/edu_sharing_openapi/edu_sharing_client/models/node_permissions.py new file mode 100644 index 00000000..7d3d7034 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_permissions.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.ace import ACE +from edu_sharing_client.models.acl import ACL +from typing import Optional, Set +from typing_extensions import Self + +class NodePermissions(BaseModel): + """ + NodePermissions + """ # noqa: E501 + local_permissions: ACL = Field(alias="localPermissions") + inherited_permissions: List[ACE] = Field(alias="inheritedPermissions") + __properties: ClassVar[List[str]] = ["localPermissions", "inheritedPermissions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodePermissions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of local_permissions + if self.local_permissions: + _dict['localPermissions'] = self.local_permissions.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in inherited_permissions (list) + _items = [] + if self.inherited_permissions: + for _item_inherited_permissions in self.inherited_permissions: + if _item_inherited_permissions: + _items.append(_item_inherited_permissions.to_dict()) + _dict['inheritedPermissions'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodePermissions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "localPermissions": ACL.from_dict(obj["localPermissions"]) if obj.get("localPermissions") is not None else None, + "inheritedPermissions": [ACE.from_dict(_item) for _item in obj["inheritedPermissions"]] if obj.get("inheritedPermissions") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_ref.py b/edu_sharing_openapi/edu_sharing_client/models/node_ref.py new file mode 100644 index 00000000..24c4b2ed --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_ref.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeRef(BaseModel): + """ + NodeRef + """ # noqa: E501 + repo: StrictStr + id: StrictStr + archived: StrictBool + is_home_repo: Optional[StrictBool] = Field(default=None, alias="isHomeRepo") + __properties: ClassVar[List[str]] = ["repo", "id", "archived", "isHomeRepo"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeRef from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeRef from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repo": obj.get("repo"), + "id": obj.get("id"), + "archived": obj.get("archived"), + "isHomeRepo": obj.get("isHomeRepo") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_relation.py b/edu_sharing_openapi/edu_sharing_client/models/node_relation.py new file mode 100644 index 00000000..fa93dde9 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_relation.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.relation_data import RelationData +from typing import Optional, Set +from typing_extensions import Self + +class NodeRelation(BaseModel): + """ + NodeRelation + """ # noqa: E501 + node: Optional[Node] = None + relations: Optional[List[RelationData]] = None + __properties: ClassVar[List[str]] = ["node", "relations"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeRelation from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in relations (list) + _items = [] + if self.relations: + for _item_relations in self.relations: + if _item_relations: + _items.append(_item_relations.to_dict()) + _dict['relations'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeRelation from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "node": Node.from_dict(obj["node"]) if obj.get("node") is not None else None, + "relations": [RelationData.from_dict(_item) for _item in obj["relations"]] if obj.get("relations") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_remote.py b/edu_sharing_openapi/edu_sharing_client/models/node_remote.py new file mode 100644 index 00000000..986bda38 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_remote.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node import Node +from typing import Optional, Set +from typing_extensions import Self + +class NodeRemote(BaseModel): + """ + NodeRemote + """ # noqa: E501 + node: Node + remote: Node + __properties: ClassVar[List[str]] = ["node", "remote"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeRemote from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + # override the default output from pydantic by calling `to_dict()` of remote + if self.remote: + _dict['remote'] = self.remote.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeRemote from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "node": Node.from_dict(obj["node"]) if obj.get("node") is not None else None, + "remote": Node.from_dict(obj["remote"]) if obj.get("remote") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_share.py b/edu_sharing_openapi/edu_sharing_client/models/node_share.py new file mode 100644 index 00000000..96a3c19d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_share.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeShare(BaseModel): + """ + NodeShare + """ # noqa: E501 + password: Optional[StrictBool] = None + token: Optional[StrictStr] = None + email: Optional[StrictStr] = None + expiry_date: Optional[StrictInt] = Field(default=None, alias="expiryDate") + invited_at: Optional[StrictInt] = Field(default=None, alias="invitedAt") + download_count: Optional[StrictInt] = Field(default=None, alias="downloadCount") + url: Optional[StrictStr] = None + share_id: Optional[StrictStr] = Field(default=None, alias="shareId") + __properties: ClassVar[List[str]] = ["password", "token", "email", "expiryDate", "invitedAt", "downloadCount", "url", "shareId"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeShare from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeShare from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "password": obj.get("password"), + "token": obj.get("token"), + "email": obj.get("email"), + "expiryDate": obj.get("expiryDate"), + "invitedAt": obj.get("invitedAt"), + "downloadCount": obj.get("downloadCount"), + "url": obj.get("url"), + "shareId": obj.get("shareId") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_stats.py b/edu_sharing_openapi/edu_sharing_client/models/node_stats.py new file mode 100644 index 00000000..ea120f70 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_stats.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeStats(BaseModel): + """ + NodeStats + """ # noqa: E501 + total: Optional[Dict[str, StrictInt]] = None + __properties: ClassVar[List[str]] = ["total"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeStats from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeStats from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "total": obj.get("total") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_text.py b/edu_sharing_openapi/edu_sharing_client/models/node_text.py new file mode 100644 index 00000000..b37abcbb --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_text.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NodeText(BaseModel): + """ + NodeText + """ # noqa: E501 + text: Optional[StrictStr] = None + html: Optional[StrictStr] = None + raw: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["text", "html", "raw"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeText from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeText from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "text": obj.get("text"), + "html": obj.get("html"), + "raw": obj.get("raw") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_version.py b/edu_sharing_openapi/edu_sharing_client/models/node_version.py new file mode 100644 index 00000000..a445f61c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_version.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_version_ref import NodeVersionRef +from edu_sharing_client.models.person import Person +from typing import Optional, Set +from typing_extensions import Self + +class NodeVersion(BaseModel): + """ + NodeVersion + """ # noqa: E501 + properties: Optional[Dict[str, List[StrictStr]]] = None + version: NodeVersionRef + comment: StrictStr + modified_at: StrictStr = Field(alias="modifiedAt") + modified_by: Person = Field(alias="modifiedBy") + content_url: Optional[StrictStr] = Field(default=None, alias="contentUrl") + __properties: ClassVar[List[str]] = ["properties", "version", "comment", "modifiedAt", "modifiedBy", "contentUrl"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeVersion from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of version + if self.version: + _dict['version'] = self.version.to_dict() + # override the default output from pydantic by calling `to_dict()` of modified_by + if self.modified_by: + _dict['modifiedBy'] = self.modified_by.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeVersion from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "properties": obj.get("properties"), + "version": NodeVersionRef.from_dict(obj["version"]) if obj.get("version") is not None else None, + "comment": obj.get("comment"), + "modifiedAt": obj.get("modifiedAt"), + "modifiedBy": Person.from_dict(obj["modifiedBy"]) if obj.get("modifiedBy") is not None else None, + "contentUrl": obj.get("contentUrl") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_version_entries.py b/edu_sharing_openapi/edu_sharing_client/models/node_version_entries.py new file mode 100644 index 00000000..d8741e0d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_version_entries.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node_version import NodeVersion +from typing import Optional, Set +from typing_extensions import Self + +class NodeVersionEntries(BaseModel): + """ + NodeVersionEntries + """ # noqa: E501 + versions: List[NodeVersion] + __properties: ClassVar[List[str]] = ["versions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeVersionEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in versions (list) + _items = [] + if self.versions: + for _item_versions in self.versions: + if _item_versions: + _items.append(_item_versions.to_dict()) + _dict['versions'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeVersionEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "versions": [NodeVersion.from_dict(_item) for _item in obj["versions"]] if obj.get("versions") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_version_entry.py b/edu_sharing_openapi/edu_sharing_client/models/node_version_entry.py new file mode 100644 index 00000000..db4cb7a3 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_version_entry.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node_version import NodeVersion +from typing import Optional, Set +from typing_extensions import Self + +class NodeVersionEntry(BaseModel): + """ + NodeVersionEntry + """ # noqa: E501 + version: NodeVersion + __properties: ClassVar[List[str]] = ["version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeVersionEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of version + if self.version: + _dict['version'] = self.version.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeVersionEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "version": NodeVersion.from_dict(obj["version"]) if obj.get("version") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_version_ref.py b/edu_sharing_openapi/edu_sharing_client/models/node_version_ref.py new file mode 100644 index 00000000..1e843158 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_version_ref.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node_ref import NodeRef +from typing import Optional, Set +from typing_extensions import Self + +class NodeVersionRef(BaseModel): + """ + NodeVersionRef + """ # noqa: E501 + node: NodeRef + major: StrictInt + minor: StrictInt + __properties: ClassVar[List[str]] = ["node", "major", "minor"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeVersionRef from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeVersionRef from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "node": NodeRef.from_dict(obj["node"]) if obj.get("node") is not None else None, + "major": obj.get("major"), + "minor": obj.get("minor") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/node_version_ref_entries.py b/edu_sharing_openapi/edu_sharing_client/models/node_version_ref_entries.py new file mode 100644 index 00000000..ee8ad26d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/node_version_ref_entries.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node_version_ref import NodeVersionRef +from typing import Optional, Set +from typing_extensions import Self + +class NodeVersionRefEntries(BaseModel): + """ + NodeVersionRefEntries + """ # noqa: E501 + versions: List[NodeVersionRef] + __properties: ClassVar[List[str]] = ["versions"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NodeVersionRefEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in versions (list) + _items = [] + if self.versions: + for _item_versions in self.versions: + if _item_versions: + _items.append(_item_versions.to_dict()) + _dict['versions'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NodeVersionRefEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "versions": [NodeVersionRef.from_dict(_item) for _item in obj["versions"]] if obj.get("versions") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/notification_config.py b/edu_sharing_openapi/edu_sharing_client/models/notification_config.py new file mode 100644 index 00000000..422c3824 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/notification_config.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.notification_intervals import NotificationIntervals +from typing import Optional, Set +from typing_extensions import Self + +class NotificationConfig(BaseModel): + """ + NotificationConfig + """ # noqa: E501 + config_mode: Optional[StrictStr] = Field(default=None, alias="configMode") + default_interval: Optional[StrictStr] = Field(default=None, alias="defaultInterval") + intervals: Optional[NotificationIntervals] = None + __properties: ClassVar[List[str]] = ["configMode", "defaultInterval", "intervals"] + + @field_validator('config_mode') + def config_mode_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['uniformly', 'individual']): + raise ValueError("must be one of enum values ('uniformly', 'individual')") + return value + + @field_validator('default_interval') + def default_interval_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotificationConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + 
"""Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of intervals + if self.intervals: + _dict['intervals'] = self.intervals.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotificationConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "configMode": obj.get("configMode"), + "defaultInterval": obj.get("defaultInterval"), + "intervals": NotificationIntervals.from_dict(obj["intervals"]) if obj.get("intervals") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/notification_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/notification_event_dto.py new file mode 100644 index 00000000..2b32a954 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/notification_event_dto.py @@ -0,0 +1,151 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from importlib import import_module +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional, Union +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from edu_sharing_client.models.add_to_collection_event_dto import AddToCollectionEventDTO + from edu_sharing_client.models.comment_event_dto import CommentEventDTO + from edu_sharing_client.models.invite_event_dto import InviteEventDTO + from edu_sharing_client.models.metadata_suggestion_event_dto import MetadataSuggestionEventDTO + from edu_sharing_client.models.node_issue_event_dto import NodeIssueEventDTO + from edu_sharing_client.models.propose_for_collection_event_dto import ProposeForCollectionEventDTO + from edu_sharing_client.models.rating_event_dto import RatingEventDTO + from edu_sharing_client.models.workflow_event_dto import WorkflowEventDTO + +class NotificationEventDTO(BaseModel): + """ + NotificationEventDTO + """ # noqa: E501 + timestamp: Optional[datetime] = None + creator: Optional[UserDataDTO] = None + receiver: Optional[UserDataDTO] = None + status: Optional[StrictStr] = None + id: Optional[StrictStr] = Field(default=None, alias="_id") + var_class: StrictStr = Field(alias="_class") + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class"] + + @field_validator('status') + def status_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['PENDING', 'SENT', 'READ', 'IGNORED']): + raise 
ValueError("must be one of enum values ('PENDING', 'SENT', 'READ', 'IGNORED')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + # JSON field name that stores the object type + __discriminator_property_name: ClassVar[str] = '_class' + + # discriminator mappings + __discriminator_value_class_map: ClassVar[Dict[str, str]] = { + 'AddToCollectionEventDTO': 'AddToCollectionEventDTO','CommentEventDTO': 'CommentEventDTO','InviteEventDTO': 'InviteEventDTO','MetadataSuggestionEventDTO': 'MetadataSuggestionEventDTO','NodeIssueEventDTO': 'NodeIssueEventDTO','ProposeForCollectionEventDTO': 'ProposeForCollectionEventDTO','RatingEventDTO': 'RatingEventDTO','WorkflowEventDTO': 'WorkflowEventDTO' + } + + @classmethod + def get_discriminator_value(cls, obj: Dict[str, Any]) -> Optional[str]: + """Returns the discriminator value (object type) of the data""" + discriminator_value = obj[cls.__discriminator_property_name] + if discriminator_value: + return cls.__discriminator_value_class_map.get(discriminator_value) + else: + return None + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Union[AddToCollectionEventDTO, CommentEventDTO, InviteEventDTO, MetadataSuggestionEventDTO, NodeIssueEventDTO, ProposeForCollectionEventDTO, RatingEventDTO, WorkflowEventDTO]]: + """Create an instance of NotificationEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Dict[str, Any]) -> Optional[Union[AddToCollectionEventDTO, CommentEventDTO, InviteEventDTO, MetadataSuggestionEventDTO, NodeIssueEventDTO, ProposeForCollectionEventDTO, RatingEventDTO, WorkflowEventDTO]]: + """Create an instance of NotificationEventDTO from a dict""" + # look up the object type based on discriminator mapping + object_type = cls.get_discriminator_value(obj) + if object_type == 'AddToCollectionEventDTO': + return import_module("edu_sharing_client.models.add_to_collection_event_dto").AddToCollectionEventDTO.from_dict(obj) + if object_type == 'CommentEventDTO': + return import_module("edu_sharing_client.models.comment_event_dto").CommentEventDTO.from_dict(obj) + if object_type == 'InviteEventDTO': + return import_module("edu_sharing_client.models.invite_event_dto").InviteEventDTO.from_dict(obj) + if object_type == 'MetadataSuggestionEventDTO': + return import_module("edu_sharing_client.models.metadata_suggestion_event_dto").MetadataSuggestionEventDTO.from_dict(obj) + if object_type == 'NodeIssueEventDTO': + return import_module("edu_sharing_client.models.node_issue_event_dto").NodeIssueEventDTO.from_dict(obj) + if object_type == 'ProposeForCollectionEventDTO': + return import_module("edu_sharing_client.models.propose_for_collection_event_dto").ProposeForCollectionEventDTO.from_dict(obj) + if object_type == 'RatingEventDTO': + return import_module("edu_sharing_client.models.rating_event_dto").RatingEventDTO.from_dict(obj) + if object_type == 'WorkflowEventDTO': + return import_module("edu_sharing_client.models.workflow_event_dto").WorkflowEventDTO.from_dict(obj) + + raise ValueError("NotificationEventDTO failed to lookup discriminator value from " + + json.dumps(obj) + ". Discriminator property name: " + cls.__discriminator_property_name + + ", mapping: " + json.dumps(cls.__discriminator_value_class_map)) + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/notification_intervals.py b/edu_sharing_openapi/edu_sharing_client/models/notification_intervals.py new file mode 100644 index 00000000..0c1e1fe2 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/notification_intervals.py @@ -0,0 +1,181 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class NotificationIntervals(BaseModel): + """ + NotificationIntervals + """ # noqa: E501 + add_to_collection_event: Optional[StrictStr] = Field(default=None, alias="addToCollectionEvent") + propose_for_collection_event: Optional[StrictStr] = Field(default=None, alias="proposeForCollectionEvent") + comment_event: Optional[StrictStr] = Field(default=None, alias="commentEvent") + invite_event: Optional[StrictStr] = Field(default=None, alias="inviteEvent") + node_issue_event: Optional[StrictStr] = Field(default=None, alias="nodeIssueEvent") + rating_event: Optional[StrictStr] = Field(default=None, alias="ratingEvent") + workflow_event: Optional[StrictStr] = Field(default=None, alias="workflowEvent") + metadata_suggestion_event: Optional[StrictStr] = Field(default=None, alias="metadataSuggestionEvent") + __properties: ClassVar[List[str]] = ["addToCollectionEvent", "proposeForCollectionEvent", "commentEvent", "inviteEvent", "nodeIssueEvent", "ratingEvent", "workflowEvent", "metadataSuggestionEvent"] + + @field_validator('add_to_collection_event') + def add_to_collection_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + @field_validator('propose_for_collection_event') + def propose_for_collection_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + @field_validator('comment_event') + def comment_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + @field_validator('invite_event') + def invite_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + @field_validator('node_issue_event') + def node_issue_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + @field_validator('rating_event') + def rating_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + @field_validator('workflow_event') + def workflow_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in 
set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + @field_validator('metadata_suggestion_event') + def metadata_suggestion_event_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['immediately', 'disabled', 'daily', 'weekly']): + raise ValueError("must be one of enum values ('immediately', 'disabled', 'daily', 'weekly')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotificationIntervals from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotificationIntervals from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "addToCollectionEvent": obj.get("addToCollectionEvent"), + "proposeForCollectionEvent": obj.get("proposeForCollectionEvent"), + "commentEvent": obj.get("commentEvent"), + "inviteEvent": obj.get("inviteEvent"), + "nodeIssueEvent": obj.get("nodeIssueEvent"), + "ratingEvent": obj.get("ratingEvent"), + "workflowEvent": obj.get("workflowEvent"), + "metadataSuggestionEvent": obj.get("metadataSuggestionEvent") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/notification_response_page.py b/edu_sharing_openapi/edu_sharing_client/models/notification_response_page.py new file mode 100644 index 00000000..79d32c19 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/notification_response_page.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.pageable import Pageable +from edu_sharing_client.models.sort import Sort +from typing import Optional, Set +from typing_extensions import Self + +class NotificationResponsePage(BaseModel): + """ + NotificationResponsePage + """ # noqa: E501 + content: Optional[List[NotificationEventDTO]] = None + pageable: Optional[Pageable] = None + total_elements: Optional[StrictInt] = Field(default=None, alias="totalElements") + total_pages: Optional[StrictInt] = Field(default=None, alias="totalPages") + last: Optional[StrictBool] = None + number_of_elements: Optional[StrictInt] = Field(default=None, alias="numberOfElements") + first: Optional[StrictBool] = None + size: Optional[StrictInt] = None + number: Optional[StrictInt] = None + sort: Optional[Sort] = None + empty: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["content", "pageable", "totalElements", "totalPages", "last", "numberOfElements", "first", "size", "number", "sort", "empty"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotificationResponsePage from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in content (list) + _items = [] + if self.content: + for _item_content in self.content: + if _item_content: + _items.append(_item_content.to_dict()) + _dict['content'] = _items + # override the default output from pydantic by calling `to_dict()` of pageable + if self.pageable: + _dict['pageable'] = self.pageable.to_dict() + # override the default output from pydantic by calling `to_dict()` of sort + if self.sort: + _dict['sort'] = self.sort.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotificationResponsePage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "content": [NotificationEventDTO.from_dict(_item) for _item in obj["content"]] if obj.get("content") is not None else None, + "pageable": Pageable.from_dict(obj["pageable"]) if obj.get("pageable") is not None else None, + "totalElements": obj.get("totalElements"), + "totalPages": obj.get("totalPages"), + "last": obj.get("last"), + "numberOfElements": obj.get("numberOfElements"), + "first": obj.get("first"), + "size": obj.get("size"), + "number": obj.get("number"), + "sort": Sort.from_dict(obj["sort"]) if obj.get("sort") is not None else None, + "empty": obj.get("empty") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/notify_entry.py b/edu_sharing_openapi/edu_sharing_client/models/notify_entry.py new file mode 100644 index 00000000..6fcb5bc0 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/notify_entry.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.acl import ACL +from edu_sharing_client.models.user import User +from typing import Optional, Set +from typing_extensions import Self + +class NotifyEntry(BaseModel): + """ + NotifyEntry + """ # noqa: E501 + var_date: StrictInt = Field(alias="date") + permissions: ACL + user: User + action: StrictStr + __properties: ClassVar[List[str]] = ["date", "permissions", "user", "action"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of NotifyEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of permissions + if self.permissions: + _dict['permissions'] = self.permissions.to_dict() + # override the default output from pydantic by calling `to_dict()` of user + if self.user: + _dict['user'] = self.user.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of NotifyEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "date": obj.get("date"), + "permissions": ACL.from_dict(obj["permissions"]) if obj.get("permissions") is not None else None, + "user": User.from_dict(obj["user"]) if obj.get("user") is not None else None, + "action": obj.get("action") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/open_id_configuration.py b/edu_sharing_openapi/edu_sharing_client/models/open_id_configuration.py new file mode 100644 index 00000000..aa9de575 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/open_id_configuration.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.lti_platform_configuration import LTIPlatformConfiguration +from typing import Optional, Set +from typing_extensions import Self + +class OpenIdConfiguration(BaseModel): + """ + OpenIdConfiguration + """ # noqa: E501 + issuer: Optional[StrictStr] = None + token_endpoint: Optional[StrictStr] = None + token_endpoint_auth_methods_supported: Optional[List[StrictStr]] = None + token_endpoint_auth_signing_alg_values_supported: Optional[List[StrictStr]] = None + jwks_uri: Optional[StrictStr] = None + authorization_endpoint: Optional[StrictStr] = None + registration_endpoint: Optional[StrictStr] = None + scopes_supported: Optional[List[StrictStr]] = None + response_types_supported: Optional[List[StrictStr]] = None + subject_types_supported: Optional[List[StrictStr]] = None + id_token_signing_alg_values_supported: Optional[List[StrictStr]] = None + claims_supported: Optional[List[StrictStr]] = None + https__purl_imsglobal_org_spec_lti_platform_configuration: Optional[LTIPlatformConfiguration] = Field(default=None, alias="https://purl.imsglobal.org/spec/lti-platform-configuration") + __properties: ClassVar[List[str]] = ["issuer", "token_endpoint", "token_endpoint_auth_methods_supported", "token_endpoint_auth_signing_alg_values_supported", "jwks_uri", "authorization_endpoint", "registration_endpoint", "scopes_supported", "response_types_supported", "subject_types_supported", "id_token_signing_alg_values_supported", "claims_supported", "https://purl.imsglobal.org/spec/lti-platform-configuration"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OpenIdConfiguration from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of https__purl_imsglobal_org_spec_lti_platform_configuration + if self.https__purl_imsglobal_org_spec_lti_platform_configuration: + _dict['https://purl.imsglobal.org/spec/lti-platform-configuration'] = self.https__purl_imsglobal_org_spec_lti_platform_configuration.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OpenIdConfiguration from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "issuer": obj.get("issuer"), + "token_endpoint": obj.get("token_endpoint"), + "token_endpoint_auth_methods_supported": obj.get("token_endpoint_auth_methods_supported"), + "token_endpoint_auth_signing_alg_values_supported": obj.get("token_endpoint_auth_signing_alg_values_supported"), + "jwks_uri": obj.get("jwks_uri"), + "authorization_endpoint": obj.get("authorization_endpoint"), + "registration_endpoint": obj.get("registration_endpoint"), + "scopes_supported": obj.get("scopes_supported"), + "response_types_supported": obj.get("response_types_supported"), + "subject_types_supported": obj.get("subject_types_supported"), + "id_token_signing_alg_values_supported": obj.get("id_token_signing_alg_values_supported"), + "claims_supported": obj.get("claims_supported"), + "https://purl.imsglobal.org/spec/lti-platform-configuration": LTIPlatformConfiguration.from_dict(obj["https://purl.imsglobal.org/spec/lti-platform-configuration"]) if obj.get("https://purl.imsglobal.org/spec/lti-platform-configuration") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/open_id_registration_result.py b/edu_sharing_openapi/edu_sharing_client/models/open_id_registration_result.py new file mode 100644 index 00000000..baa4928d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/open_id_registration_result.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.lti_tool_configuration import LTIToolConfiguration +from typing import Optional, Set +from typing_extensions import Self + +class OpenIdRegistrationResult(BaseModel): + """ + OpenIdRegistrationResult + """ # noqa: E501 + client_id: Optional[StrictStr] = None + response_types: Optional[List[StrictStr]] = None + jwks_uri: Optional[StrictStr] = None + initiate_login_uri: Optional[StrictStr] = None + grant_types: Optional[List[StrictStr]] = None + redirect_uris: Optional[List[StrictStr]] = None + application_type: Optional[StrictStr] = None + token_endpoint_auth_method: Optional[StrictStr] = None + client_name: Optional[StrictStr] = None + logo_uri: Optional[StrictStr] = None + scope: Optional[StrictStr] = None + https__purl_imsglobal_org_spec_lti_tool_configuration: Optional[LTIToolConfiguration] = Field(default=None, alias="https://purl.imsglobal.org/spec/lti-tool-configuration") + __properties: ClassVar[List[str]] = ["client_id", "response_types", "jwks_uri", "initiate_login_uri", "grant_types", "redirect_uris", "application_type", "token_endpoint_auth_method", "client_name", "logo_uri", "scope", "https://purl.imsglobal.org/spec/lti-tool-configuration"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OpenIdRegistrationResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of https__purl_imsglobal_org_spec_lti_tool_configuration + if self.https__purl_imsglobal_org_spec_lti_tool_configuration: + _dict['https://purl.imsglobal.org/spec/lti-tool-configuration'] = self.https__purl_imsglobal_org_spec_lti_tool_configuration.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OpenIdRegistrationResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "client_id": obj.get("client_id"), + "response_types": obj.get("response_types"), + "jwks_uri": obj.get("jwks_uri"), + "initiate_login_uri": obj.get("initiate_login_uri"), + "grant_types": obj.get("grant_types"), + "redirect_uris": obj.get("redirect_uris"), + "application_type": obj.get("application_type"), + "token_endpoint_auth_method": obj.get("token_endpoint_auth_method"), + "client_name": obj.get("client_name"), + "logo_uri": obj.get("logo_uri"), + "scope": obj.get("scope"), + "https://purl.imsglobal.org/spec/lti-tool-configuration": LTIToolConfiguration.from_dict(obj["https://purl.imsglobal.org/spec/lti-tool-configuration"]) if obj.get("https://purl.imsglobal.org/spec/lti-tool-configuration") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/organisations_import_result.py b/edu_sharing_openapi/edu_sharing_client/models/organisations_import_result.py new file mode 100644 index 00000000..071b383f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/organisations_import_result.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class OrganisationsImportResult(BaseModel): + """ + OrganisationsImportResult + """ # noqa: E501 + rows: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["rows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OrganisationsImportResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OrganisationsImportResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "rows": obj.get("rows") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/organization.py b/edu_sharing_openapi/edu_sharing_client/models/organization.py new file mode 100644 index 00000000..7d8b987a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/organization.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.group_profile import GroupProfile +from edu_sharing_client.models.node_ref import NodeRef +from typing import Optional, Set +from typing_extensions import Self + +class Organization(BaseModel): + """ + Organization + """ # noqa: E501 + properties: Optional[Dict[str, List[StrictStr]]] = None + editable: Optional[StrictBool] = None + signup_method: Optional[StrictStr] = Field(default=None, alias="signupMethod") + ref: Optional[NodeRef] = None + aspects: Optional[List[StrictStr]] = None + authority_name: StrictStr = Field(alias="authorityName") + authority_type: Optional[StrictStr] = Field(default=None, alias="authorityType") + group_name: Optional[StrictStr] = Field(default=None, alias="groupName") + profile: Optional[GroupProfile] = None + administration_access: Optional[StrictBool] = Field(default=None, alias="administrationAccess") + shared_folder: Optional[NodeRef] = Field(default=None, alias="sharedFolder") + __properties: ClassVar[List[str]] = ["properties", "editable", "signupMethod", "ref", "aspects", "authorityName", "authorityType", "groupName", "profile", "administrationAccess", "sharedFolder"] + + @field_validator('signup_method') + def signup_method_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['simple', 'password', 'list']): + raise ValueError("must be one of enum values ('simple', 'password', 'list')") + return value + + @field_validator('authority_type') + def authority_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST']): + raise ValueError("must be one of enum values ('USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" 
+ return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Organization from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of ref + if self.ref: + _dict['ref'] = self.ref.to_dict() + # override the default output from pydantic by calling `to_dict()` of profile + if self.profile: + _dict['profile'] = self.profile.to_dict() + # override the default output from pydantic by calling `to_dict()` of shared_folder + if self.shared_folder: + _dict['sharedFolder'] = self.shared_folder.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Organization from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "properties": obj.get("properties"), + "editable": obj.get("editable"), + "signupMethod": obj.get("signupMethod"), + "ref": NodeRef.from_dict(obj["ref"]) if obj.get("ref") is not None else None, + "aspects": obj.get("aspects"), + "authorityName": obj.get("authorityName"), + "authorityType": obj.get("authorityType"), + "groupName": obj.get("groupName"), + "profile": GroupProfile.from_dict(obj["profile"]) if obj.get("profile") is not None else None, + "administrationAccess": obj.get("administrationAccess"), + "sharedFolder": NodeRef.from_dict(obj["sharedFolder"]) if obj.get("sharedFolder") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/organization_entries.py b/edu_sharing_openapi/edu_sharing_client/models/organization_entries.py new file mode 100644 index 00000000..5a6751be --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/organization_entries.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class OrganizationEntries(BaseModel): + """ + OrganizationEntries + """ # noqa: E501 + organizations: List[Organization] + pagination: Pagination + can_create: Optional[StrictBool] = Field(default=None, alias="canCreate") + __properties: ClassVar[List[str]] = ["organizations", "pagination", "canCreate"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of OrganizationEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in organizations (list) + _items = [] + if self.organizations: + for _item_organizations in self.organizations: + if _item_organizations: + _items.append(_item_organizations.to_dict()) + _dict['organizations'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of OrganizationEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "organizations": [Organization.from_dict(_item) for _item in obj["organizations"]] if obj.get("organizations") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "canCreate": obj.get("canCreate") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/pageable.py b/edu_sharing_openapi/edu_sharing_client/models/pageable.py new file mode 100644 index 00000000..d54078c7 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/pageable.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.sort import Sort +from typing import Optional, Set +from typing_extensions import Self + +class Pageable(BaseModel): + """ + Pageable + """ # noqa: E501 + page_number: Optional[StrictInt] = Field(default=None, alias="pageNumber") + unpaged: Optional[StrictBool] = None + offset: Optional[StrictInt] = None + sort: Optional[Sort] = None + paged: Optional[StrictBool] = None + page_size: Optional[StrictInt] = Field(default=None, alias="pageSize") + __properties: ClassVar[List[str]] = ["pageNumber", "unpaged", "offset", "sort", "paged", "pageSize"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Pageable from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of sort + if self.sort: + _dict['sort'] = self.sort.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Pageable from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "pageNumber": obj.get("pageNumber"), + "unpaged": obj.get("unpaged"), + "offset": obj.get("offset"), + "sort": Sort.from_dict(obj["sort"]) if obj.get("sort") is not None else None, + "paged": obj.get("paged"), + "pageSize": obj.get("pageSize") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/pagination.py b/edu_sharing_openapi/edu_sharing_client/models/pagination.py new file mode 100644 index 00000000..c8d6da48 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/pagination.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Pagination(BaseModel): + """ + Pagination + """ # noqa: E501 + total: StrictInt + var_from: StrictInt = Field(alias="from") + count: StrictInt + __properties: ClassVar[List[str]] = ["total", "from", "count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Pagination from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Pagination from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "total": obj.get("total"), + "from": obj.get("from"), + "count": obj.get("count") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/parameters.py b/edu_sharing_openapi/edu_sharing_client/models/parameters.py new file mode 100644 index 00000000..c58744a0 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/parameters.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.general import General +from typing import Optional, Set +from typing_extensions import Self + +class Parameters(BaseModel): + """ + Parameters + """ # noqa: E501 + general: Optional[General] = None + __properties: ClassVar[List[str]] = ["general"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Parameters from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of general + if self.general: + _dict['general'] = self.general.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Parameters from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "general": General.from_dict(obj["general"]) if obj.get("general") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/parent_entries.py b/edu_sharing_openapi/edu_sharing_client/models/parent_entries.py new file mode 100644 index 00000000..f55200fc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/parent_entries.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class ParentEntries(BaseModel): + """ + ParentEntries + """ # noqa: E501 + scope: Optional[StrictStr] = None + nodes: List[Node] + pagination: Pagination + __properties: ClassVar[List[str]] = ["scope", "nodes", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ParentEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in nodes (list) + _items = [] + if self.nodes: + for _item_nodes in self.nodes: + if _item_nodes: + _items.append(_item_nodes.to_dict()) + _dict['nodes'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ParentEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "scope": obj.get("scope"), + "nodes": [Node.from_dict(_item) for _item in obj["nodes"]] if obj.get("nodes") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/person.py b/edu_sharing_openapi/edu_sharing_client/models/person.py new file mode 100644 index 00000000..de5e6111 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/person.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.user_profile import UserProfile +from typing import Optional, Set +from typing_extensions import Self + +class Person(BaseModel): + """ + Person + """ # noqa: E501 + profile: Optional[UserProfile] = None + first_name: Optional[StrictStr] = Field(default=None, alias="firstName") + last_name: Optional[StrictStr] = Field(default=None, alias="lastName") + mailbox: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["profile", "firstName", "lastName", "mailbox"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Person from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of profile + if self.profile: + _dict['profile'] = self.profile.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Person from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "profile": UserProfile.from_dict(obj["profile"]) if obj.get("profile") is not None else None, + "firstName": obj.get("firstName"), + "lastName": obj.get("lastName"), + "mailbox": obj.get("mailbox") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/person_delete_options.py b/edu_sharing_openapi/edu_sharing_client/models/person_delete_options.py new file mode 100644 index 00000000..e27f8d0a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/person_delete_options.py @@ -0,0 +1,135 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection_options import CollectionOptions +from edu_sharing_client.models.delete_option import DeleteOption +from edu_sharing_client.models.home_folder_options import HomeFolderOptions +from edu_sharing_client.models.shared_folder_options import SharedFolderOptions +from typing import Optional, Set +from typing_extensions import Self + +class PersonDeleteOptions(BaseModel): + """ + PersonDeleteOptions + """ # noqa: E501 + cleanup_metadata: Optional[StrictBool] = Field(default=None, alias="cleanupMetadata") + home_folder: Optional[HomeFolderOptions] = Field(default=None, alias="homeFolder") + shared_folders: Optional[SharedFolderOptions] = Field(default=None, alias="sharedFolders") + collections: Optional[CollectionOptions] = None + ratings: Optional[DeleteOption] = None + comments: Optional[DeleteOption] = None + collection_feedback: Optional[DeleteOption] = Field(default=None, alias="collectionFeedback") + statistics: Optional[DeleteOption] = None + stream: Optional[DeleteOption] = None + receiver: Optional[StrictStr] = None + receiver_group: Optional[StrictStr] = Field(default=None, alias="receiverGroup") + __properties: ClassVar[List[str]] = ["cleanupMetadata", "homeFolder", "sharedFolders", "collections", "ratings", "comments", "collectionFeedback", "statistics", "stream", "receiver", "receiverGroup"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PersonDeleteOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of home_folder + if self.home_folder: + _dict['homeFolder'] = self.home_folder.to_dict() + # override the default output from pydantic by calling `to_dict()` of shared_folders + if self.shared_folders: + _dict['sharedFolders'] = self.shared_folders.to_dict() + # override the default output from pydantic by calling `to_dict()` of collections + if self.collections: + _dict['collections'] = self.collections.to_dict() + # override the default output from pydantic by calling `to_dict()` of ratings + if self.ratings: + _dict['ratings'] = self.ratings.to_dict() + # override the default output from pydantic by calling `to_dict()` of comments + if self.comments: + _dict['comments'] = self.comments.to_dict() + # override the default output from pydantic by calling `to_dict()` of collection_feedback + if self.collection_feedback: + _dict['collectionFeedback'] = self.collection_feedback.to_dict() + # override the default output from pydantic by calling `to_dict()` of statistics + if self.statistics: + _dict['statistics'] = self.statistics.to_dict() + # override the default output from pydantic by calling `to_dict()` of stream + if self.stream: + _dict['stream'] = self.stream.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PersonDeleteOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "cleanupMetadata": obj.get("cleanupMetadata"), + "homeFolder": HomeFolderOptions.from_dict(obj["homeFolder"]) if obj.get("homeFolder") is not None else None, + "sharedFolders": SharedFolderOptions.from_dict(obj["sharedFolders"]) if obj.get("sharedFolders") is not None else None, + "collections": CollectionOptions.from_dict(obj["collections"]) if obj.get("collections") is not None else None, + "ratings": DeleteOption.from_dict(obj["ratings"]) if obj.get("ratings") is not None else None, + "comments": DeleteOption.from_dict(obj["comments"]) if obj.get("comments") is not None else None, + "collectionFeedback": DeleteOption.from_dict(obj["collectionFeedback"]) if obj.get("collectionFeedback") is not None else None, + "statistics": DeleteOption.from_dict(obj["statistics"]) if obj.get("statistics") is not None else None, + "stream": DeleteOption.from_dict(obj["stream"]) if obj.get("stream") is not None else None, + "receiver": obj.get("receiver"), + "receiverGroup": obj.get("receiverGroup") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/person_delete_result.py b/edu_sharing_openapi/edu_sharing_client/models/person_delete_result.py new file mode 100644 index 00000000..7c64be56 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/person_delete_result.py @@ -0,0 +1,132 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection_counts import CollectionCounts +from edu_sharing_client.models.counts import Counts +from typing import Optional, Set +from typing_extensions import Self + +class PersonDeleteResult(BaseModel): + """ + PersonDeleteResult + """ # noqa: E501 + authority_name: Optional[StrictStr] = Field(default=None, alias="authorityName") + deleted_name: Optional[StrictStr] = Field(default=None, alias="deletedName") + home_folder: Optional[Dict[str, Counts]] = Field(default=None, alias="homeFolder") + shared_folders: Optional[Dict[str, Counts]] = Field(default=None, alias="sharedFolders") + collections: Optional[CollectionCounts] = None + comments: Optional[StrictInt] = None + ratings: Optional[StrictInt] = None + collection_feedback: Optional[StrictInt] = Field(default=None, alias="collectionFeedback") + stream: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["authorityName", "deletedName", "homeFolder", "sharedFolders", "collections", "comments", "ratings", "collectionFeedback", "stream"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PersonDeleteResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each value in home_folder (dict) + _field_dict = {} + if self.home_folder: + for _key_home_folder in self.home_folder: + if self.home_folder[_key_home_folder]: + _field_dict[_key_home_folder] = self.home_folder[_key_home_folder].to_dict() + _dict['homeFolder'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of each value in shared_folders (dict) + _field_dict = {} + if self.shared_folders: + for _key_shared_folders in self.shared_folders: + if self.shared_folders[_key_shared_folders]: + _field_dict[_key_shared_folders] = self.shared_folders[_key_shared_folders].to_dict() + _dict['sharedFolders'] = _field_dict + # override the default output from pydantic by calling `to_dict()` of collections + if self.collections: + _dict['collections'] = self.collections.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PersonDeleteResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "authorityName": obj.get("authorityName"), + "deletedName": obj.get("deletedName"), + "homeFolder": dict( + (_k, Counts.from_dict(_v)) + for _k, _v in obj["homeFolder"].items() + ) + if obj.get("homeFolder") is not None + else None, + "sharedFolders": dict( + (_k, Counts.from_dict(_v)) + for _k, _v in obj["sharedFolders"].items() + ) + if obj.get("sharedFolders") is not None + else None, + "collections": CollectionCounts.from_dict(obj["collections"]) if obj.get("collections") is not None else None, + "comments": obj.get("comments"), + "ratings": obj.get("ratings"), + "collectionFeedback": obj.get("collectionFeedback"), + "stream": obj.get("stream") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/person_report.py b/edu_sharing_openapi/edu_sharing_client/models/person_report.py new file mode 100644 index 00000000..ed27a1dc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/person_report.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.person_delete_options import PersonDeleteOptions +from edu_sharing_client.models.person_delete_result import PersonDeleteResult +from typing import Optional, Set +from typing_extensions import Self + +class PersonReport(BaseModel): + """ + PersonReport + """ # noqa: E501 + options: Optional[PersonDeleteOptions] = None + results: Optional[List[PersonDeleteResult]] = None + __properties: ClassVar[List[str]] = ["options", "results"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PersonReport from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of options + if self.options: + _dict['options'] = self.options.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in results (list) + _items = [] + if self.results: + for _item_results in self.results: + if _item_results: + _items.append(_item_results.to_dict()) + _dict['results'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PersonReport from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "options": PersonDeleteOptions.from_dict(obj["options"]) if obj.get("options") is not None else None, + "results": [PersonDeleteResult.from_dict(_item) for _item in obj["results"]] if obj.get("results") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/plugin_info.py b/edu_sharing_openapi/edu_sharing_client/models/plugin_info.py new file mode 100644 index 00000000..12ef14c6 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/plugin_info.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class PluginInfo(BaseModel): + """ + PluginInfo + """ # noqa: E501 + id: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["id"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PluginInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PluginInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/plugin_status.py b/edu_sharing_openapi/edu_sharing_client/models/plugin_status.py new file mode 100644 index 00000000..dc9922fa --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/plugin_status.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class PluginStatus(BaseModel): + """ + PluginStatus + """ # noqa: E501 + version: Optional[StrictStr] = None + name: Optional[StrictStr] = None + enabled: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["version", "name", "enabled"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of PluginStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of PluginStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "version": obj.get("version"), + "name": obj.get("name"), + "enabled": obj.get("enabled") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/preferences.py b/edu_sharing_openapi/edu_sharing_client/models/preferences.py new file mode 100644 index 00000000..9e3b20ad --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/preferences.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Preferences(BaseModel): + """ + Preferences + """ # noqa: E501 + preferences: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["preferences"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Preferences from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Preferences from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "preferences": obj.get("preferences") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/preview.py b/edu_sharing_openapi/edu_sharing_client/models/preview.py new file mode 100644 index 00000000..df738eaa --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/preview.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictBytes, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class Preview(BaseModel): + """ + Preview + """ # noqa: E501 + is_icon: StrictBool = Field(alias="isIcon") + is_generated: Optional[StrictBool] = Field(default=None, alias="isGenerated") + type: Optional[StrictStr] = None + mimetype: Optional[StrictStr] = None + data: Optional[Union[StrictBytes, StrictStr]] = None + url: StrictStr + width: StrictInt + height: StrictInt + __properties: ClassVar[List[str]] = ["isIcon", "isGenerated", "type", "mimetype", "data", "url", "width", "height"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Preview from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Preview from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "isIcon": obj.get("isIcon"), + "isGenerated": obj.get("isGenerated"), + "type": obj.get("type"), + "mimetype": obj.get("mimetype"), + "data": obj.get("data"), + "url": obj.get("url"), + "width": obj.get("width"), + "height": obj.get("height") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/profile.py b/edu_sharing_openapi/edu_sharing_client/models/profile.py new file mode 100644 index 00000000..cccfbc1f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/profile.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.mediacenter_profile_extension import MediacenterProfileExtension +from typing import Optional, Set +from typing_extensions import Self + +class Profile(BaseModel): + """ + Profile + """ # noqa: E501 + group_email: Optional[StrictStr] = Field(default=None, alias="groupEmail") + mediacenter: Optional[MediacenterProfileExtension] = None + display_name: Optional[StrictStr] = Field(default=None, alias="displayName") + group_type: Optional[StrictStr] = Field(default=None, alias="groupType") + scope_type: Optional[StrictStr] = Field(default=None, alias="scopeType") + __properties: ClassVar[List[str]] = ["groupEmail", "mediacenter", "displayName", "groupType", "scopeType"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Profile from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of mediacenter + if self.mediacenter: + _dict['mediacenter'] = self.mediacenter.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Profile from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "groupEmail": obj.get("groupEmail"), + "mediacenter": MediacenterProfileExtension.from_dict(obj["mediacenter"]) if obj.get("mediacenter") is not None else None, + "displayName": obj.get("displayName"), + "groupType": obj.get("groupType"), + "scopeType": obj.get("scopeType") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/profile_settings.py b/edu_sharing_openapi/edu_sharing_client/models/profile_settings.py new file mode 100644 index 00000000..76ffde99 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/profile_settings.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class ProfileSettings(BaseModel): + """ + ProfileSettings + """ # noqa: E501 + show_email: StrictBool = Field(description="false", alias="showEmail") + __properties: ClassVar[List[str]] = ["showEmail"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ProfileSettings from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ProfileSettings from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "showEmail": obj.get("showEmail") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/propose_for_collection_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/propose_for_collection_event_dto.py new file mode 100644 index 00000000..9ccdaddc --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/propose_for_collection_event_dto.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection_dto import CollectionDTO +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class ProposeForCollectionEventDTO(NotificationEventDTO): + """ + ProposeForCollectionEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + collection: Optional[CollectionDTO] = None + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "collection"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ProposeForCollectionEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + # override the default output from pydantic by calling `to_dict()` of collection + if self.collection: + _dict['collection'] = self.collection.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ProposeForCollectionEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "collection": CollectionDTO.from_dict(obj["collection"]) if obj.get("collection") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/provider.py b/edu_sharing_openapi/edu_sharing_client/models/provider.py new file mode 100644 index 00000000..33c2d48e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/provider.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.location import Location +from typing import Optional, Set +from typing_extensions import Self + +class Provider(BaseModel): + """ + Provider + """ # noqa: E501 + legal_name: Optional[StrictStr] = Field(default=None, alias="legalName") + url: Optional[StrictStr] = None + email: Optional[StrictStr] = None + area_served: Optional[StrictStr] = Field(default=None, alias="areaServed") + location: Optional[Location] = None + __properties: ClassVar[List[str]] = ["legalName", "url", "email", "areaServed", "location"] + + @field_validator('area_served') + def area_served_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['Organization', 'City', 'State', 'Country', 'Continent', 'World']): + raise ValueError("must be one of enum values ('Organization', 'City', 'State', 'Country', 'Continent', 'World')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Provider from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of location + if self.location: + _dict['location'] = self.location.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Provider from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "legalName": obj.get("legalName"), + "url": obj.get("url"), + "email": obj.get("email"), + "areaServed": obj.get("areaServed"), + "location": Location.from_dict(obj["location"]) if obj.get("location") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/query.py b/edu_sharing_openapi/edu_sharing_client/models/query.py new file mode 100644 index 00000000..1d79117b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/query.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.condition import Condition +from typing import Optional, Set +from typing_extensions import Self + +class Query(BaseModel): + """ + Query + """ # noqa: E501 + condition: Optional[Condition] = None + query: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["condition", "query"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Query from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of condition + if self.condition: + _dict['condition'] = self.condition.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Query from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "condition": Condition.from_dict(obj["condition"]) if obj.get("condition") is not None else None, + "query": obj.get("query") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/rating_data.py b/edu_sharing_openapi/edu_sharing_client/models/rating_data.py new file mode 100644 index 00000000..fe102897 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/rating_data.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class RatingData(BaseModel): + """ + RatingData + """ # noqa: E501 + sum: Optional[Union[StrictFloat, StrictInt]] = None + count: Optional[StrictInt] = None + rating: Optional[Union[StrictFloat, StrictInt]] = None + __properties: ClassVar[List[str]] = ["sum", "count", "rating"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RatingData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RatingData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "sum": obj.get("sum"), + "count": obj.get("count"), + "rating": obj.get("rating") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/rating_details.py b/edu_sharing_openapi/edu_sharing_client/models/rating_details.py new file mode 100644 index 00000000..2315df2a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/rating_details.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt +from typing import Any, ClassVar, Dict, List, Optional, Union +from edu_sharing_client.models.rating_data import RatingData +from typing import Optional, Set +from typing_extensions import Self + +class RatingDetails(BaseModel): + """ + RatingDetails + """ # noqa: E501 + overall: Optional[RatingData] = None + affiliation: Optional[Dict[str, RatingData]] = None + user: Optional[Union[StrictFloat, StrictInt]] = None + __properties: ClassVar[List[str]] = ["overall", "affiliation", "user"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RatingDetails from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of overall + if self.overall: + _dict['overall'] = self.overall.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in affiliation (dict) + _field_dict = {} + if self.affiliation: + for _key_affiliation in self.affiliation: + if self.affiliation[_key_affiliation]: + _field_dict[_key_affiliation] = self.affiliation[_key_affiliation].to_dict() + _dict['affiliation'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RatingDetails from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "overall": RatingData.from_dict(obj["overall"]) if obj.get("overall") is not None else None, + "affiliation": dict( + (_k, RatingData.from_dict(_v)) + for _k, _v in obj["affiliation"].items() + ) + if obj.get("affiliation") is not None + else None, + "user": obj.get("user") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/rating_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/rating_event_dto.py new file mode 100644 index 00000000..d8b5d79b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/rating_event_dto.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict, Field, StrictFloat, StrictInt +from typing import Any, ClassVar, Dict, List, Optional, Union +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class RatingEventDTO(NotificationEventDTO): + """ + RatingEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + new_rating: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, alias="newRating") + rating_sum: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, alias="ratingSum") + rating_count: Optional[StrictInt] = Field(default=None, alias="ratingCount") + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "newRating", "ratingSum", "ratingCount"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RatingEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RatingEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "newRating": obj.get("newRating"), + "ratingSum": obj.get("ratingSum"), + "ratingCount": obj.get("ratingCount") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/rating_history.py b/edu_sharing_openapi/edu_sharing_client/models/rating_history.py new file mode 100644 index 00000000..9b5b886a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/rating_history.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.rating_data import RatingData +from typing import Optional, Set +from typing_extensions import Self + +class RatingHistory(BaseModel): + """ + RatingHistory + """ # noqa: E501 + overall: Optional[RatingData] = None + affiliation: Optional[Dict[str, RatingData]] = None + timestamp: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["overall", "affiliation", "timestamp"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RatingHistory from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of overall + if self.overall: + _dict['overall'] = self.overall.to_dict() + # override the default output from pydantic by calling `to_dict()` of each value in affiliation (dict) + _field_dict = {} + if self.affiliation: + for _key_affiliation in self.affiliation: + if self.affiliation[_key_affiliation]: + _field_dict[_key_affiliation] = self.affiliation[_key_affiliation].to_dict() + _dict['affiliation'] = _field_dict + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RatingHistory from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "overall": RatingData.from_dict(obj["overall"]) if obj.get("overall") is not None else None, + "affiliation": dict( + (_k, RatingData.from_dict(_v)) + for _k, _v in obj["affiliation"].items() + ) + if obj.get("affiliation") is not None + else None, + "timestamp": obj.get("timestamp") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/reference_entries.py b/edu_sharing_openapi/edu_sharing_client/models/reference_entries.py new file mode 100644 index 00000000..ee5e7596 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/reference_entries.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.collection_reference import CollectionReference +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class ReferenceEntries(BaseModel): + """ + ReferenceEntries + """ # noqa: E501 + pagination: Optional[Pagination] = None + references: List[CollectionReference] + __properties: ClassVar[List[str]] = ["pagination", "references"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ReferenceEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in references (list) + _items = [] + if self.references: + for _item_references in self.references: + if _item_references: + _items.append(_item_references.to_dict()) + _dict['references'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ReferenceEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "references": [CollectionReference.from_dict(_item) for _item in obj["references"]] if obj.get("references") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/register.py b/edu_sharing_openapi/edu_sharing_client/models/register.py new file mode 100644 index 00000000..e4b8469f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/register.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Register(BaseModel): + """ + Register + """ # noqa: E501 + local: Optional[StrictBool] = None + recover_password: Optional[StrictBool] = Field(default=None, alias="recoverPassword") + login_url: Optional[StrictStr] = Field(default=None, alias="loginUrl") + recover_url: Optional[StrictStr] = Field(default=None, alias="recoverUrl") + required_fields: Optional[List[StrictStr]] = Field(default=None, alias="requiredFields") + __properties: ClassVar[List[str]] = ["local", "recoverPassword", "loginUrl", "recoverUrl", "requiredFields"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Register from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Register from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "local": obj.get("local"), + "recoverPassword": obj.get("recoverPassword"), + "loginUrl": obj.get("loginUrl"), + "recoverUrl": obj.get("recoverUrl"), + "requiredFields": obj.get("requiredFields") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/register_exists.py b/edu_sharing_openapi/edu_sharing_client/models/register_exists.py new file mode 100644 index 00000000..072178d5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/register_exists.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class RegisterExists(BaseModel): + """ + RegisterExists + """ # noqa: E501 + exists: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["exists"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RegisterExists from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RegisterExists from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "exists": obj.get("exists") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/register_information.py b/edu_sharing_openapi/edu_sharing_client/models/register_information.py new file mode 100644 index 00000000..bdf2a6a7 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/register_information.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class RegisterInformation(BaseModel): + """ + RegisterInformation + """ # noqa: E501 + vcard: Optional[StrictStr] = None + first_name: Optional[StrictStr] = Field(default=None, alias="firstName") + last_name: Optional[StrictStr] = Field(default=None, alias="lastName") + email: Optional[StrictStr] = None + password: Optional[StrictStr] = None + organization: Optional[StrictStr] = None + allow_notifications: Optional[StrictBool] = Field(default=None, alias="allowNotifications") + authority_name: Optional[StrictStr] = Field(default=None, alias="authorityName") + __properties: ClassVar[List[str]] = ["vcard", "firstName", "lastName", "email", "password", "organization", "allowNotifications", "authorityName"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RegisterInformation from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RegisterInformation from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "vcard": obj.get("vcard"), + "firstName": obj.get("firstName"), + "lastName": obj.get("lastName"), + "email": obj.get("email"), + "password": obj.get("password"), + "organization": obj.get("organization"), + "allowNotifications": obj.get("allowNotifications"), + "authorityName": obj.get("authorityName") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/registration_url.py b/edu_sharing_openapi/edu_sharing_client/models/registration_url.py new file mode 100644 index 00000000..84493c90 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/registration_url.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class RegistrationUrl(BaseModel): + """ + RegistrationUrl + """ # noqa: E501 + url: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["url"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RegistrationUrl from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RegistrationUrl from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/relation_data.py b/edu_sharing_openapi/edu_sharing_client/models/relation_data.py new file mode 100644 index 00000000..3448bb67 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/relation_data.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.user import User +from typing import Optional, Set +from typing_extensions import Self + +class RelationData(BaseModel): + """ + RelationData + """ # noqa: E501 + node: Optional[Node] = None + creator: Optional[User] = None + timestamp: Optional[datetime] = None + type: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["node", "creator", "timestamp", "type"] + + @field_validator('type') + def type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['isPartOf', 'isBasedOn', 'references', 'hasPart', 'isBasisFor']): + raise ValueError("must be one of enum values ('isPartOf', 'isBasedOn', 'references', 'hasPart', 'isBasisFor')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RelationData from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RelationData from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "node": Node.from_dict(obj["node"]) if obj.get("node") is not None else None, + "creator": User.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "timestamp": obj.get("timestamp"), + "type": obj.get("type") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/remote.py b/edu_sharing_openapi/edu_sharing_client/models/remote.py new file mode 100644 index 00000000..76877157 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/remote.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.repo import Repo +from typing import Optional, Set +from typing_extensions import Self + +class Remote(BaseModel): + """ + Remote + """ # noqa: E501 + repository: Optional[Repo] = None + id: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["repository", "id"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Remote from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of repository + if self.repository: + _dict['repository'] = self.repository.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Remote from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repository": Repo.from_dict(obj["repository"]) if obj.get("repository") is not None else None, + "id": obj.get("id") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/remote_auth_description.py b/edu_sharing_openapi/edu_sharing_client/models/remote_auth_description.py new file mode 100644 index 00000000..8dcb18ad --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/remote_auth_description.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class RemoteAuthDescription(BaseModel): + """ + RemoteAuthDescription + """ # noqa: E501 + url: Optional[StrictStr] = None + token: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["url", "token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RemoteAuthDescription from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
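Remote.to_dict() replaces the nested repository entry with Repo.to_dict(), and Remote.from_dict() rebuilds it through Repo.from_dict(), so camelCase wire names such as isHomeRepo map onto the snake_case model fields in both directions (the Repo model itself is added further down in this patch). A sketch with placeholder identifiers:

from edu_sharing_client.models.remote import Remote

remote = Remote.from_dict({
    "id": "remote-node-id",                                   # placeholder id
    "repository": {"id": "home", "title": "Home repository", "isHomeRepo": True},
})
print(remote.repository.is_home_repo)            # True
print(remote.to_dict()["repository"]["title"])   # Home repository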
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RemoteAuthDescription from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "url": obj.get("url"), + "token": obj.get("token") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/rendering.py b/edu_sharing_openapi/edu_sharing_client/models/rendering.py new file mode 100644 index 00000000..627c4b9d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/rendering.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.rendering_gdpr import RenderingGdpr +from typing import Optional, Set +from typing_extensions import Self + +class Rendering(BaseModel): + """ + Rendering + """ # noqa: E501 + show_preview: Optional[StrictBool] = Field(default=None, alias="showPreview") + show_download_button: Optional[StrictBool] = Field(default=None, alias="showDownloadButton") + prerender: Optional[StrictBool] = None + gdpr: Optional[List[RenderingGdpr]] = None + __properties: ClassVar[List[str]] = ["showPreview", "showDownloadButton", "prerender", "gdpr"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Rendering from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in gdpr (list) + _items = [] + if self.gdpr: + for _item_gdpr in self.gdpr: + if _item_gdpr: + _items.append(_item_gdpr.to_dict()) + _dict['gdpr'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Rendering from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "showPreview": obj.get("showPreview"), + "showDownloadButton": obj.get("showDownloadButton"), + "prerender": obj.get("prerender"), + "gdpr": [RenderingGdpr.from_dict(_item) for _item in obj["gdpr"]] if obj.get("gdpr") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/rendering_details_entry.py b/edu_sharing_openapi/edu_sharing_client/models/rendering_details_entry.py new file mode 100644 index 00000000..5a59a72d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/rendering_details_entry.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.node import Node +from typing import Optional, Set +from typing_extensions import Self + +class RenderingDetailsEntry(BaseModel): + """ + RenderingDetailsEntry + """ # noqa: E501 + details_snippet: StrictStr = Field(alias="detailsSnippet") + mime_type: StrictStr = Field(alias="mimeType") + node: Node + __properties: ClassVar[List[str]] = ["detailsSnippet", "mimeType", "node"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RenderingDetailsEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RenderingDetailsEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "detailsSnippet": obj.get("detailsSnippet"), + "mimeType": obj.get("mimeType"), + "node": Node.from_dict(obj["node"]) if obj.get("node") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/rendering_gdpr.py b/edu_sharing_openapi/edu_sharing_client/models/rendering_gdpr.py new file mode 100644 index 00000000..ebb2f120 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/rendering_gdpr.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class RenderingGdpr(BaseModel): + """ + RenderingGdpr + """ # noqa: E501 + matcher: Optional[StrictStr] = None + name: Optional[StrictStr] = None + privacy_information_url: Optional[StrictStr] = Field(default=None, alias="privacyInformationUrl") + __properties: ClassVar[List[str]] = ["matcher", "name", "privacyInformationUrl"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RenderingGdpr from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
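Rendering.to_dict() serializes each entry of the optional gdpr list through RenderingGdpr.to_dict(), and None-valued flags such as prerender are simply left out of the result. A sketch with invented provider values:

from edu_sharing_client.models.rendering import Rendering
from edu_sharing_client.models.rendering_gdpr import RenderingGdpr

rendering = Rendering(
    show_preview=True,
    gdpr=[RenderingGdpr(
        name="ExampleVideoHost",                              # invented provider
        matcher="video.example.org",
        privacy_information_url="https://example.org/privacy",
    )],
)
print(rendering.to_dict())
# {'showPreview': True, 'gdpr': [{'matcher': 'video.example.org',
#   'name': 'ExampleVideoHost', 'privacyInformationUrl': 'https://example.org/privacy'}]}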
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RenderingGdpr from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "matcher": obj.get("matcher"), + "name": obj.get("name"), + "privacyInformationUrl": obj.get("privacyInformationUrl") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/repo.py b/edu_sharing_openapi/edu_sharing_client/models/repo.py new file mode 100644 index 00000000..eac5a439 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/repo.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Repo(BaseModel): + """ + Repo + """ # noqa: E501 + repository_type: Optional[StrictStr] = Field(default=None, alias="repositoryType") + rendering_supported: Optional[StrictBool] = Field(default=None, alias="renderingSupported") + id: Optional[StrictStr] = None + title: Optional[StrictStr] = None + icon: Optional[StrictStr] = None + logo: Optional[StrictStr] = None + is_home_repo: Optional[StrictBool] = Field(default=None, alias="isHomeRepo") + __properties: ClassVar[List[str]] = ["repositoryType", "renderingSupported", "id", "title", "icon", "logo", "isHomeRepo"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Repo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Repo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repositoryType": obj.get("repositoryType"), + "renderingSupported": obj.get("renderingSupported"), + "id": obj.get("id"), + "title": obj.get("title"), + "icon": obj.get("icon"), + "logo": obj.get("logo"), + "isHomeRepo": obj.get("isHomeRepo") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/repo_entries.py b/edu_sharing_openapi/edu_sharing_client/models/repo_entries.py new file mode 100644 index 00000000..9dd245ce --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/repo_entries.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.repo import Repo +from typing import Optional, Set +from typing_extensions import Self + +class RepoEntries(BaseModel): + """ + RepoEntries + """ # noqa: E501 + repositories: List[Repo] + __properties: ClassVar[List[str]] = ["repositories"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RepoEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in repositories (list) + _items = [] + if self.repositories: + for _item_repositories in self.repositories: + if _item_repositories: + _items.append(_item_repositories.to_dict()) + _dict['repositories'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RepoEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repositories": [Repo.from_dict(_item) for _item in obj["repositories"]] if obj.get("repositories") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/repository_config.py b/edu_sharing_openapi/edu_sharing_client/models/repository_config.py new file mode 100644 index 00000000..fedcc352 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/repository_config.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.frontpage import Frontpage +from typing import Optional, Set +from typing_extensions import Self + +class RepositoryConfig(BaseModel): + """ + RepositoryConfig + """ # noqa: E501 + frontpage: Optional[Frontpage] = None + __properties: ClassVar[List[str]] = ["frontpage"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RepositoryConfig from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of frontpage + if self.frontpage: + _dict['frontpage'] = self.frontpage.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RepositoryConfig from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "frontpage": Frontpage.from_dict(obj["frontpage"]) if obj.get("frontpage") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/repository_version_info.py b/edu_sharing_openapi/edu_sharing_client/models/repository_version_info.py new file mode 100644 index 00000000..6e997843 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/repository_version_info.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.version import Version +from edu_sharing_client.models.version_build import VersionBuild +from edu_sharing_client.models.version_git import VersionGit +from edu_sharing_client.models.version_maven import VersionMaven +from typing import Optional, Set +from typing_extensions import Self + +class RepositoryVersionInfo(BaseModel): + """ + RepositoryVersionInfo + """ # noqa: E501 + version: Optional[Version] = None + maven: Optional[VersionMaven] = None + git: Optional[VersionGit] = None + build: Optional[VersionBuild] = None + __properties: ClassVar[List[str]] = ["version", "maven", "git", "build"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RepositoryVersionInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of version + if self.version: + _dict['version'] = self.version.to_dict() + # override the default output from pydantic by calling `to_dict()` of maven + if self.maven: + _dict['maven'] = self.maven.to_dict() + # override the default output from pydantic by calling `to_dict()` of git + if self.git: + _dict['git'] = self.git.to_dict() + # override the default output from pydantic by calling `to_dict()` of build + if self.build: + _dict['build'] = self.build.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RepositoryVersionInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "version": Version.from_dict(obj["version"]) if obj.get("version") is not None else None, + "maven": VersionMaven.from_dict(obj["maven"]) if obj.get("maven") is not None else None, + "git": VersionGit.from_dict(obj["git"]) if obj.get("git") is not None else None, + "build": VersionBuild.from_dict(obj["build"]) if obj.get("build") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/restore_result.py b/edu_sharing_openapi/edu_sharing_client/models/restore_result.py new file mode 100644 index 00000000..30b870b1 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/restore_result.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class RestoreResult(BaseModel): + """ + RestoreResult + """ # noqa: E501 + archive_node_id: StrictStr = Field(alias="archiveNodeId") + node_id: StrictStr = Field(alias="nodeId") + parent: StrictStr + path: StrictStr + name: StrictStr + restore_status: StrictStr = Field(alias="restoreStatus") + __properties: ClassVar[List[str]] = ["archiveNodeId", "nodeId", "parent", "path", "name", "restoreStatus"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RestoreResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RestoreResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "archiveNodeId": obj.get("archiveNodeId"), + "nodeId": obj.get("nodeId"), + "parent": obj.get("parent"), + "path": obj.get("path"), + "name": obj.get("name"), + "restoreStatus": obj.get("restoreStatus") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/restore_results.py b/edu_sharing_openapi/edu_sharing_client/models/restore_results.py new file mode 100644 index 00000000..eba0914e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/restore_results.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.restore_result import RestoreResult +from typing import Optional, Set +from typing_extensions import Self + +class RestoreResults(BaseModel): + """ + RestoreResults + """ # noqa: E501 + results: List[RestoreResult] + __properties: ClassVar[List[str]] = ["results"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of RestoreResults from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
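RestoreResult declares all six of its fields as required strings, so from_dict() only succeeds when the payload is complete; missing keys are passed through as None and rejected by pydantic. A sketch with placeholder values:

from pydantic import ValidationError
from edu_sharing_client.models.restore_result import RestoreResult

payload = {                                   # all values are placeholders
    "archiveNodeId": "archive-123",
    "nodeId": "node-456",
    "parent": "parent-789",
    "path": "/placeholder/path",
    "name": "lesson-plan.pdf",
    "restoreStatus": "FINE",
}
print(RestoreResult.from_dict(payload).restore_status)   # FINE

try:
    RestoreResult.from_dict({"nodeId": "node-456"})       # five required keys missing
except ValidationError as exc:
    print(len(exc.errors()))                              # 5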
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in results (list) + _items = [] + if self.results: + for _item_results in self.results: + if _item_results: + _items.append(_item_results.to_dict()) + _dict['results'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of RestoreResults from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "results": [RestoreResult.from_dict(_item) for _item in obj["results"]] if obj.get("results") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/search_parameters.py b/edu_sharing_openapi/edu_sharing_client/models/search_parameters.py new file mode 100644 index 00000000..66295fd4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/search_parameters.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria +from typing import Optional, Set +from typing_extensions import Self + +class SearchParameters(BaseModel): + """ + SearchParameters + """ # noqa: E501 + permissions: Optional[List[StrictStr]] = None + resolve_collections: Optional[StrictBool] = Field(default=None, alias="resolveCollections") + resolve_usernames: Optional[StrictBool] = Field(default=None, alias="resolveUsernames") + return_suggestions: Optional[StrictBool] = Field(default=None, alias="returnSuggestions") + excludes: Optional[List[StrictStr]] = None + facets: Optional[List[StrictStr]] = None + facet_min_count: Optional[StrictInt] = Field(default=5, alias="facetMinCount") + facet_limit: Optional[StrictInt] = Field(default=10, alias="facetLimit") + facet_suggest: Optional[StrictStr] = Field(default=None, alias="facetSuggest") + criteria: List[MdsQueryCriteria] + __properties: ClassVar[List[str]] = ["permissions", "resolveCollections", "resolveUsernames", "returnSuggestions", "excludes", "facets", "facetMinCount", "facetLimit", "facetSuggest", "criteria"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SearchParameters from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in criteria (list) + _items = [] + if self.criteria: + for _item_criteria in self.criteria: + if _item_criteria: + _items.append(_item_criteria.to_dict()) + _dict['criteria'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SearchParameters from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "permissions": obj.get("permissions"), + "resolveCollections": obj.get("resolveCollections"), + "resolveUsernames": obj.get("resolveUsernames"), + "returnSuggestions": obj.get("returnSuggestions"), + "excludes": obj.get("excludes"), + "facets": obj.get("facets"), + "facetMinCount": obj.get("facetMinCount") if obj.get("facetMinCount") is not None else 5, + "facetLimit": obj.get("facetLimit") if obj.get("facetLimit") is not None else 10, + "facetSuggest": obj.get("facetSuggest"), + "criteria": [MdsQueryCriteria.from_dict(_item) for _item in obj["criteria"]] if obj.get("criteria") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/search_parameters_facets.py b/edu_sharing_openapi/edu_sharing_client/models/search_parameters_facets.py new file mode 100644 index 00000000..b9c44572 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/search_parameters_facets.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria +from typing import Optional, Set +from typing_extensions import Self + +class SearchParametersFacets(BaseModel): + """ + SearchParametersFacets + """ # noqa: E501 + facets: List[StrictStr] + facet_min_count: Optional[StrictInt] = Field(default=5, alias="facetMinCount") + facet_limit: Optional[StrictInt] = Field(default=10, alias="facetLimit") + facet_suggest: Optional[StrictStr] = Field(default=None, alias="facetSuggest") + criteria: List[MdsQueryCriteria] + __properties: ClassVar[List[str]] = ["facets", "facetMinCount", "facetLimit", "facetSuggest", "criteria"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SearchParametersFacets from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in criteria (list) + _items = [] + if self.criteria: + for _item_criteria in self.criteria: + if _item_criteria: + _items.append(_item_criteria.to_dict()) + _dict['criteria'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SearchParametersFacets from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "facets": obj.get("facets"), + "facetMinCount": obj.get("facetMinCount") if obj.get("facetMinCount") is not None else 5, + "facetLimit": obj.get("facetLimit") if obj.get("facetLimit") is not None else 10, + "facetSuggest": obj.get("facetSuggest"), + "criteria": [MdsQueryCriteria.from_dict(_item) for _item in obj["criteria"]] if obj.get("criteria") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/search_result.py b/edu_sharing_openapi/edu_sharing_client/models/search_result.py new file mode 100644 index 00000000..dfe39346 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/search_result.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
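SearchParametersFacets (like SearchParameters just above it) carries defaults on its facet counters: facetMinCount falls back to 5 and facetLimit to 10, both in the field declarations and again inside from_dict() when the keys are absent; facets and criteria are required, though an empty criteria list is accepted. A sketch using an invented facet name:

from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets

params = SearchParametersFacets.from_dict({
    "facets": ["example:facet"],      # invented facet/property name
    "criteria": [],
})
print(params.facet_min_count, params.facet_limit)   # 5 10
print(params.to_dict())
# {'facets': ['example:facet'], 'facetMinCount': 5, 'facetLimit': 10, 'criteria': []}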
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.facet import Facet +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.pagination import Pagination +from typing import Optional, Set +from typing_extensions import Self + +class SearchResult(BaseModel): + """ + SearchResult + """ # noqa: E501 + nodes: List[Node] + pagination: Pagination + facets: List[Facet] + __properties: ClassVar[List[str]] = ["nodes", "pagination", "facets"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SearchResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in nodes (list) + _items = [] + if self.nodes: + for _item_nodes in self.nodes: + if _item_nodes: + _items.append(_item_nodes.to_dict()) + _dict['nodes'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in facets (list) + _items = [] + if self.facets: + for _item_facets in self.facets: + if _item_facets: + _items.append(_item_facets.to_dict()) + _dict['facets'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SearchResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodes": [Node.from_dict(_item) for _item in obj["nodes"]] if obj.get("nodes") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "facets": [Facet.from_dict(_item) for _item in obj["facets"]] if obj.get("facets") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/search_result_elastic.py b/edu_sharing_openapi/edu_sharing_client/models/search_result_elastic.py new file mode 100644 index 00000000..e62d2204 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/search_result_elastic.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.facet import Facet +from edu_sharing_client.models.pagination import Pagination +from edu_sharing_client.models.suggest import Suggest +from typing import Optional, Set +from typing_extensions import Self + +class SearchResultElastic(BaseModel): + """ + SearchResultElastic + """ # noqa: E501 + suggests: Optional[List[Suggest]] = None + elastic_response: Optional[StrictStr] = Field(default=None, alias="elasticResponse") + nodes: List[Dict[str, Any]] + pagination: Pagination + facets: List[Facet] + ignored: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["suggests", "elasticResponse", "nodes", "pagination", "facets", "ignored"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SearchResultElastic from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in suggests (list) + _items = [] + if self.suggests: + for _item_suggests in self.suggests: + if _item_suggests: + _items.append(_item_suggests.to_dict()) + _dict['suggests'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in facets (list) + _items = [] + if self.facets: + for _item_facets in self.facets: + if _item_facets: + _items.append(_item_facets.to_dict()) + _dict['facets'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SearchResultElastic from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "suggests": [Suggest.from_dict(_item) for _item in obj["suggests"]] if obj.get("suggests") is not None else None, + "elasticResponse": obj.get("elasticResponse"), + "nodes": obj.get("nodes"), + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "facets": [Facet.from_dict(_item) for _item in obj["facets"]] if obj.get("facets") is not None else None, + "ignored": obj.get("ignored") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/search_result_lrmi.py b/edu_sharing_openapi/edu_sharing_client/models/search_result_lrmi.py new file mode 100644 index 00000000..4adac88c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/search_result_lrmi.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.facet import Facet +from edu_sharing_client.models.pagination import Pagination +from edu_sharing_client.models.suggest import Suggest +from typing import Optional, Set +from typing_extensions import Self + +class SearchResultLrmi(BaseModel): + """ + SearchResultLrmi + """ # noqa: E501 + suggests: Optional[List[Suggest]] = None + nodes: List[StrictStr] + pagination: Pagination + facets: List[Facet] + ignored: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["suggests", "nodes", "pagination", "facets", "ignored"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SearchResultLrmi from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in suggests (list) + _items = [] + if self.suggests: + for _item_suggests in self.suggests: + if _item_suggests: + _items.append(_item_suggests.to_dict()) + _dict['suggests'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in facets (list) + _items = [] + if self.facets: + for _item_facets in self.facets: + if _item_facets: + _items.append(_item_facets.to_dict()) + _dict['facets'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SearchResultLrmi from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "suggests": [Suggest.from_dict(_item) for _item in obj["suggests"]] if obj.get("suggests") is not None else None, + "nodes": obj.get("nodes"), + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "facets": [Facet.from_dict(_item) for _item in obj["facets"]] if obj.get("facets") is not None else None, + "ignored": obj.get("ignored") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/search_result_node.py b/edu_sharing_openapi/edu_sharing_client/models/search_result_node.py new file mode 100644 index 00000000..18e10c4e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/search_result_node.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.facet import Facet +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.pagination import Pagination +from edu_sharing_client.models.suggest import Suggest +from typing import Optional, Set +from typing_extensions import Self + +class SearchResultNode(BaseModel): + """ + SearchResultNode + """ # noqa: E501 + suggests: Optional[List[Suggest]] = None + nodes: List[Node] + pagination: Pagination + facets: List[Facet] + ignored: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["suggests", "nodes", "pagination", "facets", "ignored"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SearchResultNode from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in suggests (list) + _items = [] + if self.suggests: + for _item_suggests in self.suggests: + if _item_suggests: + _items.append(_item_suggests.to_dict()) + _dict['suggests'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in nodes (list) + _items = [] + if self.nodes: + for _item_nodes in self.nodes: + if _item_nodes: + _items.append(_item_nodes.to_dict()) + _dict['nodes'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in facets (list) + _items = [] + if self.facets: + for _item_facets in self.facets: + if _item_facets: + _items.append(_item_facets.to_dict()) + _dict['facets'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SearchResultNode from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "suggests": [Suggest.from_dict(_item) for _item in obj["suggests"]] if obj.get("suggests") is not None else None, + "nodes": [Node.from_dict(_item) for _item in obj["nodes"]] if obj.get("nodes") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None, + "facets": [Facet.from_dict(_item) for _item in obj["facets"]] if obj.get("facets") is not None else None, + "ignored": obj.get("ignored") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/search_v_card.py b/edu_sharing_openapi/edu_sharing_client/models/search_v_card.py new file mode 100644 index 00000000..3e0db7fe --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/search_v_card.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class SearchVCard(BaseModel): + """ + SearchVCard + """ # noqa: E501 + vcard: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["vcard"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SearchVCard from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SearchVCard from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "vcard": obj.get("vcard") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/server_update_info.py b/edu_sharing_openapi/edu_sharing_client/models/server_update_info.py new file mode 100644 index 00000000..8f9d9b0b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/server_update_info.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ServerUpdateInfo(BaseModel): + """ + ServerUpdateInfo + """ # noqa: E501 + id: Optional[StrictStr] = None + description: Optional[StrictStr] = None + order: Optional[StrictInt] = None + auto: Optional[StrictBool] = None + testable: Optional[StrictBool] = None + executed_at: Optional[StrictInt] = Field(default=None, alias="executedAt") + __properties: ClassVar[List[str]] = ["id", "description", "order", "auto", "testable", "executedAt"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ServerUpdateInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ServerUpdateInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "description": obj.get("description"), + "order": obj.get("order"), + "auto": obj.get("auto"), + "testable": obj.get("testable"), + "executedAt": obj.get("executedAt") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/service.py b/edu_sharing_openapi/edu_sharing_client/models/service.py new file mode 100644 index 00000000..2aaccc47 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/service.py @@ -0,0 +1,131 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.audience import Audience +from edu_sharing_client.models.interface import Interface +from edu_sharing_client.models.provider import Provider +from typing import Optional, Set +from typing_extensions import Self + +class Service(BaseModel): + """ + Service + """ # noqa: E501 + name: Optional[StrictStr] = None + url: Optional[StrictStr] = None + icon: Optional[StrictStr] = None + logo: Optional[StrictStr] = None + in_language: Optional[StrictStr] = Field(default=None, alias="inLanguage") + type: Optional[StrictStr] = None + description: Optional[StrictStr] = None + audience: Optional[List[Audience]] = None + provider: Optional[Provider] = None + start_date: Optional[StrictStr] = Field(default=None, alias="startDate") + interfaces: Optional[List[Interface]] = None + about: Optional[List[StrictStr]] = None + is_accessible_for_free: Optional[StrictBool] = Field(default=None, alias="isAccessibleForFree") + __properties: ClassVar[List[str]] = ["name", "url", "icon", "logo", "inLanguage", "type", "description", "audience", "provider", "startDate", "interfaces", "about", "isAccessibleForFree"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Service from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in audience (list) + _items = [] + if self.audience: + for _item_audience in self.audience: + if _item_audience: + _items.append(_item_audience.to_dict()) + _dict['audience'] = _items + # override the default output from pydantic by calling `to_dict()` of provider + if self.provider: + _dict['provider'] = self.provider.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in interfaces (list) + _items = [] + if self.interfaces: + for _item_interfaces in self.interfaces: + if _item_interfaces: + _items.append(_item_interfaces.to_dict()) + _dict['interfaces'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Service from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "url": obj.get("url"), + "icon": obj.get("icon"), + "logo": obj.get("logo"), + "inLanguage": obj.get("inLanguage"), + "type": obj.get("type"), + "description": obj.get("description"), + "audience": [Audience.from_dict(_item) for _item in obj["audience"]] if obj.get("audience") is not None else None, + "provider": Provider.from_dict(obj["provider"]) if obj.get("provider") is not None else None, + "startDate": obj.get("startDate"), + "interfaces": [Interface.from_dict(_item) for _item in obj["interfaces"]] if obj.get("interfaces") is not None else None, + "about": obj.get("about"), + "isAccessibleForFree": obj.get("isAccessibleForFree") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/service_instance.py b/edu_sharing_openapi/edu_sharing_client/models/service_instance.py new file mode 100644 index 00000000..af92c738 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/service_instance.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.service_version import ServiceVersion +from typing import Optional, Set +from typing_extensions import Self + +class ServiceInstance(BaseModel): + """ + ServiceInstance + """ # noqa: E501 + version: ServiceVersion + endpoint: StrictStr + __properties: ClassVar[List[str]] = ["version", "endpoint"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ServiceInstance from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of version + if self.version: + _dict['version'] = self.version.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ServiceInstance from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "version": ServiceVersion.from_dict(obj["version"]) if obj.get("version") is not None else None, + "endpoint": obj.get("endpoint") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/service_version.py b/edu_sharing_openapi/edu_sharing_client/models/service_version.py new file mode 100644 index 00000000..d48b6888 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/service_version.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class ServiceVersion(BaseModel): + """ + ServiceVersion + """ # noqa: E501 + repository: Optional[StrictStr] = None + renderservice: Optional[StrictStr] = None + major: StrictInt + minor: StrictInt + __properties: ClassVar[List[str]] = ["repository", "renderservice", "major", "minor"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ServiceVersion from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ServiceVersion from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "repository": obj.get("repository"), + "renderservice": obj.get("renderservice"), + "major": obj.get("major"), + "minor": obj.get("minor") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/services.py b/edu_sharing_openapi/edu_sharing_client/models/services.py new file mode 100644 index 00000000..65d3ceff --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/services.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Services(BaseModel): + """ + Services + """ # noqa: E501 + visualization: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["visualization"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Services from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Services from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "visualization": obj.get("visualization") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/shared_folder_options.py b/edu_sharing_openapi/edu_sharing_client/models/shared_folder_options.py new file mode 100644 index 00000000..65e748bf --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/shared_folder_options.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class SharedFolderOptions(BaseModel): + """ + SharedFolderOptions + """ # noqa: E501 + folders: Optional[StrictStr] = None + private_files: Optional[StrictStr] = Field(default=None, alias="privateFiles") + cc_files: Optional[StrictStr] = Field(default=None, alias="ccFiles") + move: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["folders", "privateFiles", "ccFiles", "move"] + + @field_validator('folders') + def folders_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + @field_validator('private_files') + def private_files_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + @field_validator('cc_files') + def cc_files_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['none', 'assign', 'delete']): + raise ValueError("must be one of enum values ('none', 'assign', 'delete')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SharedFolderOptions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SharedFolderOptions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "folders": obj.get("folders"), + "privateFiles": obj.get("privateFiles"), + "ccFiles": obj.get("ccFiles"), + "move": obj.get("move") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/sharing_info.py b/edu_sharing_openapi/edu_sharing_client/models/sharing_info.py new file mode 100644 index 00000000..c2b7fcb4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/sharing_info.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.person import Person +from typing import Optional, Set +from typing_extensions import Self + +class SharingInfo(BaseModel): + """ + SharingInfo + """ # noqa: E501 + password_matches: Optional[StrictBool] = Field(default=None, alias="passwordMatches") + password: Optional[StrictBool] = None + expired: Optional[StrictBool] = None + invited_by: Optional[Person] = Field(default=None, alias="invitedBy") + node: Optional[Node] = None + __properties: ClassVar[List[str]] = ["passwordMatches", "password", "expired", "invitedBy", "node"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SharingInfo from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of invited_by + if self.invited_by: + _dict['invitedBy'] = self.invited_by.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SharingInfo from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "passwordMatches": obj.get("passwordMatches"), + "password": obj.get("password"), + "expired": obj.get("expired"), + "invitedBy": Person.from_dict(obj["invitedBy"]) if obj.get("invitedBy") is not None else None, + "node": Node.from_dict(obj["node"]) if obj.get("node") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/simple_edit.py b/edu_sharing_openapi/edu_sharing_client/models/simple_edit.py new file mode 100644 index 00000000..d1251a84 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/simple_edit.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.simple_edit_global_groups import SimpleEditGlobalGroups +from edu_sharing_client.models.simple_edit_organization import SimpleEditOrganization +from typing import Optional, Set +from typing_extensions import Self + +class SimpleEdit(BaseModel): + """ + SimpleEdit + """ # noqa: E501 + global_groups: Optional[List[SimpleEditGlobalGroups]] = Field(default=None, alias="globalGroups") + organization: Optional[SimpleEditOrganization] = None + organization_filter: Optional[StrictStr] = Field(default=None, alias="organizationFilter") + licenses: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["globalGroups", "organization", "organizationFilter", "licenses"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SimpleEdit from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in global_groups (list) + _items = [] + if self.global_groups: + for _item_global_groups in self.global_groups: + if _item_global_groups: + _items.append(_item_global_groups.to_dict()) + _dict['globalGroups'] = _items + # override the default output from pydantic by calling `to_dict()` of organization + if self.organization: + _dict['organization'] = self.organization.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SimpleEdit from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "globalGroups": [SimpleEditGlobalGroups.from_dict(_item) for _item in obj["globalGroups"]] if obj.get("globalGroups") is not None else None, + "organization": SimpleEditOrganization.from_dict(obj["organization"]) if obj.get("organization") is not None else None, + "organizationFilter": obj.get("organizationFilter"), + "licenses": obj.get("licenses") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/simple_edit_global_groups.py b/edu_sharing_openapi/edu_sharing_client/models/simple_edit_global_groups.py new file mode 100644 index 00000000..e97c3a2b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/simple_edit_global_groups.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class SimpleEditGlobalGroups(BaseModel): + """ + SimpleEditGlobalGroups + """ # noqa: E501 + toolpermission: Optional[StrictStr] = None + groups: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["toolpermission", "groups"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SimpleEditGlobalGroups from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SimpleEditGlobalGroups from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "toolpermission": obj.get("toolpermission"), + "groups": obj.get("groups") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/simple_edit_organization.py b/edu_sharing_openapi/edu_sharing_client/models/simple_edit_organization.py new file mode 100644 index 00000000..1ce8727a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/simple_edit_organization.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class SimpleEditOrganization(BaseModel): + """ + SimpleEditOrganization + """ # noqa: E501 + group_types: Optional[List[StrictStr]] = Field(default=None, alias="groupTypes") + __properties: ClassVar[List[str]] = ["groupTypes"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SimpleEditOrganization from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SimpleEditOrganization from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "groupTypes": obj.get("groupTypes") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/sort.py b/edu_sharing_openapi/edu_sharing_client/models/sort.py new file mode 100644 index 00000000..176f0439 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/sort.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Sort(BaseModel): + """ + Sort + """ # noqa: E501 + sorted: Optional[StrictBool] = None + empty: Optional[StrictBool] = None + unsorted: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["sorted", "empty", "unsorted"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Sort from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Sort from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "sorted": obj.get("sorted"), + "empty": obj.get("empty"), + "unsorted": obj.get("unsorted") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistic_entity.py b/edu_sharing_openapi/edu_sharing_client/models/statistic_entity.py new file mode 100644 index 00000000..8c74679c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistic_entity.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class StatisticEntity(BaseModel): + """ + StatisticEntity + """ # noqa: E501 + value: StrictStr + count: StrictInt + __properties: ClassVar[List[str]] = ["value", "count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatisticEntity from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatisticEntity from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "value": obj.get("value"), + "count": obj.get("count") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistic_entry.py b/edu_sharing_openapi/edu_sharing_client/models/statistic_entry.py new file mode 100644 index 00000000..a4af5312 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistic_entry.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.statistic_entity import StatisticEntity +from typing import Optional, Set +from typing_extensions import Self + +class StatisticEntry(BaseModel): + """ + StatisticEntry + """ # noqa: E501 + var_property: StrictStr = Field(alias="property") + entities: List[StatisticEntity] + __properties: ClassVar[List[str]] = ["property", "entities"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatisticEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in entities (list) + _items = [] + if self.entities: + for _item_entities in self.entities: + if _item_entities: + _items.append(_item_entities.to_dict()) + _dict['entities'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatisticEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "property": obj.get("property"), + "entities": [StatisticEntity.from_dict(_item) for _item in obj["entities"]] if obj.get("entities") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistics.py b/edu_sharing_openapi/edu_sharing_client/models/statistics.py new file mode 100644 index 00000000..e53b270e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistics.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.statistic_entry import StatisticEntry +from typing import Optional, Set +from typing_extensions import Self + +class Statistics(BaseModel): + """ + Statistics + """ # noqa: E501 + entries: List[StatisticEntry] + __properties: ClassVar[List[str]] = ["entries"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Statistics from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in entries (list) + _items = [] + if self.entries: + for _item_entries in self.entries: + if _item_entries: + _items.append(_item_entries.to_dict()) + _dict['entries'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Statistics from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "entries": [StatisticEntry.from_dict(_item) for _item in obj["entries"]] if obj.get("entries") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistics_global.py b/edu_sharing_openapi/edu_sharing_client/models/statistics_global.py new file mode 100644 index 00000000..252639c2 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistics_global.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.statistics_group import StatisticsGroup +from edu_sharing_client.models.statistics_key_group import StatisticsKeyGroup +from edu_sharing_client.models.statistics_user import StatisticsUser +from typing import Optional, Set +from typing_extensions import Self + +class StatisticsGlobal(BaseModel): + """ + StatisticsGlobal + """ # noqa: E501 + overall: Optional[StatisticsGroup] = None + groups: Optional[List[StatisticsKeyGroup]] = None + user: Optional[StatisticsUser] = None + __properties: ClassVar[List[str]] = ["overall", "groups", "user"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatisticsGlobal from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of overall + if self.overall: + _dict['overall'] = self.overall.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in groups (list) + _items = [] + if self.groups: + for _item_groups in self.groups: + if _item_groups: + _items.append(_item_groups.to_dict()) + _dict['groups'] = _items + # override the default output from pydantic by calling `to_dict()` of user + if self.user: + _dict['user'] = self.user.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatisticsGlobal from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "overall": StatisticsGroup.from_dict(obj["overall"]) if obj.get("overall") is not None else None, + "groups": [StatisticsKeyGroup.from_dict(_item) for _item in obj["groups"]] if obj.get("groups") is not None else None, + "user": StatisticsUser.from_dict(obj["user"]) if obj.get("user") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistics_group.py b/edu_sharing_openapi/edu_sharing_client/models/statistics_group.py new file mode 100644 index 00000000..6534e4d1 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistics_group.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup +from typing import Optional, Set +from typing_extensions import Self + +class StatisticsGroup(BaseModel): + """ + StatisticsGroup + """ # noqa: E501 + count: Optional[StrictInt] = None + sub_groups: Optional[List[StatisticsSubGroup]] = Field(default=None, alias="subGroups") + __properties: ClassVar[List[str]] = ["count", "subGroups"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatisticsGroup from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in sub_groups (list) + _items = [] + if self.sub_groups: + for _item_sub_groups in self.sub_groups: + if _item_sub_groups: + _items.append(_item_sub_groups.to_dict()) + _dict['subGroups'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatisticsGroup from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "count": obj.get("count"), + "subGroups": [StatisticsSubGroup.from_dict(_item) for _item in obj["subGroups"]] if obj.get("subGroups") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistics_key_group.py b/edu_sharing_openapi/edu_sharing_client/models/statistics_key_group.py new file mode 100644 index 00000000..b6cae884 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistics_key_group.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup +from typing import Optional, Set +from typing_extensions import Self + +class StatisticsKeyGroup(BaseModel): + """ + StatisticsKeyGroup + """ # noqa: E501 + key: Optional[StrictStr] = None + display_name: Optional[StrictStr] = Field(default=None, alias="displayName") + count: Optional[StrictInt] = None + sub_groups: Optional[List[StatisticsSubGroup]] = Field(default=None, alias="subGroups") + __properties: ClassVar[List[str]] = ["key", "displayName", "count", "subGroups"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatisticsKeyGroup from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in sub_groups (list) + _items = [] + if self.sub_groups: + for _item_sub_groups in self.sub_groups: + if _item_sub_groups: + _items.append(_item_sub_groups.to_dict()) + _dict['subGroups'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatisticsKeyGroup from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "displayName": obj.get("displayName"), + "count": obj.get("count"), + "subGroups": [StatisticsSubGroup.from_dict(_item) for _item in obj["subGroups"]] if obj.get("subGroups") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistics_sub_group.py b/edu_sharing_openapi/edu_sharing_client/models/statistics_sub_group.py new file mode 100644 index 00000000..fca54e2c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistics_sub_group.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.sub_group_item import SubGroupItem +from typing import Optional, Set +from typing_extensions import Self + +class StatisticsSubGroup(BaseModel): + """ + StatisticsSubGroup + """ # noqa: E501 + id: Optional[StrictStr] = None + count: Optional[List[SubGroupItem]] = None + __properties: ClassVar[List[str]] = ["id", "count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatisticsSubGroup from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in count (list) + _items = [] + if self.count: + for _item_count in self.count: + if _item_count: + _items.append(_item_count.to_dict()) + _dict['count'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatisticsSubGroup from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "count": [SubGroupItem.from_dict(_item) for _item in obj["count"]] if obj.get("count") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/statistics_user.py b/edu_sharing_openapi/edu_sharing_client/models/statistics_user.py new file mode 100644 index 00000000..9e75105b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/statistics_user.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class StatisticsUser(BaseModel): + """ + StatisticsUser + """ # noqa: E501 + count: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StatisticsUser from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StatisticsUser from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "count": obj.get("count") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/stored_service.py b/edu_sharing_openapi/edu_sharing_client/models/stored_service.py new file mode 100644 index 00000000..71712b26 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/stored_service.py @@ -0,0 +1,133 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.audience import Audience +from edu_sharing_client.models.interface import Interface +from edu_sharing_client.models.provider import Provider +from typing import Optional, Set +from typing_extensions import Self + +class StoredService(BaseModel): + """ + StoredService + """ # noqa: E501 + name: Optional[StrictStr] = None + url: Optional[StrictStr] = None + icon: Optional[StrictStr] = None + logo: Optional[StrictStr] = None + in_language: Optional[StrictStr] = Field(default=None, alias="inLanguage") + type: Optional[StrictStr] = None + description: Optional[StrictStr] = None + audience: Optional[List[Audience]] = None + provider: Optional[Provider] = None + start_date: Optional[StrictStr] = Field(default=None, alias="startDate") + interfaces: Optional[List[Interface]] = None + about: Optional[List[StrictStr]] = None + id: Optional[StrictStr] = None + is_accessible_for_free: Optional[StrictBool] = Field(default=None, alias="isAccessibleForFree") + __properties: ClassVar[List[str]] = ["name", "url", "icon", "logo", "inLanguage", "type", "description", "audience", "provider", "startDate", "interfaces", "about", "id", "isAccessibleForFree"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StoredService from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in audience (list) + _items = [] + if self.audience: + for _item_audience in self.audience: + if _item_audience: + _items.append(_item_audience.to_dict()) + _dict['audience'] = _items + # override the default output from pydantic by calling `to_dict()` of provider + if self.provider: + _dict['provider'] = self.provider.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in interfaces (list) + _items = [] + if self.interfaces: + for _item_interfaces in self.interfaces: + if _item_interfaces: + _items.append(_item_interfaces.to_dict()) + _dict['interfaces'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StoredService from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "name": obj.get("name"), + "url": obj.get("url"), + "icon": obj.get("icon"), + "logo": obj.get("logo"), + "inLanguage": obj.get("inLanguage"), + "type": obj.get("type"), + "description": obj.get("description"), + "audience": [Audience.from_dict(_item) for _item in obj["audience"]] if obj.get("audience") is not None else None, + "provider": Provider.from_dict(obj["provider"]) if obj.get("provider") is not None else None, + "startDate": obj.get("startDate"), + "interfaces": [Interface.from_dict(_item) for _item in obj["interfaces"]] if obj.get("interfaces") is not None else None, + "about": obj.get("about"), + "id": obj.get("id"), + "isAccessibleForFree": obj.get("isAccessibleForFree") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/stream.py b/edu_sharing_openapi/edu_sharing_client/models/stream.py new file mode 100644 index 00000000..d2e0345c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/stream.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Stream(BaseModel): + """ + Stream + """ # noqa: E501 + enabled: Optional[StrictBool] = None + __properties: ClassVar[List[str]] = ["enabled"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Stream from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Stream from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "enabled": obj.get("enabled") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/stream_entry.py b/edu_sharing_openapi/edu_sharing_client/models/stream_entry.py new file mode 100644 index 00000000..9d4ba37c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/stream_entry.py @@ -0,0 +1,113 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.user_simple import UserSimple +from typing import Optional, Set +from typing_extensions import Self + +class StreamEntry(BaseModel): + """ + StreamEntry + """ # noqa: E501 + id: Optional[StrictStr] = None + description: Optional[StrictStr] = None + nodes: Optional[List[Node]] = None + properties: Optional[Dict[str, Dict[str, Any]]] = None + priority: Optional[StrictInt] = None + author: Optional[UserSimple] = None + created: Optional[StrictInt] = None + modified: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["id", "description", "nodes", "properties", "priority", "author", "created", "modified"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in nodes (list) + _items = [] + if self.nodes: + for _item_nodes in self.nodes: + if _item_nodes: + _items.append(_item_nodes.to_dict()) + _dict['nodes'] = _items + # override the default output from pydantic by calling `to_dict()` of author + if self.author: + _dict['author'] = self.author.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "description": obj.get("description"), + "nodes": [Node.from_dict(_item) for _item in obj["nodes"]] if obj.get("nodes") is not None else None, + "properties": obj.get("properties"), + "priority": obj.get("priority"), + "author": UserSimple.from_dict(obj["author"]) if obj.get("author") is not None else None, + "created": obj.get("created"), + "modified": obj.get("modified") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/stream_entry_input.py b/edu_sharing_openapi/edu_sharing_client/models/stream_entry_input.py new file mode 100644 index 00000000..eaff03ec --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/stream_entry_input.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class StreamEntryInput(BaseModel): + """ + StreamEntryInput + """ # noqa: E501 + id: Optional[StrictStr] = None + title: Optional[StrictStr] = None + description: Optional[StrictStr] = None + nodes: Optional[List[StrictStr]] = None + properties: Optional[Dict[str, Dict[str, Any]]] = None + priority: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["id", "title", "description", "nodes", "properties", "priority"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamEntryInput from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. 
Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamEntryInput from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "title": obj.get("title"), + "description": obj.get("description"), + "nodes": obj.get("nodes"), + "properties": obj.get("properties"), + "priority": obj.get("priority") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/stream_list.py b/edu_sharing_openapi/edu_sharing_client/models/stream_list.py new file mode 100644 index 00000000..a3780c79 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/stream_list.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.pagination import Pagination +from edu_sharing_client.models.stream_entry import StreamEntry +from typing import Optional, Set +from typing_extensions import Self + +class StreamList(BaseModel): + """ + StreamList + """ # noqa: E501 + stream: Optional[List[StreamEntry]] = None + pagination: Optional[Pagination] = None + __properties: ClassVar[List[str]] = ["stream", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of StreamList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in stream (list) + _items = [] + if self.stream: + for _item_stream in self.stream: + if _item_stream: + _items.append(_item_stream.to_dict()) + _dict['stream'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of StreamList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "stream": [StreamEntry.from_dict(_item) for _item in obj["stream"]] if obj.get("stream") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/sub_group_item.py b/edu_sharing_openapi/edu_sharing_client/models/sub_group_item.py new file mode 100644 index 00000000..fa4c871a --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/sub_group_item.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class SubGroupItem(BaseModel): + """ + SubGroupItem + """ # noqa: E501 + key: Optional[StrictStr] = None + display_name: Optional[StrictStr] = Field(default=None, alias="displayName") + count: Optional[StrictInt] = None + __properties: ClassVar[List[str]] = ["key", "displayName", "count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SubGroupItem from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
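+
+        For example (with a made-up key), optional fields that were never set
+        are omitted from the result::
+
+            SubGroupItem(key="CC_BY").to_dict()   # -> {'key': 'CC_BY'}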
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SubGroupItem from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "key": obj.get("key"), + "displayName": obj.get("displayName"), + "count": obj.get("count") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/suggest.py b/edu_sharing_openapi/edu_sharing_client/models/suggest.py new file mode 100644 index 00000000..f0014f7c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/suggest.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class Suggest(BaseModel): + """ + Suggest + """ # noqa: E501 + text: StrictStr = Field(description="suggested text") + highlighted: Optional[StrictStr] = Field(default=None, description="suggested text with corrected words highlighted") + score: Union[StrictFloat, StrictInt] = Field(description="score of the suggestion") + __properties: ClassVar[List[str]] = ["text", "highlighted", "score"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Suggest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Suggest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "text": obj.get("text"), + "highlighted": obj.get("highlighted"), + "score": obj.get("score") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/suggestion.py b/edu_sharing_openapi/edu_sharing_client/models/suggestion.py new file mode 100644 index 00000000..2304d000 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/suggestion.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Suggestion(BaseModel): + """ + Suggestion + """ # noqa: E501 + replacement_string: StrictStr = Field(alias="replacementString") + display_string: StrictStr = Field(alias="displayString") + key: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["replacementString", "displayString", "key"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Suggestion from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Suggestion from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "replacementString": obj.get("replacementString"), + "displayString": obj.get("displayString"), + "key": obj.get("key") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/suggestion_param.py b/edu_sharing_openapi/edu_sharing_client/models/suggestion_param.py new file mode 100644 index 00000000..9eb4e7a7 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/suggestion_param.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria +from edu_sharing_client.models.value_parameters import ValueParameters +from typing import Optional, Set +from typing_extensions import Self + +class SuggestionParam(BaseModel): + """ + SuggestionParam + """ # noqa: E501 + value_parameters: Optional[ValueParameters] = Field(default=None, alias="valueParameters") + criteria: Optional[List[MdsQueryCriteria]] = None + __properties: ClassVar[List[str]] = ["valueParameters", "criteria"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of SuggestionParam from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of value_parameters + if self.value_parameters: + _dict['valueParameters'] = self.value_parameters.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in criteria (list) + _items = [] + if self.criteria: + for _item_criteria in self.criteria: + if _item_criteria: + _items.append(_item_criteria.to_dict()) + _dict['criteria'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of SuggestionParam from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "valueParameters": ValueParameters.from_dict(obj["valueParameters"]) if obj.get("valueParameters") is not None else None, + "criteria": [MdsQueryCriteria.from_dict(_item) for _item in obj["criteria"]] if obj.get("criteria") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/suggestions.py b/edu_sharing_openapi/edu_sharing_client/models/suggestions.py new file mode 100644 index 00000000..a2fb3a11 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/suggestions.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.suggestion import Suggestion +from typing import Optional, Set +from typing_extensions import Self + +class Suggestions(BaseModel): + """ + Suggestions + """ # noqa: E501 + values: List[Suggestion] + __properties: ClassVar[List[str]] = ["values"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Suggestions from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in values (list) + _items = [] + if self.values: + for _item_values in self.values: + if _item_values: + _items.append(_item_values.to_dict()) + _dict['values'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Suggestions from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "values": [Suggestion.from_dict(_item) for _item in obj["values"]] if obj.get("values") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/tool.py b/edu_sharing_openapi/edu_sharing_client/models/tool.py new file mode 100644 index 00000000..5e569bef --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/tool.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Tool(BaseModel): + """ + Tool + """ # noqa: E501 + domain: Optional[StrictStr] = None + description: Optional[StrictStr] = None + app_id: Optional[StrictStr] = Field(default=None, alias="appId") + name: Optional[StrictStr] = None + logo: Optional[StrictStr] = None + custom_content_option: Optional[StrictBool] = Field(default=None, alias="customContentOption") + __properties: ClassVar[List[str]] = ["domain", "description", "appId", "name", "logo", "customContentOption"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Tool from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Tool from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "domain": obj.get("domain"), + "description": obj.get("description"), + "appId": obj.get("appId"), + "name": obj.get("name"), + "logo": obj.get("logo"), + "customContentOption": obj.get("customContentOption") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/tools.py b/edu_sharing_openapi/edu_sharing_client/models/tools.py new file mode 100644 index 00000000..db3788c0 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/tools.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.tool import Tool +from typing import Optional, Set +from typing_extensions import Self + +class Tools(BaseModel): + """ + Tools + """ # noqa: E501 + tools: Optional[List[Tool]] = None + __properties: ClassVar[List[str]] = ["tools"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Tools from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in tools (list) + _items = [] + if self.tools: + for _item_tools in self.tools: + if _item_tools: + _items.append(_item_tools.to_dict()) + _dict['tools'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Tools from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "tools": [Tool.from_dict(_item) for _item in obj["tools"]] if obj.get("tools") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/tracking.py b/edu_sharing_openapi/edu_sharing_client/models/tracking.py new file mode 100644 index 00000000..b3fa7e25 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/tracking.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.tracking_authority import TrackingAuthority +from typing import Optional, Set +from typing_extensions import Self + +class Tracking(BaseModel): + """ + Tracking + """ # noqa: E501 + counts: Optional[Dict[str, StrictInt]] = None + var_date: Optional[StrictStr] = Field(default=None, alias="date") + fields: Optional[Dict[str, Dict[str, Any]]] = None + groups: Optional[Dict[str, Dict[str, Dict[str, StrictInt]]]] = None + authority: Optional[TrackingAuthority] = None + __properties: ClassVar[List[str]] = ["counts", "date", "fields", "groups", "authority"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Tracking from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of authority + if self.authority: + _dict['authority'] = self.authority.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Tracking from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "counts": obj.get("counts"), + "date": obj.get("date"), + "fields": obj.get("fields"), + "groups": obj.get("groups"), + "authority": TrackingAuthority.from_dict(obj["authority"]) if obj.get("authority") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/tracking_authority.py b/edu_sharing_openapi/edu_sharing_client/models/tracking_authority.py new file mode 100644 index 00000000..1d494e32 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/tracking_authority.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.group import Group +from edu_sharing_client.models.organization import Organization +from typing import Optional, Set +from typing_extensions import Self + +class TrackingAuthority(BaseModel): + """ + TrackingAuthority + """ # noqa: E501 + hash: Optional[StrictStr] = None + organization: Optional[List[Organization]] = None + mediacenter: Optional[List[Group]] = None + __properties: ClassVar[List[str]] = ["hash", "organization", "mediacenter"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of TrackingAuthority from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in organization (list) + _items = [] + if self.organization: + for _item_organization in self.organization: + if _item_organization: + _items.append(_item_organization.to_dict()) + _dict['organization'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in mediacenter (list) + _items = [] + if self.mediacenter: + for _item_mediacenter in self.mediacenter: + if _item_mediacenter: + _items.append(_item_mediacenter.to_dict()) + _dict['mediacenter'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of TrackingAuthority from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "hash": obj.get("hash"), + "organization": [Organization.from_dict(_item) for _item in obj["organization"]] if obj.get("organization") is not None else None, + "mediacenter": [Group.from_dict(_item) for _item in obj["mediacenter"]] if obj.get("mediacenter") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/tracking_node.py b/edu_sharing_openapi/edu_sharing_client/models/tracking_node.py new file mode 100644 index 00000000..01ff307d --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/tracking_node.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from edu_sharing_client.models.tracking_authority import TrackingAuthority +from typing import Optional, Set +from typing_extensions import Self + +class TrackingNode(BaseModel): + """ + TrackingNode + """ # noqa: E501 + counts: Optional[Dict[str, StrictInt]] = None + var_date: Optional[StrictStr] = Field(default=None, alias="date") + fields: Optional[Dict[str, Dict[str, Any]]] = None + groups: Optional[Dict[str, Dict[str, Dict[str, StrictInt]]]] = None + node: Optional[Node] = None + authority: Optional[TrackingAuthority] = None + __properties: ClassVar[List[str]] = ["counts", "date", "fields", "groups", "node", "authority"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of TrackingNode from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + # override the default output from pydantic by calling `to_dict()` of authority + if self.authority: + _dict['authority'] = self.authority.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of TrackingNode from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "counts": obj.get("counts"), + "date": obj.get("date"), + "fields": obj.get("fields"), + "groups": obj.get("groups"), + "node": Node.from_dict(obj["node"]) if obj.get("node") is not None else None, + "authority": TrackingAuthority.from_dict(obj["authority"]) if obj.get("authority") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/upload_result.py b/edu_sharing_openapi/edu_sharing_client/models/upload_result.py new file mode 100644 index 00000000..a46ba77f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/upload_result.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UploadResult(BaseModel): + """ + UploadResult + """ # noqa: E501 + file: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["file"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UploadResult from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UploadResult from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "file": obj.get("file") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/usage.py b/edu_sharing_openapi/edu_sharing_client/models/usage.py new file mode 100644 index 00000000..b39cd70f --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/usage.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.parameters import Parameters +from typing import Optional, Set +from typing_extensions import Self + +class Usage(BaseModel): + """ + Usage + """ # noqa: E501 + from_used: Optional[datetime] = Field(default=None, alias="fromUsed") + to_used: Optional[datetime] = Field(default=None, alias="toUsed") + usage_counter: Optional[StrictInt] = Field(default=None, alias="usageCounter") + app_subtype: Optional[StrictStr] = Field(default=None, alias="appSubtype") + app_type: Optional[StrictStr] = Field(default=None, alias="appType") + type: Optional[StrictStr] = None + created: Optional[datetime] = None + modified: Optional[datetime] = None + app_user: StrictStr = Field(alias="appUser") + app_user_mail: StrictStr = Field(alias="appUserMail") + course_id: StrictStr = Field(alias="courseId") + distinct_persons: Optional[StrictInt] = Field(default=None, alias="distinctPersons") + app_id: StrictStr = Field(alias="appId") + node_id: StrictStr = Field(alias="nodeId") + parent_node_id: StrictStr = Field(alias="parentNodeId") + usage_version: StrictStr = Field(alias="usageVersion") + usage_xml_params: Optional[Parameters] = Field(default=None, alias="usageXmlParams") + usage_xml_params_raw: Optional[StrictStr] = Field(default=None, alias="usageXmlParamsRaw") + resource_id: StrictStr = Field(alias="resourceId") + guid: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["fromUsed", "toUsed", "usageCounter", "appSubtype", "appType", "type", "created", "modified", "appUser", "appUserMail", "courseId", "distinctPersons", "appId", "nodeId", "parentNodeId", "usageVersion", "usageXmlParams", "usageXmlParamsRaw", "resourceId", "guid"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Usage from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of usage_xml_params + if self.usage_xml_params: + _dict['usageXmlParams'] = self.usage_xml_params.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Usage from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "fromUsed": obj.get("fromUsed"), + "toUsed": obj.get("toUsed"), + "usageCounter": obj.get("usageCounter"), + "appSubtype": obj.get("appSubtype"), + "appType": obj.get("appType"), + "type": obj.get("type"), + "created": obj.get("created"), + "modified": obj.get("modified"), + "appUser": obj.get("appUser"), + "appUserMail": obj.get("appUserMail"), + "courseId": obj.get("courseId"), + "distinctPersons": obj.get("distinctPersons"), + "appId": obj.get("appId"), + "nodeId": obj.get("nodeId"), + "parentNodeId": obj.get("parentNodeId"), + "usageVersion": obj.get("usageVersion"), + "usageXmlParams": Parameters.from_dict(obj["usageXmlParams"]) if obj.get("usageXmlParams") is not None else None, + "usageXmlParamsRaw": obj.get("usageXmlParamsRaw"), + "resourceId": obj.get("resourceId"), + "guid": obj.get("guid") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/usages.py b/edu_sharing_openapi/edu_sharing_client/models/usages.py new file mode 100644 index 00000000..3633bb3e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/usages.py @@ -0,0 +1,95 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.usage import Usage +from typing import Optional, Set +from typing_extensions import Self + +class Usages(BaseModel): + """ + Usages + """ # noqa: E501 + usages: Optional[List[Usage]] = None + __properties: ClassVar[List[str]] = ["usages"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Usages from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in usages (list) + _items = [] + if self.usages: + for _item_usages in self.usages: + if _item_usages: + _items.append(_item_usages.to_dict()) + _dict['usages'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Usages from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "usages": [Usage.from_dict(_item) for _item in obj["usages"]] if obj.get("usages") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user.py b/edu_sharing_openapi/edu_sharing_client/models/user.py new file mode 100644 index 00000000..83d256d4 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_ref import NodeRef +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.models.user_profile import UserProfile +from edu_sharing_client.models.user_quota import UserQuota +from edu_sharing_client.models.user_status import UserStatus +from typing import Optional, Set +from typing_extensions import Self + +class User(BaseModel): + """ + User + """ # noqa: E501 + properties: Optional[Dict[str, List[StrictStr]]] = None + editable: Optional[StrictBool] = None + status: Optional[UserStatus] = None + organizations: Optional[List[Organization]] = None + quota: Optional[UserQuota] = None + authority_name: StrictStr = Field(alias="authorityName") + authority_type: Optional[StrictStr] = Field(default=None, alias="authorityType") + user_name: Optional[StrictStr] = Field(default=None, alias="userName") + profile: Optional[UserProfile] = None + home_folder: NodeRef = Field(alias="homeFolder") + shared_folders: Optional[List[NodeRef]] = Field(default=None, alias="sharedFolders") + __properties: ClassVar[List[str]] = ["properties", "editable", "status", "organizations", "quota", "authorityName", "authorityType", "userName", "profile", "homeFolder", "sharedFolders"] + + @field_validator('authority_type') + def authority_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST']): + raise ValueError("must be one of enum values ('USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic 
v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of User from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in organizations (list) + _items = [] + if self.organizations: + for _item_organizations in self.organizations: + if _item_organizations: + _items.append(_item_organizations.to_dict()) + _dict['organizations'] = _items + # override the default output from pydantic by calling `to_dict()` of quota + if self.quota: + _dict['quota'] = self.quota.to_dict() + # override the default output from pydantic by calling `to_dict()` of profile + if self.profile: + _dict['profile'] = self.profile.to_dict() + # override the default output from pydantic by calling `to_dict()` of home_folder + if self.home_folder: + _dict['homeFolder'] = self.home_folder.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in shared_folders (list) + _items = [] + if self.shared_folders: + for _item_shared_folders in self.shared_folders: + if _item_shared_folders: + _items.append(_item_shared_folders.to_dict()) + _dict['sharedFolders'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of User from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "properties": obj.get("properties"), + "editable": obj.get("editable"), + "status": UserStatus.from_dict(obj["status"]) if obj.get("status") is not None else None, + "organizations": [Organization.from_dict(_item) for _item in obj["organizations"]] if obj.get("organizations") is not None else None, + "quota": UserQuota.from_dict(obj["quota"]) if obj.get("quota") is not None else None, + "authorityName": obj.get("authorityName"), + "authorityType": obj.get("authorityType"), + "userName": obj.get("userName"), + "profile": UserProfile.from_dict(obj["profile"]) if obj.get("profile") is not None else None, + "homeFolder": NodeRef.from_dict(obj["homeFolder"]) if obj.get("homeFolder") is not None else None, + "sharedFolders": [NodeRef.from_dict(_item) for _item in obj["sharedFolders"]] if obj.get("sharedFolders") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_credential.py b/edu_sharing_openapi/edu_sharing_client/models/user_credential.py new file mode 100644 index 00000000..6beeda4b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_credential.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserCredential(BaseModel): + """ + UserCredential + """ # noqa: E501 + old_password: Optional[StrictStr] = Field(default=None, alias="oldPassword") + new_password: StrictStr = Field(alias="newPassword") + __properties: ClassVar[List[str]] = ["oldPassword", "newPassword"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserCredential from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserCredential from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "oldPassword": obj.get("oldPassword"), + "newPassword": obj.get("newPassword") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_data_dto.py b/edu_sharing_openapi/edu_sharing_client/models/user_data_dto.py new file mode 100644 index 00000000..f5e96117 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_data_dto.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserDataDTO(BaseModel): + """ + UserDataDTO + """ # noqa: E501 + id: Optional[StrictStr] = None + first_name: Optional[StrictStr] = Field(default=None, alias="firstName") + last_name: Optional[StrictStr] = Field(default=None, alias="lastName") + mailbox: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["id", "firstName", "lastName", "mailbox"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserDataDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserDataDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "firstName": obj.get("firstName"), + "lastName": obj.get("lastName"), + "mailbox": obj.get("mailbox") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_entries.py b/edu_sharing_openapi/edu_sharing_client/models/user_entries.py new file mode 100644 index 00000000..d152c084 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_entries.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from edu_sharing_client.models.pagination import Pagination +from edu_sharing_client.models.user_simple import UserSimple +from typing import Optional, Set +from typing_extensions import Self + +class UserEntries(BaseModel): + """ + UserEntries + """ # noqa: E501 + users: List[UserSimple] + pagination: Pagination + __properties: ClassVar[List[str]] = ["users", "pagination"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserEntries from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in users (list) + _items = [] + if self.users: + for _item_users in self.users: + if _item_users: + _items.append(_item_users.to_dict()) + _dict['users'] = _items + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict['pagination'] = self.pagination.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserEntries from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "users": [UserSimple.from_dict(_item) for _item in obj["users"]] if obj.get("users") is not None else None, + "pagination": Pagination.from_dict(obj["pagination"]) if obj.get("pagination") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_entry.py b/edu_sharing_openapi/edu_sharing_client/models/user_entry.py new file mode 100644 index 00000000..31fe2977 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_entry.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.user import User +from typing import Optional, Set +from typing_extensions import Self + +class UserEntry(BaseModel): + """ + UserEntry + """ # noqa: E501 + edit_profile: Optional[StrictBool] = Field(default=None, alias="editProfile") + person: User + __properties: ClassVar[List[str]] = ["editProfile", "person"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserEntry from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of person + if self.person: + _dict['person'] = self.person.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserEntry from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "editProfile": obj.get("editProfile"), + "person": User.from_dict(obj["person"]) if obj.get("person") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_profile.py b/edu_sharing_openapi/edu_sharing_client/models/user_profile.py new file mode 100644 index 00000000..b27e3302 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_profile.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserProfile(BaseModel): + """ + UserProfile + """ # noqa: E501 + primary_affiliation: Optional[StrictStr] = Field(default=None, alias="primaryAffiliation") + skills: Optional[List[StrictStr]] = None + types: Optional[List[StrictStr]] = None + vcard: Optional[StrictStr] = None + type: Optional[List[StrictStr]] = None + first_name: Optional[StrictStr] = Field(default=None, alias="firstName") + last_name: Optional[StrictStr] = Field(default=None, alias="lastName") + email: Optional[StrictStr] = None + avatar: Optional[StrictStr] = None + about: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["primaryAffiliation", "skills", "types", "vcard", "type", "firstName", "lastName", "email", "avatar", "about"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserProfile from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserProfile from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "primaryAffiliation": obj.get("primaryAffiliation"), + "skills": obj.get("skills"), + "types": obj.get("types"), + "vcard": obj.get("vcard"), + "type": obj.get("type"), + "firstName": obj.get("firstName"), + "lastName": obj.get("lastName"), + "email": obj.get("email"), + "avatar": obj.get("avatar"), + "about": obj.get("about") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_profile_app_auth.py b/edu_sharing_openapi/edu_sharing_client/models/user_profile_app_auth.py new file mode 100644 index 00000000..30f3a35c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_profile_app_auth.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserProfileAppAuth(BaseModel): + """ + UserProfileAppAuth + """ # noqa: E501 + primary_affiliation: Optional[StrictStr] = Field(default=None, alias="primaryAffiliation") + skills: Optional[List[StrictStr]] = None + types: Optional[List[StrictStr]] = None + extended_attributes: Optional[Dict[str, List[StrictStr]]] = Field(default=None, alias="extendedAttributes") + vcard: Optional[StrictStr] = None + type: Optional[List[StrictStr]] = None + first_name: Optional[StrictStr] = Field(default=None, alias="firstName") + last_name: Optional[StrictStr] = Field(default=None, alias="lastName") + email: Optional[StrictStr] = None + avatar: Optional[StrictStr] = None + about: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["primaryAffiliation", "skills", "types", "extendedAttributes", "vcard", "type", "firstName", "lastName", "email", "avatar", "about"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserProfileAppAuth from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserProfileAppAuth from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "primaryAffiliation": obj.get("primaryAffiliation"), + "skills": obj.get("skills"), + "types": obj.get("types"), + "extendedAttributes": obj.get("extendedAttributes"), + "vcard": obj.get("vcard"), + "type": obj.get("type"), + "firstName": obj.get("firstName"), + "lastName": obj.get("lastName"), + "email": obj.get("email"), + "avatar": obj.get("avatar"), + "about": obj.get("about") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_profile_edit.py b/edu_sharing_openapi/edu_sharing_client/models/user_profile_edit.py new file mode 100644 index 00000000..c785eda5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_profile_edit.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserProfileEdit(BaseModel): + """ + UserProfileEdit + """ # noqa: E501 + primary_affiliation: Optional[StrictStr] = Field(default=None, alias="primaryAffiliation") + skills: Optional[List[StrictStr]] = None + types: Optional[List[StrictStr]] = None + size_quota: Optional[StrictInt] = Field(default=None, alias="sizeQuota") + vcard: Optional[StrictStr] = None + type: Optional[List[StrictStr]] = None + first_name: Optional[StrictStr] = Field(default=None, alias="firstName") + last_name: Optional[StrictStr] = Field(default=None, alias="lastName") + email: Optional[StrictStr] = None + avatar: Optional[StrictStr] = None + about: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["primaryAffiliation", "skills", "types", "sizeQuota", "vcard", "type", "firstName", "lastName", "email", "avatar", "about"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserProfileEdit from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserProfileEdit from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "primaryAffiliation": obj.get("primaryAffiliation"), + "skills": obj.get("skills"), + "types": obj.get("types"), + "sizeQuota": obj.get("sizeQuota"), + "vcard": obj.get("vcard"), + "type": obj.get("type"), + "firstName": obj.get("firstName"), + "lastName": obj.get("lastName"), + "email": obj.get("email"), + "avatar": obj.get("avatar"), + "about": obj.get("about") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_quota.py b/edu_sharing_openapi/edu_sharing_client/models/user_quota.py new file mode 100644 index 00000000..99a6501c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_quota.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserQuota(BaseModel): + """ + UserQuota + """ # noqa: E501 + enabled: Optional[StrictBool] = None + size_current: Optional[StrictInt] = Field(default=None, alias="sizeCurrent") + size_quota: Optional[StrictInt] = Field(default=None, alias="sizeQuota") + __properties: ClassVar[List[str]] = ["enabled", "sizeCurrent", "sizeQuota"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserQuota from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserQuota from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "enabled": obj.get("enabled"), + "sizeCurrent": obj.get("sizeCurrent"), + "sizeQuota": obj.get("sizeQuota") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_simple.py b/edu_sharing_openapi/edu_sharing_client/models/user_simple.py new file mode 100644 index 00000000..13c28440 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_simple.py @@ -0,0 +1,127 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.organization import Organization +from edu_sharing_client.models.user_profile import UserProfile +from edu_sharing_client.models.user_status import UserStatus +from typing import Optional, Set +from typing_extensions import Self + +class UserSimple(BaseModel): + """ + UserSimple + """ # noqa: E501 + properties: Optional[Dict[str, List[StrictStr]]] = None + editable: Optional[StrictBool] = None + status: Optional[UserStatus] = None + organizations: Optional[List[Organization]] = None + authority_name: StrictStr = Field(alias="authorityName") + authority_type: Optional[StrictStr] = Field(default=None, alias="authorityType") + user_name: Optional[StrictStr] = Field(default=None, alias="userName") + profile: Optional[UserProfile] = None + __properties: ClassVar[List[str]] = ["properties", "editable", "status", "organizations", "authorityName", "authorityType", "userName", "profile"] + + @field_validator('authority_type') + def authority_type_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST']): + raise ValueError("must be one of enum values ('USER', 'GROUP', 'OWNER', 'EVERYONE', 'GUEST')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserSimple from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of status + if self.status: + _dict['status'] = self.status.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in organizations (list) + _items = [] + if self.organizations: + for _item_organizations in self.organizations: + if _item_organizations: + _items.append(_item_organizations.to_dict()) + _dict['organizations'] = _items + # override the default output from pydantic by calling `to_dict()` of profile + if self.profile: + _dict['profile'] = self.profile.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserSimple from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "properties": obj.get("properties"), + "editable": obj.get("editable"), + "status": UserStatus.from_dict(obj["status"]) if obj.get("status") is not None else None, + "organizations": [Organization.from_dict(_item) for _item in obj["organizations"]] if obj.get("organizations") is not None else None, + "authorityName": obj.get("authorityName"), + "authorityType": obj.get("authorityType"), + "userName": obj.get("userName"), + "profile": UserProfile.from_dict(obj["profile"]) if obj.get("profile") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_stats.py b/edu_sharing_openapi/edu_sharing_client/models/user_stats.py new file mode 100644 index 00000000..0653d3c6 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_stats.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserStats(BaseModel): + """ + UserStats + """ # noqa: E501 + node_count: Optional[StrictInt] = Field(default=None, alias="nodeCount") + node_count_cc: Optional[StrictInt] = Field(default=None, alias="nodeCountCC") + collection_count: Optional[StrictInt] = Field(default=None, alias="collectionCount") + __properties: ClassVar[List[str]] = ["nodeCount", "nodeCountCC", "collectionCount"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserStats from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserStats from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "nodeCount": obj.get("nodeCount"), + "nodeCountCC": obj.get("nodeCountCC"), + "collectionCount": obj.get("collectionCount") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/user_status.py b/edu_sharing_openapi/edu_sharing_client/models/user_status.py new file mode 100644 index 00000000..2d42e5e5 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/user_status.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr, field_validator +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class UserStatus(BaseModel): + """ + UserStatus + """ # noqa: E501 + status: Optional[StrictStr] = None + var_date: Optional[StrictInt] = Field(default=None, alias="date") + __properties: ClassVar[List[str]] = ["status", "date"] + + @field_validator('status') + def status_validate_enum(cls, value): + """Validates the enum""" + if value is None: + return value + + if value not in set(['active', 'blocked', 'todelete']): + raise ValueError("must be one of enum values ('active', 'blocked', 'todelete')") + return value + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of UserStatus from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of UserStatus from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "status": obj.get("status"), + "date": obj.get("date") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/value.py b/edu_sharing_openapi/edu_sharing_client/models/value.py new file mode 100644 index 00000000..84bdb926 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/value.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Value(BaseModel): + """ + Value + """ # noqa: E501 + value: StrictStr + count: StrictInt + __properties: ClassVar[List[str]] = ["value", "count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Value from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Value from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "value": obj.get("value"), + "count": obj.get("count") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/value_parameters.py b/edu_sharing_openapi/edu_sharing_client/models/value_parameters.py new file mode 100644 index 00000000..47b01d4c --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/value_parameters.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class ValueParameters(BaseModel): + """ + ValueParameters + """ # noqa: E501 + query: StrictStr + var_property: StrictStr = Field(alias="property") + pattern: StrictStr = Field(description="prefix of the value (or \"-all-\" for all values)") + __properties: ClassVar[List[str]] = ["query", "property", "pattern"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of ValueParameters from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of ValueParameters from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "query": obj.get("query"), + "property": obj.get("property"), + "pattern": obj.get("pattern") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/values.py b/edu_sharing_openapi/edu_sharing_client/models/values.py new file mode 100644 index 00000000..b6259c75 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/values.py @@ -0,0 +1,362 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.admin import Admin +from edu_sharing_client.models.available_mds import AvailableMds +from edu_sharing_client.models.banner import Banner +from edu_sharing_client.models.collections import Collections +from edu_sharing_client.models.config_frontpage import ConfigFrontpage +from edu_sharing_client.models.config_privacy import ConfigPrivacy +from edu_sharing_client.models.config_publish import ConfigPublish +from edu_sharing_client.models.config_rating import ConfigRating +from edu_sharing_client.models.config_remote import ConfigRemote +from edu_sharing_client.models.config_theme_colors import ConfigThemeColors +from edu_sharing_client.models.config_tutorial import ConfigTutorial +from edu_sharing_client.models.config_upload import ConfigUpload +from edu_sharing_client.models.config_workflow import ConfigWorkflow +from edu_sharing_client.models.context_menu_entry import ContextMenuEntry +from edu_sharing_client.models.font_icon import FontIcon +from edu_sharing_client.models.guest import Guest +from edu_sharing_client.models.help_menu_options import HelpMenuOptions +from edu_sharing_client.models.image import Image +from edu_sharing_client.models.license import License +from edu_sharing_client.models.license_agreement import LicenseAgreement +from edu_sharing_client.models.logout_info import LogoutInfo +from edu_sharing_client.models.mainnav import Mainnav +from edu_sharing_client.models.menu_entry import MenuEntry +from edu_sharing_client.models.register import Register +from edu_sharing_client.models.rendering import Rendering +from edu_sharing_client.models.services import Services +from edu_sharing_client.models.simple_edit import SimpleEdit +from edu_sharing_client.models.stream import Stream +from typing import Optional, Set +from typing_extensions import Self + +class Values(BaseModel): + """ + Values + """ # noqa: E501 + supported_languages: Optional[List[StrictStr]] = Field(default=None, alias="supportedLanguages") + extension: Optional[StrictStr] = None + login_url: Optional[StrictStr] = Field(default=None, alias="loginUrl") + login_allow_local: Optional[StrictBool] = Field(default=None, alias="loginAllowLocal") + login_providers_url: Optional[StrictStr] = Field(default=None, alias="loginProvidersUrl") + login_provider_target_url: Optional[StrictStr] = Field(default=None, alias="loginProviderTargetUrl") + register: Optional[Register] = None + recover_password_url: Optional[StrictStr] = Field(default=None, alias="recoverPasswordUrl") + imprint_url: Optional[StrictStr] = Field(default=None, alias="imprintUrl") + privacy_information_url: Optional[StrictStr] = Field(default=None, alias="privacyInformationUrl") + help_url: Optional[StrictStr] = Field(default=None, alias="helpUrl") + whats_new_url: Optional[StrictStr] = Field(default=None, alias="whatsNewUrl") + edit_profile_url: Optional[StrictStr] = Field(default=None, alias="editProfileUrl") + edit_profile: Optional[StrictBool] = Field(default=None, alias="editProfile") + workspace_columns: Optional[List[StrictStr]] = Field(default=None, alias="workspaceColumns") + workspace_shared_to_me_default_all: Optional[StrictBool] = Field(default=None, alias="workspaceSharedToMeDefaultAll") + hide_main_menu: Optional[List[StrictStr]] = Field(default=None, 
alias="hideMainMenu") + logout: Optional[LogoutInfo] = None + menu_entries: Optional[List[MenuEntry]] = Field(default=None, alias="menuEntries") + custom_options: Optional[List[ContextMenuEntry]] = Field(default=None, alias="customOptions") + user_menu_overrides: Optional[List[ContextMenuEntry]] = Field(default=None, alias="userMenuOverrides") + allowed_licenses: Optional[List[StrictStr]] = Field(default=None, alias="allowedLicenses") + custom_licenses: Optional[List[License]] = Field(default=None, alias="customLicenses") + workflow: Optional[ConfigWorkflow] = None + license_dialog_on_upload: Optional[StrictBool] = Field(default=None, alias="licenseDialogOnUpload") + node_report: Optional[StrictBool] = Field(default=None, alias="nodeReport") + branding: Optional[StrictBool] = None + rating: Optional[ConfigRating] = None + publishing_notice: Optional[StrictBool] = Field(default=None, alias="publishingNotice") + site_title: Optional[StrictStr] = Field(default=None, alias="siteTitle") + user_display_name: Optional[StrictStr] = Field(default=None, alias="userDisplayName") + user_secondary_display_name: Optional[StrictStr] = Field(default=None, alias="userSecondaryDisplayName") + user_affiliation: Optional[StrictBool] = Field(default=None, alias="userAffiliation") + default_username: Optional[StrictStr] = Field(default=None, alias="defaultUsername") + default_password: Optional[StrictStr] = Field(default=None, alias="defaultPassword") + banner: Optional[Banner] = None + available_mds: Optional[List[AvailableMds]] = Field(default=None, alias="availableMds") + available_repositories: Optional[List[StrictStr]] = Field(default=None, alias="availableRepositories") + search_view_type: Optional[StrictInt] = Field(default=None, alias="searchViewType") + workspace_view_type: Optional[StrictInt] = Field(default=None, alias="workspaceViewType") + items_per_request: Optional[StrictInt] = Field(default=None, alias="itemsPerRequest") + rendering: Optional[Rendering] = None + session_expired_dialog: Optional[Dict[str, Any]] = Field(default=None, alias="sessionExpiredDialog") + login_default_location: Optional[StrictStr] = Field(default=None, alias="loginDefaultLocation") + search_group_results: Optional[StrictBool] = Field(default=None, alias="searchGroupResults") + mainnav: Optional[Mainnav] = None + search_sidenav_mode: Optional[StrictStr] = Field(default=None, alias="searchSidenavMode") + guest: Optional[Guest] = None + collections: Optional[Collections] = None + license_agreement: Optional[LicenseAgreement] = Field(default=None, alias="licenseAgreement") + services: Optional[Services] = None + help_menu_options: Optional[List[HelpMenuOptions]] = Field(default=None, alias="helpMenuOptions") + images: Optional[List[Image]] = None + icons: Optional[List[FontIcon]] = None + stream: Optional[Stream] = None + admin: Optional[Admin] = None + simple_edit: Optional[SimpleEdit] = Field(default=None, alias="simpleEdit") + frontpage: Optional[ConfigFrontpage] = None + upload: Optional[ConfigUpload] = None + publish: Optional[ConfigPublish] = None + remote: Optional[ConfigRemote] = None + custom_css: Optional[StrictStr] = Field(default=None, alias="customCSS") + theme_colors: Optional[ConfigThemeColors] = Field(default=None, alias="themeColors") + privacy: Optional[ConfigPrivacy] = None + tutorial: Optional[ConfigTutorial] = None + __properties: ClassVar[List[str]] = ["supportedLanguages", "extension", "loginUrl", "loginAllowLocal", "loginProvidersUrl", "loginProviderTargetUrl", "register", "recoverPasswordUrl", 
"imprintUrl", "privacyInformationUrl", "helpUrl", "whatsNewUrl", "editProfileUrl", "editProfile", "workspaceColumns", "workspaceSharedToMeDefaultAll", "hideMainMenu", "logout", "menuEntries", "customOptions", "userMenuOverrides", "allowedLicenses", "customLicenses", "workflow", "licenseDialogOnUpload", "nodeReport", "branding", "rating", "publishingNotice", "siteTitle", "userDisplayName", "userSecondaryDisplayName", "userAffiliation", "defaultUsername", "defaultPassword", "banner", "availableMds", "availableRepositories", "searchViewType", "workspaceViewType", "itemsPerRequest", "rendering", "sessionExpiredDialog", "loginDefaultLocation", "searchGroupResults", "mainnav", "searchSidenavMode", "guest", "collections", "licenseAgreement", "services", "helpMenuOptions", "images", "icons", "stream", "admin", "simpleEdit", "frontpage", "upload", "publish", "remote", "customCSS", "themeColors", "privacy", "tutorial"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Values from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of register + if self.register: + _dict['register'] = self.register.to_dict() + # override the default output from pydantic by calling `to_dict()` of logout + if self.logout: + _dict['logout'] = self.logout.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in menu_entries (list) + _items = [] + if self.menu_entries: + for _item_menu_entries in self.menu_entries: + if _item_menu_entries: + _items.append(_item_menu_entries.to_dict()) + _dict['menuEntries'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in custom_options (list) + _items = [] + if self.custom_options: + for _item_custom_options in self.custom_options: + if _item_custom_options: + _items.append(_item_custom_options.to_dict()) + _dict['customOptions'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in user_menu_overrides (list) + _items = [] + if self.user_menu_overrides: + for _item_user_menu_overrides in self.user_menu_overrides: + if _item_user_menu_overrides: + _items.append(_item_user_menu_overrides.to_dict()) + _dict['userMenuOverrides'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in custom_licenses (list) + _items = [] + if self.custom_licenses: + for _item_custom_licenses in self.custom_licenses: + if _item_custom_licenses: + _items.append(_item_custom_licenses.to_dict()) + _dict['customLicenses'] = _items + # override the default output from pydantic by calling `to_dict()` of workflow + if self.workflow: + _dict['workflow'] = self.workflow.to_dict() + # override the default output from pydantic by calling `to_dict()` of rating + if self.rating: + _dict['rating'] = self.rating.to_dict() + # override the default output from pydantic by calling `to_dict()` of banner + if self.banner: + _dict['banner'] = self.banner.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in available_mds (list) + _items = [] + if self.available_mds: + for _item_available_mds in self.available_mds: + if _item_available_mds: + _items.append(_item_available_mds.to_dict()) + _dict['availableMds'] = _items + # override the default output from pydantic by calling `to_dict()` of rendering + if self.rendering: + _dict['rendering'] = self.rendering.to_dict() + # override the default output from pydantic by calling `to_dict()` of mainnav + if self.mainnav: + _dict['mainnav'] = self.mainnav.to_dict() + # override the default output from pydantic by calling `to_dict()` of guest + if self.guest: + _dict['guest'] = self.guest.to_dict() + # override the default output from pydantic by calling `to_dict()` of collections + if self.collections: + _dict['collections'] = self.collections.to_dict() + # override the default output from pydantic by calling `to_dict()` of license_agreement + if self.license_agreement: + _dict['licenseAgreement'] = self.license_agreement.to_dict() + # override the default output from pydantic by calling `to_dict()` of services + if self.services: + _dict['services'] = self.services.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in help_menu_options (list) + _items = [] + if self.help_menu_options: + for _item_help_menu_options in self.help_menu_options: + if 
_item_help_menu_options: + _items.append(_item_help_menu_options.to_dict()) + _dict['helpMenuOptions'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in images (list) + _items = [] + if self.images: + for _item_images in self.images: + if _item_images: + _items.append(_item_images.to_dict()) + _dict['images'] = _items + # override the default output from pydantic by calling `to_dict()` of each item in icons (list) + _items = [] + if self.icons: + for _item_icons in self.icons: + if _item_icons: + _items.append(_item_icons.to_dict()) + _dict['icons'] = _items + # override the default output from pydantic by calling `to_dict()` of stream + if self.stream: + _dict['stream'] = self.stream.to_dict() + # override the default output from pydantic by calling `to_dict()` of admin + if self.admin: + _dict['admin'] = self.admin.to_dict() + # override the default output from pydantic by calling `to_dict()` of simple_edit + if self.simple_edit: + _dict['simpleEdit'] = self.simple_edit.to_dict() + # override the default output from pydantic by calling `to_dict()` of frontpage + if self.frontpage: + _dict['frontpage'] = self.frontpage.to_dict() + # override the default output from pydantic by calling `to_dict()` of upload + if self.upload: + _dict['upload'] = self.upload.to_dict() + # override the default output from pydantic by calling `to_dict()` of publish + if self.publish: + _dict['publish'] = self.publish.to_dict() + # override the default output from pydantic by calling `to_dict()` of remote + if self.remote: + _dict['remote'] = self.remote.to_dict() + # override the default output from pydantic by calling `to_dict()` of theme_colors + if self.theme_colors: + _dict['themeColors'] = self.theme_colors.to_dict() + # override the default output from pydantic by calling `to_dict()` of privacy + if self.privacy: + _dict['privacy'] = self.privacy.to_dict() + # override the default output from pydantic by calling `to_dict()` of tutorial + if self.tutorial: + _dict['tutorial'] = self.tutorial.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Values from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "supportedLanguages": obj.get("supportedLanguages"), + "extension": obj.get("extension"), + "loginUrl": obj.get("loginUrl"), + "loginAllowLocal": obj.get("loginAllowLocal"), + "loginProvidersUrl": obj.get("loginProvidersUrl"), + "loginProviderTargetUrl": obj.get("loginProviderTargetUrl"), + "register": Register.from_dict(obj["register"]) if obj.get("register") is not None else None, + "recoverPasswordUrl": obj.get("recoverPasswordUrl"), + "imprintUrl": obj.get("imprintUrl"), + "privacyInformationUrl": obj.get("privacyInformationUrl"), + "helpUrl": obj.get("helpUrl"), + "whatsNewUrl": obj.get("whatsNewUrl"), + "editProfileUrl": obj.get("editProfileUrl"), + "editProfile": obj.get("editProfile"), + "workspaceColumns": obj.get("workspaceColumns"), + "workspaceSharedToMeDefaultAll": obj.get("workspaceSharedToMeDefaultAll"), + "hideMainMenu": obj.get("hideMainMenu"), + "logout": LogoutInfo.from_dict(obj["logout"]) if obj.get("logout") is not None else None, + "menuEntries": [MenuEntry.from_dict(_item) for _item in obj["menuEntries"]] if obj.get("menuEntries") is not None else None, + "customOptions": [ContextMenuEntry.from_dict(_item) for _item in obj["customOptions"]] if 
obj.get("customOptions") is not None else None, + "userMenuOverrides": [ContextMenuEntry.from_dict(_item) for _item in obj["userMenuOverrides"]] if obj.get("userMenuOverrides") is not None else None, + "allowedLicenses": obj.get("allowedLicenses"), + "customLicenses": [License.from_dict(_item) for _item in obj["customLicenses"]] if obj.get("customLicenses") is not None else None, + "workflow": ConfigWorkflow.from_dict(obj["workflow"]) if obj.get("workflow") is not None else None, + "licenseDialogOnUpload": obj.get("licenseDialogOnUpload"), + "nodeReport": obj.get("nodeReport"), + "branding": obj.get("branding"), + "rating": ConfigRating.from_dict(obj["rating"]) if obj.get("rating") is not None else None, + "publishingNotice": obj.get("publishingNotice"), + "siteTitle": obj.get("siteTitle"), + "userDisplayName": obj.get("userDisplayName"), + "userSecondaryDisplayName": obj.get("userSecondaryDisplayName"), + "userAffiliation": obj.get("userAffiliation"), + "defaultUsername": obj.get("defaultUsername"), + "defaultPassword": obj.get("defaultPassword"), + "banner": Banner.from_dict(obj["banner"]) if obj.get("banner") is not None else None, + "availableMds": [AvailableMds.from_dict(_item) for _item in obj["availableMds"]] if obj.get("availableMds") is not None else None, + "availableRepositories": obj.get("availableRepositories"), + "searchViewType": obj.get("searchViewType"), + "workspaceViewType": obj.get("workspaceViewType"), + "itemsPerRequest": obj.get("itemsPerRequest"), + "rendering": Rendering.from_dict(obj["rendering"]) if obj.get("rendering") is not None else None, + "sessionExpiredDialog": obj.get("sessionExpiredDialog"), + "loginDefaultLocation": obj.get("loginDefaultLocation"), + "searchGroupResults": obj.get("searchGroupResults"), + "mainnav": Mainnav.from_dict(obj["mainnav"]) if obj.get("mainnav") is not None else None, + "searchSidenavMode": obj.get("searchSidenavMode"), + "guest": Guest.from_dict(obj["guest"]) if obj.get("guest") is not None else None, + "collections": Collections.from_dict(obj["collections"]) if obj.get("collections") is not None else None, + "licenseAgreement": LicenseAgreement.from_dict(obj["licenseAgreement"]) if obj.get("licenseAgreement") is not None else None, + "services": Services.from_dict(obj["services"]) if obj.get("services") is not None else None, + "helpMenuOptions": [HelpMenuOptions.from_dict(_item) for _item in obj["helpMenuOptions"]] if obj.get("helpMenuOptions") is not None else None, + "images": [Image.from_dict(_item) for _item in obj["images"]] if obj.get("images") is not None else None, + "icons": [FontIcon.from_dict(_item) for _item in obj["icons"]] if obj.get("icons") is not None else None, + "stream": Stream.from_dict(obj["stream"]) if obj.get("stream") is not None else None, + "admin": Admin.from_dict(obj["admin"]) if obj.get("admin") is not None else None, + "simpleEdit": SimpleEdit.from_dict(obj["simpleEdit"]) if obj.get("simpleEdit") is not None else None, + "frontpage": ConfigFrontpage.from_dict(obj["frontpage"]) if obj.get("frontpage") is not None else None, + "upload": ConfigUpload.from_dict(obj["upload"]) if obj.get("upload") is not None else None, + "publish": ConfigPublish.from_dict(obj["publish"]) if obj.get("publish") is not None else None, + "remote": ConfigRemote.from_dict(obj["remote"]) if obj.get("remote") is not None else None, + "customCSS": obj.get("customCSS"), + "themeColors": ConfigThemeColors.from_dict(obj["themeColors"]) if obj.get("themeColors") is not None else None, + "privacy": 
ConfigPrivacy.from_dict(obj["privacy"]) if obj.get("privacy") is not None else None, + "tutorial": ConfigTutorial.from_dict(obj["tutorial"]) if obj.get("tutorial") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/variables.py b/edu_sharing_openapi/edu_sharing_client/models/variables.py new file mode 100644 index 00000000..257a2c0b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/variables.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Variables(BaseModel): + """ + Variables + """ # noqa: E501 + var_global: Optional[Dict[str, StrictStr]] = Field(default=None, alias="global") + current: Optional[Dict[str, StrictStr]] = None + __properties: ClassVar[List[str]] = ["global", "current"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Variables from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Variables from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "global": obj.get("global"), + "current": obj.get("current") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/version.py b/edu_sharing_openapi/edu_sharing_client/models/version.py new file mode 100644 index 00000000..cdbf198b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/version.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
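[Editor's aside, not part of the patch: the generated models above share one pattern of alias-aware construction and serialisation. A minimal round-trip sketch for the Variables model just added, assuming the edu_sharing_client package from this patch is importable; the payload values are placeholders.]

from edu_sharing_client.models.variables import Variables

payload = '{"global": {"theme": "light"}, "current": {"lang": "de"}}'
variables = Variables.from_json(payload)
assert variables.var_global == {"theme": "light"}   # JSON key "global" maps to var_global via its alias
print(variables.to_json())                          # dumps by alias; fields left as None are omitted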
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class Version(BaseModel): + """ + Version + """ # noqa: E501 + full: Optional[StrictStr] = None + major: Optional[StrictStr] = None + minor: Optional[StrictStr] = None + patch: Optional[StrictStr] = None + qualifier: Optional[StrictStr] = None + build: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["full", "major", "minor", "patch", "qualifier", "build"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Version from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Version from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "full": obj.get("full"), + "major": obj.get("major"), + "minor": obj.get("minor"), + "patch": obj.get("patch"), + "qualifier": obj.get("qualifier"), + "build": obj.get("build") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/version_build.py b/edu_sharing_openapi/edu_sharing_client/models/version_build.py new file mode 100644 index 00000000..89eee721 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/version_build.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class VersionBuild(BaseModel): + """ + VersionBuild + """ # noqa: E501 + timestamp: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["timestamp"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VersionBuild from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VersionBuild from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/version_git.py b/edu_sharing_openapi/edu_sharing_client/models/version_git.py new file mode 100644 index 00000000..7c2b754e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/version_git.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.version_git_commit import VersionGitCommit +from typing import Optional, Set +from typing_extensions import Self + +class VersionGit(BaseModel): + """ + VersionGit + """ # noqa: E501 + branch: Optional[StrictStr] = None + commit: Optional[VersionGitCommit] = None + __properties: ClassVar[List[str]] = ["branch", "commit"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VersionGit from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of commit + if self.commit: + _dict['commit'] = self.commit.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VersionGit from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "branch": obj.get("branch"), + "commit": VersionGitCommit.from_dict(obj["commit"]) if obj.get("commit") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/version_git_commit.py b/edu_sharing_openapi/edu_sharing_client/models/version_git_commit.py new file mode 100644 index 00000000..a2be1689 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/version_git_commit.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.version_timestamp import VersionTimestamp +from typing import Optional, Set +from typing_extensions import Self + +class VersionGitCommit(BaseModel): + """ + VersionGitCommit + """ # noqa: E501 + id: Optional[StrictStr] = None + timestamp: Optional[VersionTimestamp] = None + __properties: ClassVar[List[str]] = ["id", "timestamp"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VersionGitCommit from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of timestamp + if self.timestamp: + _dict['timestamp'] = self.timestamp.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VersionGitCommit from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "timestamp": VersionTimestamp.from_dict(obj["timestamp"]) if obj.get("timestamp") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/version_maven.py b/edu_sharing_openapi/edu_sharing_client/models/version_maven.py new file mode 100644 index 00000000..749cc418 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/version_maven.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.version_project import VersionProject +from typing import Optional, Set +from typing_extensions import Self + +class VersionMaven(BaseModel): + """ + VersionMaven + """ # noqa: E501 + bom: Optional[Dict[str, StrictStr]] = None + project: Optional[VersionProject] = None + __properties: ClassVar[List[str]] = ["bom", "project"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VersionMaven from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of project + if self.project: + _dict['project'] = self.project.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VersionMaven from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "bom": obj.get("bom"), + "project": VersionProject.from_dict(obj["project"]) if obj.get("project") is not None else None + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/version_project.py b/edu_sharing_openapi/edu_sharing_client/models/version_project.py new file mode 100644 index 00000000..b868028e --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/version_project.py @@ -0,0 +1,91 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class VersionProject(BaseModel): + """ + VersionProject + """ # noqa: E501 + artifact_id: Optional[StrictStr] = Field(default=None, alias="artifactId") + group_id: Optional[StrictStr] = Field(default=None, alias="groupId") + version: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["artifactId", "groupId", "version"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VersionProject from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VersionProject from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "artifactId": obj.get("artifactId"), + "groupId": obj.get("groupId"), + "version": obj.get("version") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/version_timestamp.py b/edu_sharing_openapi/edu_sharing_client/models/version_timestamp.py new file mode 100644 index 00000000..1f50bf3b --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/version_timestamp.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class VersionTimestamp(BaseModel): + """ + VersionTimestamp + """ # noqa: E501 + datetime: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["datetime"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of VersionTimestamp from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of VersionTimestamp from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "datetime": obj.get("datetime") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/website_information.py b/edu_sharing_openapi/edu_sharing_client/models/website_information.py new file mode 100644 index 00000000..43f48343 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/website_information.py @@ -0,0 +1,105 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node import Node +from typing import Optional, Set +from typing_extensions import Self + +class WebsiteInformation(BaseModel): + """ + WebsiteInformation + """ # noqa: E501 + duplicate_nodes: Optional[List[Node]] = Field(default=None, alias="duplicateNodes") + title: Optional[StrictStr] = None + page: Optional[StrictStr] = None + description: Optional[StrictStr] = None + license: Optional[StrictStr] = None + keywords: Optional[List[StrictStr]] = None + __properties: ClassVar[List[str]] = ["duplicateNodes", "title", "page", "description", "license", "keywords"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WebsiteInformation from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in duplicate_nodes (list) + _items = [] + if self.duplicate_nodes: + for _item_duplicate_nodes in self.duplicate_nodes: + if _item_duplicate_nodes: + _items.append(_item_duplicate_nodes.to_dict()) + _dict['duplicateNodes'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WebsiteInformation from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "duplicateNodes": [Node.from_dict(_item) for _item in obj["duplicateNodes"]] if obj.get("duplicateNodes") is not None else None, + "title": obj.get("title"), + "page": obj.get("page"), + "description": obj.get("description"), + "license": obj.get("license"), + "keywords": obj.get("keywords") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/widget_data_dto.py b/edu_sharing_openapi/edu_sharing_client/models/widget_data_dto.py new file mode 100644 index 00000000..13aa5683 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/widget_data_dto.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class WidgetDataDTO(BaseModel): + """ + WidgetDataDTO + """ # noqa: E501 + id: Optional[StrictStr] = None + caption: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["id", "caption"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WidgetDataDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WidgetDataDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "caption": obj.get("caption") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/workflow_event_dto.py b/edu_sharing_openapi/edu_sharing_client/models/workflow_event_dto.py new file mode 100644 index 00000000..604c2df0 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/workflow_event_dto.py @@ -0,0 +1,109 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.node_data_dto import NodeDataDTO +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO +from edu_sharing_client.models.user_data_dto import UserDataDTO +from typing import Optional, Set +from typing_extensions import Self + +class WorkflowEventDTO(NotificationEventDTO): + """ + WorkflowEventDTO + """ # noqa: E501 + node: Optional[NodeDataDTO] = None + workflow_status: Optional[StrictStr] = Field(default=None, alias="workflowStatus") + user_comment: Optional[StrictStr] = Field(default=None, alias="userComment") + __properties: ClassVar[List[str]] = ["timestamp", "creator", "receiver", "status", "_id", "_class", "node", "workflowStatus", "userComment"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WorkflowEventDTO from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of creator + if self.creator: + _dict['creator'] = self.creator.to_dict() + # override the default output from pydantic by calling `to_dict()` of receiver + if self.receiver: + _dict['receiver'] = self.receiver.to_dict() + # override the default output from pydantic by calling `to_dict()` of node + if self.node: + _dict['node'] = self.node.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WorkflowEventDTO from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "timestamp": obj.get("timestamp"), + "creator": UserDataDTO.from_dict(obj["creator"]) if obj.get("creator") is not None else None, + "receiver": UserDataDTO.from_dict(obj["receiver"]) if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "_id": obj.get("_id"), + "_class": obj.get("_class"), + "node": NodeDataDTO.from_dict(obj["node"]) if obj.get("node") is not None else None, + "workflowStatus": obj.get("workflowStatus"), + "userComment": obj.get("userComment") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/models/workflow_history.py b/edu_sharing_openapi/edu_sharing_client/models/workflow_history.py new file mode 100644 index 00000000..cac312e7 --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/models/workflow_history.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from edu_sharing_client.models.authority import Authority +from edu_sharing_client.models.user_simple import UserSimple +from typing import Optional, Set +from typing_extensions import Self + +class WorkflowHistory(BaseModel): + """ + WorkflowHistory + """ # noqa: E501 + time: Optional[StrictInt] = None + editor: Optional[UserSimple] = None + receiver: Optional[List[Authority]] = None + status: Optional[StrictStr] = None + comment: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["time", "editor", "receiver", "status", "comment"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of WorkflowHistory from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of editor + if self.editor: + _dict['editor'] = self.editor.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in receiver (list) + _items = [] + if self.receiver: + for _item_receiver in self.receiver: + if _item_receiver: + _items.append(_item_receiver.to_dict()) + _dict['receiver'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of WorkflowHistory from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "time": obj.get("time"), + "editor": UserSimple.from_dict(obj["editor"]) if obj.get("editor") is not None else None, + "receiver": [Authority.from_dict(_item) for _item in obj["receiver"]] if obj.get("receiver") is not None else None, + "status": obj.get("status"), + "comment": obj.get("comment") + }) + return _obj + + diff --git a/edu_sharing_openapi/edu_sharing_client/py.typed b/edu_sharing_openapi/edu_sharing_client/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/edu_sharing_openapi/edu_sharing_client/rest.py b/edu_sharing_openapi/edu_sharing_client/rest.py new file mode 100644 index 00000000..283e43aa --- /dev/null +++ b/edu_sharing_openapi/edu_sharing_client/rest.py @@ -0,0 +1,257 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
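[Editor's aside, not part of the patch: every field of WorkflowHistory is optional, so a partial entry can be built and serialised directly; the status and comment strings below are placeholders, not values defined by the repository.]

from edu_sharing_client.models.workflow_history import WorkflowHistory

entry = WorkflowHistory(time=1700000000000, status="IN_PROGRESS", comment="forwarded for review")
print(entry.to_dict())
# -> {'time': 1700000000000, 'status': 'IN_PROGRESS', 'comment': 'forwarded for review'}
# editor/receiver were never set, so exclude_none drops them; when present they are
# expanded through UserSimple.to_dict() / Authority.to_dict() as in the generated code above.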
+""" # noqa: E501 + + +import io +import json +import re +import ssl + +import urllib3 + +from edu_sharing_client.exceptions import ApiException, ApiValueError + +SUPPORTED_SOCKS_PROXIES = {"socks5", "socks5h", "socks4", "socks4a"} +RESTResponseType = urllib3.HTTPResponse + + +def is_socks_proxy_url(url): + if url is None: + return False + split_section = url.split("://") + if len(split_section) < 2: + return False + else: + return split_section[0].lower() in SUPPORTED_SOCKS_PROXIES + + +class RESTResponse(io.IOBase): + + def __init__(self, resp) -> None: + self.response = resp + self.status = resp.status + self.reason = resp.reason + self.data = None + + def read(self): + if self.data is None: + self.data = self.response.data + return self.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.response.headers + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.response.headers.get(name, default) + + +class RESTClientObject: + + def __init__(self, configuration) -> None: + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 + + # cert_reqs + if configuration.verify_ssl: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + + pool_args = { + "cert_reqs": cert_reqs, + "ca_certs": configuration.ssl_ca_cert, + "cert_file": configuration.cert_file, + "key_file": configuration.key_file, + } + if configuration.assert_hostname is not None: + pool_args['assert_hostname'] = ( + configuration.assert_hostname + ) + + if configuration.retries is not None: + pool_args['retries'] = configuration.retries + + if configuration.tls_server_name: + pool_args['server_hostname'] = configuration.tls_server_name + + + if configuration.socket_options is not None: + pool_args['socket_options'] = configuration.socket_options + + if configuration.connection_pool_maxsize is not None: + pool_args['maxsize'] = configuration.connection_pool_maxsize + + # https pool manager + self.pool_manager: urllib3.PoolManager + + if configuration.proxy: + if is_socks_proxy_url(configuration.proxy): + from urllib3.contrib.socks import SOCKSProxyManager + pool_args["proxy_url"] = configuration.proxy + pool_args["headers"] = configuration.proxy_headers + self.pool_manager = SOCKSProxyManager(**pool_args) + else: + pool_args["proxy_url"] = configuration.proxy + pool_args["proxy_headers"] = configuration.proxy_headers + self.pool_manager = urllib3.ProxyManager(**pool_args) + else: + self.pool_manager = urllib3.PoolManager(**pool_args) + + def request( + self, + method, + url, + headers=None, + body=None, + post_params=None, + _request_timeout=None + ): + """Perform requests. + + :param method: http request method + :param url: http request url + :param headers: http request headers + :param body: request json body, for `application/json` + :param post_params: request post parameters, + `application/x-www-form-urlencoded` + and `multipart/form-data` + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ """ + method = method.upper() + assert method in [ + 'GET', + 'HEAD', + 'DELETE', + 'POST', + 'PUT', + 'PATCH', + 'OPTIONS' + ] + + if post_params and body: + raise ApiValueError( + "body parameter cannot be used with post_params parameter." + ) + + post_params = post_params or {} + headers = headers or {} + + timeout = None + if _request_timeout: + if isinstance(_request_timeout, (int, float)): + timeout = urllib3.Timeout(total=_request_timeout) + elif ( + isinstance(_request_timeout, tuple) + and len(_request_timeout) == 2 + ): + timeout = urllib3.Timeout( + connect=_request_timeout[0], + read=_request_timeout[1] + ) + + try: + # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` + if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + + # no content type provided or payload is json + content_type = headers.get('Content-Type') + if ( + not content_type + or re.search('json', content_type, re.IGNORECASE) + ): + request_body = None + if body is not None: + request_body = json.dumps(body) + r = self.pool_manager.request( + method, + url, + body=request_body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif content_type == 'application/x-www-form-urlencoded': + r = self.pool_manager.request( + method, + url, + fields=post_params, + encode_multipart=False, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif content_type == 'multipart/form-data': + # must del headers['Content-Type'], or the correct + # Content-Type which generated by urllib3 will be + # overwritten. + del headers['Content-Type'] + # Ensures that dict objects are serialized + post_params = [(a, json.dumps(b)) if isinstance(b, dict) else (a,b) for a, b in post_params] + r = self.pool_manager.request( + method, + url, + fields=post_params, + encode_multipart=True, + timeout=timeout, + headers=headers, + preload_content=False + ) + # Pass a `string` parameter directly in the body to support + # other content types than JSON when `body` argument is + # provided in serialized form. + elif isinstance(body, str) or isinstance(body, bytes): + r = self.pool_manager.request( + method, + url, + body=body, + timeout=timeout, + headers=headers, + preload_content=False + ) + elif headers['Content-Type'] == 'text/plain' and isinstance(body, bool): + request_body = "true" if body else "false" + r = self.pool_manager.request( + method, + url, + body=request_body, + preload_content=False, + timeout=timeout, + headers=headers) + else: + # Cannot generate the request from given parameters + msg = """Cannot prepare a request message for provided + arguments. 
Please check that your arguments match + declared content type.""" + raise ApiException(status=0, reason=msg) + # For `GET`, `HEAD` + else: + r = self.pool_manager.request( + method, + url, + fields={}, + timeout=timeout, + headers=headers, + preload_content=False + ) + except urllib3.exceptions.SSLError as e: + msg = "\n".join([type(e).__name__, str(e)]) + raise ApiException(status=0, reason=msg) + + return RESTResponse(r) diff --git a/edu_sharing_openapi/git_push.sh b/edu_sharing_openapi/git_push.sh new file mode 100644 index 00000000..f53a75d4 --- /dev/null +++ b/edu_sharing_openapi/git_push.sh @@ -0,0 +1,57 @@ +#!/bin/sh +# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ +# +# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com" + +git_user_id=$1 +git_repo_id=$2 +release_note=$3 +git_host=$4 + +if [ "$git_host" = "" ]; then + git_host="github.com" + echo "[INFO] No command line input provided. Set \$git_host to $git_host" +fi + +if [ "$git_user_id" = "" ]; then + git_user_id="GIT_USER_ID" + echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="GIT_REPO_ID" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="Minor update" + echo "[INFO] No command line input provided. Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository. +git commit -m "$release_note" + +# Sets the new remote +git_remote=$(git remote) +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." 
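[Editor's aside, not part of the patch and referring to rest.py above rather than to git_push.sh: RESTClientObject.request() accepts either a single total timeout or a (connect, read) pair, as its docstring notes. A hedged sketch, assuming the generated edu_sharing_client.configuration.Configuration class (not shown in this patch) accepts a host argument as in other openapi-generator clients; the host and path are placeholders.]

from edu_sharing_client.configuration import Configuration  # assumed generated module, not shown here
from edu_sharing_client.rest import RESTClientObject

config = Configuration(host="https://repo.example.org/edu-sharing/rest")  # placeholder host
client = RESTClientObject(config)
resp = client.request(
    "GET",
    config.host + "/_about/status",      # placeholder endpoint path
    headers={"Accept": "application/json"},
    _request_timeout=(3.05, 27),         # (connect, read) timeouts
)
print(resp.status, resp.getheader("Content-Type"))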
+ git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:"${GIT_TOKEN}"@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' diff --git a/edu_sharing_openapi/pyproject.toml b/edu_sharing_openapi/pyproject.toml new file mode 100644 index 00000000..d3f220e3 --- /dev/null +++ b/edu_sharing_openapi/pyproject.toml @@ -0,0 +1,71 @@ +[tool.poetry] +name = "edu_sharing_client" +version = "1.0.0" +description = "edu-sharing Repository REST API" +authors = ["OpenAPI Generator Community "] +license = "NoLicense" +readme = "README.md" +repository = "https://github.com/GIT_USER_ID/GIT_REPO_ID" +keywords = ["OpenAPI", "OpenAPI-Generator", "edu-sharing Repository REST API"] +include = ["edu_sharing_client/py.typed"] + +[tool.poetry.dependencies] +python = "^3.7" + +urllib3 = ">= 1.25.3" +python-dateutil = ">=2.8.2" +pydantic = ">=2" +typing-extensions = ">=4.7.1" + +[tool.poetry.dev-dependencies] +pytest = ">=7.2.1" +tox = ">=3.9.0" +flake8 = ">=4.0.0" +types-python-dateutil = ">=2.8.19.14" +mypy = "1.4.1" + + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[tool.pylint.'MESSAGES CONTROL'] +extension-pkg-whitelist = "pydantic" + +[tool.mypy] +files = [ + "edu_sharing_client", + #"test", # auto-generated tests + "tests", # hand-written tests +] +# TODO: enable "strict" once all these individual checks are passing +# strict = true + +# List from: https://mypy.readthedocs.io/en/stable/existing_code.html#introduce-stricter-options +warn_unused_configs = true +warn_redundant_casts = true +warn_unused_ignores = true + +## Getting these passing should be easy +strict_equality = true +strict_concatenate = true + +## Strongly recommend enabling this one as soon as you can +check_untyped_defs = true + +## These shouldn't be too much additional work, but may be tricky to +## get passing if you use a lot of untyped libraries +disallow_subclassing_any = true +disallow_untyped_decorators = true +disallow_any_generics = true + +### These next few are various gradations of forcing use of type annotations +#disallow_untyped_calls = true +#disallow_incomplete_defs = true +#disallow_untyped_defs = true +# +### This one isn't too hard to get passing, but return on investment is lower +#no_implicit_reexport = true +# +### This one can be tricky to get passing if you use a lot of untyped libraries +#warn_return_any = true diff --git a/edu_sharing_openapi/requirements.txt b/edu_sharing_openapi/requirements.txt new file mode 100644 index 00000000..cc85509e --- /dev/null +++ b/edu_sharing_openapi/requirements.txt @@ -0,0 +1,5 @@ +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 >= 1.25.3, < 2.1.0 +pydantic >= 2 +typing-extensions >= 4.7.1 diff --git a/edu_sharing_openapi/setup.cfg b/edu_sharing_openapi/setup.cfg new file mode 100644 index 00000000..11433ee8 --- /dev/null +++ b/edu_sharing_openapi/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length=99 diff --git a/edu_sharing_openapi/setup.py b/edu_sharing_openapi/setup.py new file mode 100644 index 00000000..037c164d --- /dev/null +++ b/edu_sharing_openapi/setup.py @@ -0,0 +1,49 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from setuptools import setup, find_packages # noqa: H301 + +# To install the library, run the following +# +# python setup.py install +# +# prerequisite: setuptools +# http://pypi.python.org/pypi/setuptools +NAME = "edu-sharing-client" +VERSION = "1.0.0" +PYTHON_REQUIRES = ">=3.7" +REQUIRES = [ + "urllib3 >= 1.25.3, < 2.1.0", + "python-dateutil", + "pydantic >= 2", + "typing-extensions >= 4.7.1", +] + +setup( + name=NAME, + version=VERSION, + description="edu-sharing Repository REST API", + author="OpenAPI Generator community", + author_email="team@openapitools.org", + url="", + keywords=["OpenAPI", "OpenAPI-Generator", "edu-sharing Repository REST API"], + install_requires=REQUIRES, + packages=find_packages(exclude=["test", "tests"]), + include_package_data=True, + long_description_content_type='text/markdown', + long_description="""\ + The public restful API of the edu-sharing repository. + """, # noqa: E501 + package_data={"edu_sharing_client": ["py.typed"]}, +) diff --git a/edu_sharing_openapi/test-requirements.txt b/edu_sharing_openapi/test-requirements.txt new file mode 100644 index 00000000..8e6d8cb1 --- /dev/null +++ b/edu_sharing_openapi/test-requirements.txt @@ -0,0 +1,5 @@ +pytest~=7.1.3 +pytest-cov>=2.8.1 +pytest-randomly>=3.12.0 +mypy>=1.4.1 +types-python-dateutil>=2.8.19 diff --git a/edu_sharing_openapi/test/__init__.py b/edu_sharing_openapi/test/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/edu_sharing_openapi/test/test_about.py b/edu_sharing_openapi/test/test_about.py new file mode 100644 index 00000000..b74e1fbc --- /dev/null +++ b/edu_sharing_openapi/test/test_about.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.about import About + +class TestAbout(unittest.TestCase): + """About unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> About: + """Test About + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `About` + """ + model = About() + if include_optional: + return About( + plugins = [ + edu_sharing_client.models.plugin_info.PluginInfo( + id = '', ) + ], + features = [ + edu_sharing_client.models.feature_info.FeatureInfo( + id = 'handleService', ) + ], + themes_url = '', + last_cache_update = 56, + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + services = [ + edu_sharing_client.models.about_service.AboutService( + name = '', + instances = [ + edu_sharing_client.models.service_instance.ServiceInstance( + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + endpoint = '', ) + ], ) + ] + ) + else: + return About( + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + services = [ + edu_sharing_client.models.about_service.AboutService( + name = '', + instances = [ + edu_sharing_client.models.service_instance.ServiceInstance( + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + endpoint = '', ) + ], ) + ], + ) + """ + + def testAbout(self): + """Test About""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_about_api.py b/edu_sharing_openapi/test/test_about_api.py new file mode 100644 index 00000000..a11c0209 --- /dev/null +++ b/edu_sharing_openapi/test/test_about_api.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.about_api import ABOUTApi + + +class TestABOUTApi(unittest.TestCase): + """ABOUTApi unit test stubs""" + + def setUp(self) -> None: + self.api = ABOUTApi() + + def tearDown(self) -> None: + pass + + def test_about(self) -> None: + """Test case for about + + Discover the API. + """ + pass + + def test_licenses(self) -> None: + """Test case for licenses + + License information. + """ + pass + + def test_status(self) -> None: + """Test case for status + + status of repo services + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_about_service.py b/edu_sharing_openapi/test/test_about_service.py new file mode 100644 index 00000000..b78ce074 --- /dev/null +++ b/edu_sharing_openapi/test/test_about_service.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.about_service import AboutService + +class TestAboutService(unittest.TestCase): + """AboutService unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AboutService: + """Test AboutService + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AboutService` + """ + model = AboutService() + if include_optional: + return AboutService( + name = '', + instances = [ + edu_sharing_client.models.service_instance.ServiceInstance( + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + endpoint = '', ) + ] + ) + else: + return AboutService( + name = '', + instances = [ + edu_sharing_client.models.service_instance.ServiceInstance( + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + endpoint = '', ) + ], + ) + """ + + def testAboutService(self): + """Test AboutService""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_abstract_entries.py b/edu_sharing_openapi/test/test_abstract_entries.py new file mode 100644 index 00000000..ed1b63f4 --- /dev/null +++ b/edu_sharing_openapi/test/test_abstract_entries.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.abstract_entries import AbstractEntries + +class TestAbstractEntries(unittest.TestCase): + """AbstractEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AbstractEntries: + """Test AbstractEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AbstractEntries` + """ + model = AbstractEntries() + if include_optional: + return AbstractEntries( + nodes = [ + None + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ) + ) + else: + return AbstractEntries( + nodes = [ + None + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + ) + """ + + def testAbstractEntries(self): + """Test AbstractEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_ace.py b/edu_sharing_openapi/test/test_ace.py new file mode 100644 index 00000000..08d2a12e --- /dev/null +++ b/edu_sharing_openapi/test/test_ace.py @@ -0,0 +1,97 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.ace import ACE + +class TestACE(unittest.TestCase): + """ACE unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ACE: + """Test ACE + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ACE` + """ + model = ACE() + if include_optional: + return ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ] + ) + else: + return ACE( + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + permissions = [ + '' + ], + ) + """ + + def testACE(self): + """Test ACE""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_acl.py b/edu_sharing_openapi/test/test_acl.py new file mode 100644 index 00000000..fcdcafde --- /dev/null +++ b/edu_sharing_openapi/test/test_acl.py @@ -0,0 +1,128 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API 
+ + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.acl import ACL + +class TestACL(unittest.TestCase): + """ACL unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ACL: + """Test ACL + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ACL` + """ + model = ACL() + if include_optional: + return ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ] + ) + else: + return ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ], + ) + """ + + def testACL(self): + """Test ACL""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_add_to_collection_event_dto.py b/edu_sharing_openapi/test/test_add_to_collection_event_dto.py new file mode 100644 index 00000000..26f2ab86 --- /dev/null +++ b/edu_sharing_openapi/test/test_add_to_collection_event_dto.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.add_to_collection_event_dto import AddToCollectionEventDTO + +class TestAddToCollectionEventDTO(unittest.TestCase): + """AddToCollectionEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AddToCollectionEventDTO: + """Test AddToCollectionEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AddToCollectionEventDTO` + """ + model = AddToCollectionEventDTO() + if include_optional: + return AddToCollectionEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + collection = edu_sharing_client.models.collection_dto.CollectionDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ) + ) + else: + return AddToCollectionEventDTO( + ) + """ + + def testAddToCollectionEventDTO(self): + """Test AddToCollectionEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_admin.py b/edu_sharing_openapi/test/test_admin.py new file mode 100644 index 00000000..8a6d5f5d --- /dev/null +++ b/edu_sharing_openapi/test/test_admin.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.admin import Admin + +class TestAdmin(unittest.TestCase): + """Admin unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Admin: + """Test Admin + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Admin` + """ + model = Admin() + if include_optional: + return Admin( + statistics = edu_sharing_client.models.statistics.Statistics( + entries = [ + edu_sharing_client.models.statistic_entry.StatisticEntry( + property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ], ) + ], ), + editor_type = 'Textarea' + ) + else: + return Admin( + ) + """ + + def testAdmin(self): + """Test Admin""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_admin_statistics.py b/edu_sharing_openapi/test/test_admin_statistics.py new file mode 100644 index 00000000..13cdd3d6 --- /dev/null +++ b/edu_sharing_openapi/test/test_admin_statistics.py @@ -0,0 +1,260 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.admin_statistics import AdminStatistics + +class TestAdminStatistics(unittest.TestCase): + """AdminStatistics unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AdminStatistics: + """Test AdminStatistics + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AdminStatistics` + """ + model = AdminStatistics() + if include_optional: + return AdminStatistics( + active_sessions = 56, + number_of_previews = 56, + max_memory = 56, + allocated_memory = 56, + preview_cache_size = 56, + active_locks = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + 
title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ] + ) + else: + return AdminStatistics( + ) + """ + + def testAdminStatistics(self): + """Test AdminStatistics""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_adminv1_api.py b/edu_sharing_openapi/test/test_adminv1_api.py new file mode 100644 index 00000000..45034a4f --- /dev/null +++ b/edu_sharing_openapi/test/test_adminv1_api.py @@ -0,0 +1,408 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.adminv1_api import ADMINV1Api + + +class TestADMINV1Api(unittest.TestCase): + """ADMINV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = ADMINV1Api() + + def tearDown(self) -> None: + pass + + def test_add_application(self) -> None: + """Test case for add_application + + register/add an application via xml file + """ + pass + + def test_add_application1(self) -> None: + """Test case for add_application1 + + register/add an application + """ + pass + + def test_add_toolpermission(self) -> None: + """Test case for add_toolpermission + + add a new toolpermissions + """ + pass + + def test_apply_template(self) -> None: + """Test case for apply_template + + apply a folder template + """ + pass + + def test_cancel_job(self) -> None: + """Test case for cancel_job + + cancel a running job + """ + pass + + def test_change_logging(self) -> None: + """Test case for change_logging + + Change the loglevel for classes at runtime. 
+ """ + pass + + def test_clear_cache(self) -> None: + """Test case for clear_cache + + clear cache + """ + pass + + def test_create_preview(self) -> None: + """Test case for create_preview + + create preview. + """ + pass + + def test_delete_person(self) -> None: + """Test case for delete_person + + delete persons + """ + pass + + def test_export_by_lucene(self) -> None: + """Test case for export_by_lucene + + Search for custom lucene query and choose specific properties to load + """ + pass + + def test_export_lom(self) -> None: + """Test case for export_lom + + Export Nodes with LOM Metadata Format + """ + pass + + def test_get_all_jobs(self) -> None: + """Test case for get_all_jobs + + get all available jobs + """ + pass + + def test_get_all_toolpermissions(self) -> None: + """Test case for get_all_toolpermissions + + get all toolpermissions for an authority + """ + pass + + def test_get_application_xml(self) -> None: + """Test case for get_application_xml + + list any xml properties (like from homeApplication.properties.xml) + """ + pass + + def test_get_applications(self) -> None: + """Test case for get_applications + + list applications + """ + pass + + def test_get_cache_entries(self) -> None: + """Test case for get_cache_entries + + Get entries of a cache + """ + pass + + def test_get_cache_info(self) -> None: + """Test case for get_cache_info + + Get information about a cache + """ + pass + + def test_get_catalina_out(self) -> None: + """Test case for get_catalina_out + + Get last info from catalina out + """ + pass + + def test_get_cluster(self) -> None: + """Test case for get_cluster + + Get information about the Cluster + """ + pass + + def test_get_clusters(self) -> None: + """Test case for get_clusters + + Get information about the Cluster + """ + pass + + def test_get_config(self) -> None: + """Test case for get_config + + get the repository config object + """ + pass + + def test_get_config_file(self) -> None: + """Test case for get_config_file + + get a base system config file (e.g. 
edu-sharing.conf) + """ + pass + + def test_get_enabled_plugins(self) -> None: + """Test case for get_enabled_plugins + + get enabled system plugins + """ + pass + + def test_get_global_groups(self) -> None: + """Test case for get_global_groups + + Get global groups + """ + pass + + def test_get_jobs(self) -> None: + """Test case for get_jobs + + get all running jobs + """ + pass + + def test_get_lightbend_config(self) -> None: + """Test case for get_lightbend_config + + """ + pass + + def test_get_logging_runtime(self) -> None: + """Test case for get_logging_runtime + + get the logger config + """ + pass + + def test_get_oai_classes(self) -> None: + """Test case for get_oai_classes + + Get OAI class names + """ + pass + + def test_get_property_to_mds(self) -> None: + """Test case for get_property_to_mds + + Get a Mds Valuespace for all values of the given properties + """ + pass + + def test_get_statistics(self) -> None: + """Test case for get_statistics + + get statistics + """ + pass + + def test_get_version(self) -> None: + """Test case for get_version + + get detailed version information + """ + pass + + def test_import_collections(self) -> None: + """Test case for import_collections + + import collections via a xml file + """ + pass + + def test_import_excel(self) -> None: + """Test case for import_excel + + Import excel data + """ + pass + + def test_import_oai(self) -> None: + """Test case for import_oai + + Import oai data + """ + pass + + def test_import_oai_xml(self) -> None: + """Test case for import_oai_xml + + Import single xml via oai (for testing) + """ + pass + + def test_refresh_app_info(self) -> None: + """Test case for refresh_app_info + + refresh app info + """ + pass + + def test_refresh_cache(self) -> None: + """Test case for refresh_cache + + Refresh cache + """ + pass + + def test_refresh_edu_group_cache(self) -> None: + """Test case for refresh_edu_group_cache + + Refresh the Edu Group Cache + """ + pass + + def test_remove_application(self) -> None: + """Test case for remove_application + + remove an application + """ + pass + + def test_remove_cache_entry(self) -> None: + """Test case for remove_cache_entry + + remove cache entry + """ + pass + + def test_remove_oai_imports(self) -> None: + """Test case for remove_oai_imports + + Remove deleted imports + """ + pass + + def test_search_by_elastic_dsl(self) -> None: + """Test case for search_by_elastic_dsl + + Search for custom elastic DSL query + """ + pass + + def test_search_by_lucene(self) -> None: + """Test case for search_by_lucene + + Search for custom lucene query + """ + pass + + def test_server_update_list(self) -> None: + """Test case for server_update_list + + list available update tasks + """ + pass + + def test_server_update_list1(self) -> None: + """Test case for server_update_list1 + + Run an update tasks + """ + pass + + def test_set_config(self) -> None: + """Test case for set_config + + set/update the repository config object + """ + pass + + def test_set_toolpermissions(self) -> None: + """Test case for set_toolpermissions + + set toolpermissions for an authority + """ + pass + + def test_start_job(self) -> None: + """Test case for start_job + + Start a Job. + """ + pass + + def test_start_job_sync(self) -> None: + """Test case for start_job_sync + + Start a Job. 
+ """ + pass + + def test_switch_authority(self) -> None: + """Test case for switch_authority + + switch the session to a known authority name + """ + pass + + def test_test_mail(self) -> None: + """Test case for test_mail + + Test a mail template + """ + pass + + def test_update_application_xml(self) -> None: + """Test case for update_application_xml + + edit any properties xml (like homeApplication.properties.xml) + """ + pass + + def test_update_config_file(self) -> None: + """Test case for update_config_file + + update a base system config file (e.g. edu-sharing.conf) + """ + pass + + def test_upload_temp(self) -> None: + """Test case for upload_temp + + Upload a file + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_application.py b/edu_sharing_openapi/test/test_application.py new file mode 100644 index 00000000..00e53bfb --- /dev/null +++ b/edu_sharing_openapi/test/test_application.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.application import Application + +class TestApplication(unittest.TestCase): + """Application unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Application: + """Test Application + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Application` + """ + model = Application() + if include_optional: + return Application( + id = '', + title = '', + webserver_url = '', + client_base_url = '', + type = '', + subtype = '', + repository_type = '', + xml = '', + file = '', + content_url = '', + config_url = '' + ) + else: + return Application( + ) + """ + + def testApplication(self): + """Test Application""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_archivev1_api.py b/edu_sharing_openapi/test/test_archivev1_api.py new file mode 100644 index 00000000..5ad1fc13 --- /dev/null +++ b/edu_sharing_openapi/test/test_archivev1_api.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.archivev1_api import ARCHIVEV1Api + + +class TestARCHIVEV1Api(unittest.TestCase): + """ARCHIVEV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = ARCHIVEV1Api() + + def tearDown(self) -> None: + pass + + def test_purge(self) -> None: + """Test case for purge + + Searches for archive nodes. + """ + pass + + def test_restore(self) -> None: + """Test case for restore + + restore archived nodes. + """ + pass + + def test_search_archive(self) -> None: + """Test case for search_archive + + Searches for archive nodes. + """ + pass + + def test_search_archive_person(self) -> None: + """Test case for search_archive_person + + Searches for archive nodes. 
+ """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_audience.py b/edu_sharing_openapi/test/test_audience.py new file mode 100644 index 00000000..c4acf471 --- /dev/null +++ b/edu_sharing_openapi/test/test_audience.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.audience import Audience + +class TestAudience(unittest.TestCase): + """Audience unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Audience: + """Test Audience + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Audience` + """ + model = Audience() + if include_optional: + return Audience( + name = '' + ) + else: + return Audience( + ) + """ + + def testAudience(self): + """Test Audience""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_authentication_token.py b/edu_sharing_openapi/test/test_authentication_token.py new file mode 100644 index 00000000..b3b3c5b6 --- /dev/null +++ b/edu_sharing_openapi/test/test_authentication_token.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.authentication_token import AuthenticationToken + +class TestAuthenticationToken(unittest.TestCase): + """AuthenticationToken unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AuthenticationToken: + """Test AuthenticationToken + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AuthenticationToken` + """ + model = AuthenticationToken() + if include_optional: + return AuthenticationToken( + user_id = '', + ticket = '' + ) + else: + return AuthenticationToken( + ) + """ + + def testAuthenticationToken(self): + """Test AuthenticationToken""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_authenticationv1_api.py b/edu_sharing_openapi/test/test_authenticationv1_api.py new file mode 100644 index 00000000..830ad7b2 --- /dev/null +++ b/edu_sharing_openapi/test/test_authenticationv1_api.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.authenticationv1_api import AUTHENTICATIONV1Api + + +class TestAUTHENTICATIONV1Api(unittest.TestCase): + """AUTHENTICATIONV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = AUTHENTICATIONV1Api() + + def tearDown(self) -> None: + pass + + def test_authenticate(self) -> None: + """Test case for authenticate + + authenticate user of an registered application. + """ + pass + + def test_has_access_to_scope(self) -> None: + """Test case for has_access_to_scope + + Returns true if the current user has access to the given scope + """ + pass + + def test_login(self) -> None: + """Test case for login + + Validates the Basic Auth Credentials and check if the session is a logged in user + """ + pass + + def test_login_to_scope(self) -> None: + """Test case for login_to_scope + + Validates the Basic Auth Credentials and check if the session is a logged in user + """ + pass + + def test_logout(self) -> None: + """Test case for logout + + Destroys the current session and logout the user + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_authority.py b/edu_sharing_openapi/test/test_authority.py new file mode 100644 index 00000000..6b72aef6 --- /dev/null +++ b/edu_sharing_openapi/test/test_authority.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.authority import Authority + +class TestAuthority(unittest.TestCase): + """Authority unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Authority: + """Test Authority + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Authority` + """ + model = Authority() + if include_optional: + return Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER' + ) + else: + return Authority( + authority_name = '', + ) + """ + + def testAuthority(self): + """Test Authority""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_authority_entries.py b/edu_sharing_openapi/test/test_authority_entries.py new file mode 100644 index 00000000..51bd8fee --- /dev/null +++ b/edu_sharing_openapi/test/test_authority_entries.py @@ -0,0 +1,80 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.authority_entries import AuthorityEntries + +class TestAuthorityEntries(unittest.TestCase): + """AuthorityEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AuthorityEntries: + """Test AuthorityEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AuthorityEntries` + """ + model = AuthorityEntries() + if include_optional: + return AuthorityEntries( + authorities = [ + edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ) + ) + else: + return AuthorityEntries( + authorities = [ + edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + ) + """ + + def testAuthorityEntries(self): + """Test AuthorityEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_available_mds.py b/edu_sharing_openapi/test/test_available_mds.py new file mode 100644 index 00000000..71687f00 --- /dev/null +++ b/edu_sharing_openapi/test/test_available_mds.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.available_mds import AvailableMds + +class TestAvailableMds(unittest.TestCase): + """AvailableMds unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> AvailableMds: + """Test AvailableMds + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `AvailableMds` + """ + model = AvailableMds() + if include_optional: + return AvailableMds( + repository = '', + mds = [ + '' + ] + ) + else: + return AvailableMds( + ) + """ + + def testAvailableMds(self): + """Test AvailableMds""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_banner.py b/edu_sharing_openapi/test/test_banner.py new file mode 100644 index 00000000..54c534f6 --- /dev/null +++ b/edu_sharing_openapi/test/test_banner.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.banner import Banner + +class TestBanner(unittest.TestCase): + """Banner unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Banner: + """Test Banner + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Banner` + """ + model = Banner() + if include_optional: + return Banner( + url = '', + href = '', + components = [ + '' + ] + ) + else: + return Banner( + ) + """ + + def testBanner(self): + """Test Banner""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_bulkv1_api.py b/edu_sharing_openapi/test/test_bulkv1_api.py new file mode 100644 index 00000000..f94f2ab0 --- /dev/null +++ b/edu_sharing_openapi/test/test_bulkv1_api.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.bulkv1_api import BULKV1Api + + +class TestBULKV1Api(unittest.TestCase): + """BULKV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = BULKV1Api() + + def tearDown(self) -> None: + pass + + def test_find(self) -> None: + """Test case for find + + gets a given node + """ + pass + + def test_sync(self) -> None: + """Test case for sync + + Create or update a given node + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_cache_cluster.py b/edu_sharing_openapi/test/test_cache_cluster.py new file mode 100644 index 00000000..02ba9720 --- /dev/null +++ b/edu_sharing_openapi/test/test_cache_cluster.py @@ -0,0 +1,77 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.cache_cluster import CacheCluster + +class TestCacheCluster(unittest.TestCase): + """CacheCluster unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CacheCluster: + """Test CacheCluster + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CacheCluster` + """ + model = CacheCluster() + if include_optional: + return CacheCluster( + instances = [ + edu_sharing_client.models.cache_member.CacheMember( + name = '', ) + ], + cache_infos = [ + edu_sharing_client.models.cache_info.CacheInfo( + size = 56, + statistic_hits = 56, + name = '', + backup_count = 56, + backup_entry_count = 56, + backup_entry_memory_cost = 56, + heap_cost = 56, + owned_entry_count = 56, + get_owned_entry_memory_cost = 56, + size_in_memory = 56, + member = '', + group_name = '', + max_size = 56, ) + ], + local_member = '', + free_memory = 56, + total_memory = 56, + max_memory = 56, + available_processors = 56, + time_stamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + group_name = '' + ) + else: + return CacheCluster( + ) + """ + + def testCacheCluster(self): + """Test CacheCluster""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_cache_info.py b/edu_sharing_openapi/test/test_cache_info.py new file mode 100644 index 00000000..a45da18a --- /dev/null +++ b/edu_sharing_openapi/test/test_cache_info.py @@ -0,0 +1,63 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.cache_info import CacheInfo + +class TestCacheInfo(unittest.TestCase): + """CacheInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CacheInfo: + """Test CacheInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CacheInfo` + """ + model = CacheInfo() + if include_optional: + return CacheInfo( + size = 56, + statistic_hits = 56, + name = '', + backup_count = 56, + backup_entry_count = 56, + backup_entry_memory_cost = 56, + heap_cost = 56, + owned_entry_count = 56, + get_owned_entry_memory_cost = 56, + size_in_memory = 56, + member = '', + group_name = '', + max_size = 56 + ) + else: + return CacheInfo( + ) + """ + + def testCacheInfo(self): + """Test CacheInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_cache_member.py b/edu_sharing_openapi/test/test_cache_member.py new file mode 100644 index 00000000..91138a32 --- /dev/null +++ b/edu_sharing_openapi/test/test_cache_member.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.cache_member import CacheMember + +class TestCacheMember(unittest.TestCase): + """CacheMember unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CacheMember: + """Test CacheMember + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CacheMember` + """ + model = CacheMember() + if include_optional: + return CacheMember( + name = '' + ) + else: + return CacheMember( + ) + """ + + def testCacheMember(self): + """Test CacheMember""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_catalog.py b/edu_sharing_openapi/test/test_catalog.py new file mode 100644 index 00000000..7912747c --- /dev/null +++ b/edu_sharing_openapi/test/test_catalog.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.catalog import Catalog + +class TestCatalog(unittest.TestCase): + """Catalog unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Catalog: + """Test Catalog + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Catalog` + """ + model = Catalog() + if include_optional: + return Catalog( + name = '', + url = '' + ) + else: + return Catalog( + ) + """ + + def testCatalog(self): + """Test Catalog""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_clientutilsv1_api.py b/edu_sharing_openapi/test/test_clientutilsv1_api.py new file mode 100644 index 00000000..355d8c48 --- /dev/null +++ b/edu_sharing_openapi/test/test_clientutilsv1_api.py @@ -0,0 +1,38 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.clientutilsv1_api import CLIENTUTILSV1Api + + +class TestCLIENTUTILSV1Api(unittest.TestCase): + """CLIENTUTILSV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = CLIENTUTILSV1Api() + + def tearDown(self) -> None: + pass + + def test_get_website_information(self) -> None: + """Test case for get_website_information + + Read generic information about a webpage + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection.py b/edu_sharing_openapi/test/test_collection.py new file mode 100644 index 00000000..f5eacddc --- /dev/null +++ b/edu_sharing_openapi/test/test_collection.py @@ -0,0 +1,72 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection import Collection + +class TestCollection(unittest.TestCase): + """Collection unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Collection: + """Test Collection + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Collection` + """ + model = Collection() + if include_optional: + return Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56 + ) + else: + return Collection( + level0 = True, + title = '', + type = '', + viewtype = '', + from_user = True, + ) + """ + + def testCollection(self): + """Test Collection""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection_counts.py b/edu_sharing_openapi/test/test_collection_counts.py new file mode 100644 index 00000000..a31313af --- /dev/null +++ b/edu_sharing_openapi/test/test_collection_counts.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection_counts import CollectionCounts + +class TestCollectionCounts(unittest.TestCase): + """CollectionCounts unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionCounts: + """Test CollectionCounts + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionCounts` + """ + model = CollectionCounts() + if include_optional: + return CollectionCounts( + refs = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ], + collections = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ] + ) + else: + return CollectionCounts( + ) + """ + + def testCollectionCounts(self): + """Test CollectionCounts""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection_dto.py b/edu_sharing_openapi/test/test_collection_dto.py new file mode 100644 index 00000000..96118b5e --- /dev/null +++ b/edu_sharing_openapi/test/test_collection_dto.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection_dto import CollectionDTO + +class TestCollectionDTO(unittest.TestCase): + """CollectionDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionDTO: + """Test CollectionDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionDTO` + """ + model = CollectionDTO() + if include_optional: + return CollectionDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + } + ) + else: + return CollectionDTO( + ) + """ + + def testCollectionDTO(self): + """Test CollectionDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection_entries.py b/edu_sharing_openapi/test/test_collection_entries.py new file mode 100644 index 00000000..fb1f36ee --- /dev/null +++ b/edu_sharing_openapi/test/test_collection_entries.py @@ -0,0 +1,464 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection_entries import CollectionEntries + +class TestCollectionEntries(unittest.TestCase): + """CollectionEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionEntries: + """Test CollectionEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionEntries` + """ + model = CollectionEntries() + if include_optional: + return CollectionEntries( + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + collections = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = 
'', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ] + ) + else: + return CollectionEntries( + collections = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id 
= '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope 
= '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + ) + """ + + def testCollectionEntries(self): + """Test CollectionEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection_entry.py b/edu_sharing_openapi/test/test_collection_entry.py new file mode 100644 index 00000000..737949b9 --- /dev/null +++ b/edu_sharing_openapi/test/test_collection_entry.py @@ -0,0 +1,456 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection_entry import CollectionEntry + +class TestCollectionEntry(unittest.TestCase): + """CollectionEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionEntry: + """Test CollectionEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionEntry` + """ + model = CollectionEntry() + if include_optional: + return CollectionEntry( + collection = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile 
= edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ) + else: + return CollectionEntry( + collection = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + 
used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + ) + """ + + def testCollectionEntry(self): + """Test CollectionEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = 
self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection_options.py b/edu_sharing_openapi/test/test_collection_options.py new file mode 100644 index 00000000..fb6ff296 --- /dev/null +++ b/edu_sharing_openapi/test/test_collection_options.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection_options import CollectionOptions + +class TestCollectionOptions(unittest.TestCase): + """CollectionOptions unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionOptions: + """Test CollectionOptions + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionOptions` + """ + model = CollectionOptions() + if include_optional: + return CollectionOptions( + private_collections = 'none', + public_collections = 'none' + ) + else: + return CollectionOptions( + ) + """ + + def testCollectionOptions(self): + """Test CollectionOptions""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection_proposal_entries.py b/edu_sharing_openapi/test/test_collection_proposal_entries.py new file mode 100644 index 00000000..d035aa24 --- /dev/null +++ b/edu_sharing_openapi/test/test_collection_proposal_entries.py @@ -0,0 +1,528 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection_proposal_entries import CollectionProposalEntries + +class TestCollectionProposalEntries(unittest.TestCase): + """CollectionProposalEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionProposalEntries: + """Test CollectionProposalEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionProposalEntries` + """ + model = CollectionProposalEntries() + if include_optional: + return CollectionProposalEntries( + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + collections = [ + edu_sharing_client.models.node_collection_proposal_count.NodeCollectionProposalCount( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = 
'', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = , + proposal_counts = { + 'key' : 56 + }, + proposal_count = { + 'key' : 56 + }, + ref = , + parent = , + type = '', + aspects = , + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = , + download_url = '', + properties = , + mimetype = '', + mediatype = '', + size = '', + preview = , + icon_url = '', + collection = , + owner = , + is_public = True, ) + ] + ) + else: + return CollectionProposalEntries( + collections = [ + edu_sharing_client.models.node_collection_proposal_count.NodeCollectionProposalCount( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + 
comment_count = 56, + relations = { + 'key' : edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = , + proposal_counts = { + 'key' : 56 + }, + proposal_count = { + 'key' : 56 + }, + ref = , + parent = , + type = '', + aspects = , + name = '', + title = '', + metadataset = '', + 
repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = , + download_url = '', + properties = , + mimetype = '', + mediatype = '', + size = '', + preview = , + icon_url = '', + collection = , + owner = , + is_public = True, ) + ], + ) + """ + + def testCollectionProposalEntries(self): + """Test CollectionProposalEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collection_reference.py b/edu_sharing_openapi/test/test_collection_reference.py new file mode 100644 index 00000000..0b3d241e --- /dev/null +++ b/edu_sharing_openapi/test/test_collection_reference.py @@ -0,0 +1,703 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collection_reference import CollectionReference + +class TestCollectionReference(unittest.TestCase): + """CollectionReference unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionReference: + """Test CollectionReference + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionReference` + """ + model = CollectionReference() + if include_optional: + return CollectionReference( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 
56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = 
True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 
19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + access_original = [ + '' + ], + original_restricted_access = True, + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + type = '', + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + original_id = '', + is_public = True + ) + else: + 
return CollectionReference( + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + name = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + ) + """ + + def testCollectionReference(self): + """Test CollectionReference""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collections.py b/edu_sharing_openapi/test/test_collections.py new file mode 100644 index 00000000..f8fa5131 --- /dev/null +++ b/edu_sharing_openapi/test/test_collections.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collections import Collections + +class TestCollections(unittest.TestCase): + """Collections unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Collections: + """Test Collections + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Collections` + """ + model = Collections() + if include_optional: + return Collections( + colors = [ + '' + ] + ) + else: + return Collections( + ) + """ + + def testCollections(self): + """Test Collections""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collections_result.py b/edu_sharing_openapi/test/test_collections_result.py new file mode 100644 index 00000000..d3b4ed15 --- /dev/null +++ b/edu_sharing_openapi/test/test_collections_result.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.collections_result import CollectionsResult + +class TestCollectionsResult(unittest.TestCase): + """CollectionsResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CollectionsResult: + """Test CollectionsResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CollectionsResult` + """ + model = CollectionsResult() + if include_optional: + return CollectionsResult( + count = 56 + ) + else: + return CollectionsResult( + ) + """ + + def testCollectionsResult(self): + """Test CollectionsResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_collectionv1_api.py b/edu_sharing_openapi/test/test_collectionv1_api.py new file mode 100644 index 00000000..a2f77412 --- /dev/null +++ b/edu_sharing_openapi/test/test_collectionv1_api.py @@ -0,0 +1,136 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.collectionv1_api import COLLECTIONV1Api + + +class TestCOLLECTIONV1Api(unittest.TestCase): + """COLLECTIONV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = COLLECTIONV1Api() + + def tearDown(self) -> None: + pass + + def test_add_to_collection(self) -> None: + """Test case for add_to_collection + + Add a node to a collection. + """ + pass + + def test_change_icon_of_collection(self) -> None: + """Test case for change_icon_of_collection + + Writes Preview Image of a collection. + """ + pass + + def test_create_collection(self) -> None: + """Test case for create_collection + + Create a new collection. + """ + pass + + def test_delete_collection(self) -> None: + """Test case for delete_collection + + Delete a collection. + """ + pass + + def test_delete_from_collection(self) -> None: + """Test case for delete_from_collection + + Delete a node from a collection. + """ + pass + + def test_get_collection(self) -> None: + """Test case for get_collection + + Get a collection. + """ + pass + + def test_get_collections_containing_proposals(self) -> None: + """Test case for get_collections_containing_proposals + + Get all collections containing proposals with a given state (via search index) + """ + pass + + def test_get_collections_proposals(self) -> None: + """Test case for get_collections_proposals + + Get proposed objects for collection (requires edit permissions on collection). + """ + pass + + def test_get_collections_references(self) -> None: + """Test case for get_collections_references + + Get references objects for collection. + """ + pass + + def test_get_collections_subcollections(self) -> None: + """Test case for get_collections_subcollections + + Get child collections for collection (or root). 
+ """ + pass + + def test_remove_icon_of_collection(self) -> None: + """Test case for remove_icon_of_collection + + Deletes Preview Image of a collection. + """ + pass + + def test_search_collections(self) -> None: + """Test case for search_collections + + Search collections. + """ + pass + + def test_set_collection_order(self) -> None: + """Test case for set_collection_order + + Set order of nodes in a collection. In order to work as expected, provide a list of all nodes in this collection + """ + pass + + def test_set_pinned_collections(self) -> None: + """Test case for set_pinned_collections + + Set pinned collections. + """ + pass + + def test_update_collection(self) -> None: + """Test case for update_collection + + Update a collection. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_comment.py b/edu_sharing_openapi/test/test_comment.py new file mode 100644 index 00000000..371c725f --- /dev/null +++ b/edu_sharing_openapi/test/test_comment.py @@ -0,0 +1,118 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.comment import Comment + +class TestComment(unittest.TestCase): + """Comment unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Comment: + """Test Comment + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Comment` + """ + model = Comment() + if include_optional: + return Comment( + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + reply_to = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + creator = edu_sharing_client.models.user_simple.UserSimple( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), ), + created = 56, + comment = '' + ) + else: + return Comment( + ) + """ + + def testComment(self): + """Test Comment""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if 
__name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_comment_event_dto.py b/edu_sharing_openapi/test/test_comment_event_dto.py new file mode 100644 index 00000000..3aebbd06 --- /dev/null +++ b/edu_sharing_openapi/test/test_comment_event_dto.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.comment_event_dto import CommentEventDTO + +class TestCommentEventDTO(unittest.TestCase): + """CommentEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CommentEventDTO: + """Test CommentEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CommentEventDTO` + """ + model = CommentEventDTO() + if include_optional: + return CommentEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + comment_content = '', + comment_reference = '', + event = '' + ) + else: + return CommentEventDTO( + ) + """ + + def testCommentEventDTO(self): + """Test CommentEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_comments.py b/edu_sharing_openapi/test/test_comments.py new file mode 100644 index 00000000..576f2855 --- /dev/null +++ b/edu_sharing_openapi/test/test_comments.py @@ -0,0 +1,112 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.comments import Comments + +class TestComments(unittest.TestCase): + """Comments unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Comments: + """Test Comments + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Comments` + """ + model = Comments() + if include_optional: + return Comments( + comments = [ + edu_sharing_client.models.comment.Comment( + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + reply_to = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + creator = edu_sharing_client.models.user_simple.UserSimple( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = , ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), ), + created = 56, + comment = '', ) + ] + ) + else: + return Comments( + ) + """ + + def testComments(self): + """Test Comments""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_commentv1_api.py b/edu_sharing_openapi/test/test_commentv1_api.py new file mode 100644 index 00000000..941fd529 --- /dev/null +++ b/edu_sharing_openapi/test/test_commentv1_api.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.commentv1_api import COMMENTV1Api + + +class TestCOMMENTV1Api(unittest.TestCase): + """COMMENTV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = COMMENTV1Api() + + def tearDown(self) -> None: + pass + + def test_add_comment(self) -> None: + """Test case for add_comment + + create a new comment + """ + pass + + def test_delete_comment(self) -> None: + """Test case for delete_comment + + delete a comment + """ + pass + + def test_edit_comment(self) -> None: + """Test case for edit_comment + + edit a comment + """ + pass + + def test_get_comments(self) -> None: + """Test case for get_comments + + list comments + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_condition.py b/edu_sharing_openapi/test/test_condition.py new file mode 100644 index 00000000..2445d616 --- /dev/null +++ b/edu_sharing_openapi/test/test_condition.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.condition import Condition + +class TestCondition(unittest.TestCase): + """Condition unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Condition: + """Test Condition + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Condition` + """ + model = Condition() + if include_optional: + return Condition( + type = 'TOOLPERMISSION', + negate = True, + value = '' + ) + else: + return Condition( + ) + """ + + def testCondition(self): + """Test Condition""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config.py b/edu_sharing_openapi/test/test_config.py new file mode 100644 index 00000000..f8cbedf1 --- /dev/null +++ b/edu_sharing_openapi/test/test_config.py @@ -0,0 +1,554 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config import Config + +class TestConfig(unittest.TestCase): + """Config unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Config: + """Test Config + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Config` + """ + model = Config() + if include_optional: + return Config( + current = edu_sharing_client.models.values.Values( + supported_languages = [ + '' + ], + extension = '', + login_url = '', + login_allow_local = True, + login_providers_url = '', + login_provider_target_url = '', + register = edu_sharing_client.models.register.Register( + local = True, + recover_password = True, + login_url = '', + recover_url = '', + required_fields = [ + '' + ], ), + recover_password_url = '', + imprint_url = '', + privacy_information_url = '', + help_url = '', + whats_new_url = '', + edit_profile_url = '', + edit_profile = True, + workspace_columns = [ + '' + ], + workspace_shared_to_me_default_all = True, + hide_main_menu = [ + '' + ], + logout = edu_sharing_client.models.logout_info.LogoutInfo( + url = '', + destroy_session = True, + ajax = True, + next = '', ), + menu_entries = [ + edu_sharing_client.models.menu_entry.MenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + path = '', + scope = '', ) + ], + custom_options = [ + edu_sharing_client.models.context_menu_entry.ContextMenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + mode = '', + scopes = [ + 'Render' + ], + ajax = True, + group = '', + permission = '', + toolpermission = '', + is_directory = True, + show_as_action = True, + multiple = True, + change_strategy = 'update', ) + ], + user_menu_overrides = [ + edu_sharing_client.models.context_menu_entry.ContextMenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + mode = '', + ajax = True, + group = '', + permission = '', + toolpermission = '', + is_directory = True, + show_as_action = True, + multiple = True, + change_strategy = 'update', ) + ], + allowed_licenses = [ + '' + ], + custom_licenses = [ + edu_sharing_client.models.license.License( + icon = '', + url = '', ) + ], + workflow = edu_sharing_client.models.config_workflow.ConfigWorkflow( + default_receiver = '', + default_status = '', + comment_required = True, + workflows = [ + edu_sharing_client.models.config_workflow_list.ConfigWorkflowList( + id = '', + color = '', + has_receiver = True, + next = [ + '' + ], ) + ], ), + license_dialog_on_upload = True, + node_report = True, + branding = True, + rating = edu_sharing_client.models.config_rating.ConfigRating( + mode = 'none', ), + publishing_notice = True, + site_title = '', + user_display_name = '', + user_secondary_display_name = '', + user_affiliation = True, + default_username = '', + default_password = '', + banner = edu_sharing_client.models.banner.Banner( + url = '', + href = '', + components = [ + '' + ], ), + available_mds = [ + 
edu_sharing_client.models.available_mds.AvailableMds( + repository = '', + mds = [ + '' + ], ) + ], + available_repositories = [ + '' + ], + search_view_type = 56, + workspace_view_type = 56, + items_per_request = 56, + rendering = edu_sharing_client.models.rendering.Rendering( + show_preview = True, + show_download_button = True, + prerender = True, + gdpr = [ + edu_sharing_client.models.rendering_gdpr.RenderingGdpr( + matcher = '', + name = '', + privacy_information_url = '', ) + ], ), + session_expired_dialog = edu_sharing_client.models.session_expired_dialog.SessionExpiredDialog(), + login_default_location = '', + search_group_results = True, + mainnav = edu_sharing_client.models.mainnav.Mainnav( + icon = edu_sharing_client.models.icon.Icon( + url = '', ), + main_menu_style = '', ), + search_sidenav_mode = '', + guest = edu_sharing_client.models.guest.Guest( + enabled = True, ), + collections = edu_sharing_client.models.collections.Collections( + colors = [ + '' + ], ), + license_agreement = edu_sharing_client.models.license_agreement.LicenseAgreement( + node_id = [ + edu_sharing_client.models.license_agreement_node.LicenseAgreementNode( + language = '', + value = '', ) + ], ), + services = edu_sharing_client.models.services.Services( + visualization = '', ), + help_menu_options = [ + edu_sharing_client.models.help_menu_options.HelpMenuOptions( + key = '', + url = '', ) + ], + images = [ + edu_sharing_client.models.image.Image( + src = '', + replace = '', ) + ], + icons = [ + edu_sharing_client.models.font_icon.FontIcon( + original = '', + replace = '', + css_class = '', ) + ], + stream = edu_sharing_client.models.stream.Stream( + enabled = True, ), + admin = edu_sharing_client.models.admin.Admin( + statistics = edu_sharing_client.models.statistics.Statistics( + entries = [ + edu_sharing_client.models.statistic_entry.StatisticEntry( + property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ], ) + ], ), + editor_type = 'Textarea', ), + simple_edit = edu_sharing_client.models.simple_edit.SimpleEdit( + global_groups = [ + edu_sharing_client.models.simple_edit_global_groups.SimpleEditGlobalGroups( + toolpermission = '', + groups = [ + '' + ], ) + ], + organization = edu_sharing_client.models.simple_edit_organization.SimpleEditOrganization( + group_types = [ + '' + ], ), + organization_filter = '', + licenses = [ + '' + ], ), + frontpage = edu_sharing_client.models.config_frontpage.ConfigFrontpage( + enabled = True, ), + upload = edu_sharing_client.models.config_upload.ConfigUpload( + post_dialog = 'SimpleEdit', ), + publish = edu_sharing_client.models.config_publish.ConfigPublish( + license_mandatory = True, + author_mandatory = True, ), + remote = edu_sharing_client.models.config_remote.ConfigRemote( + rocketchat = edu_sharing_client.models.config_remote_rocketchat.ConfigRemoteRocketchat(), ), + custom_css = '', + theme_colors = edu_sharing_client.models.config_theme_colors.ConfigThemeColors( + color = [ + edu_sharing_client.models.config_theme_color.ConfigThemeColor( + variable = '', + value = '', ) + ], ), + privacy = edu_sharing_client.models.config_privacy.ConfigPrivacy( + cookie_disclaimer = True, ), + tutorial = edu_sharing_client.models.config_tutorial.ConfigTutorial( + enabled = True, ), ), + var_global = edu_sharing_client.models.values.Values( + supported_languages = [ + '' + ], + extension = '', + login_url = '', + login_allow_local = True, + login_providers_url = '', + login_provider_target_url = '', + 
register = edu_sharing_client.models.register.Register( + local = True, + recover_password = True, + login_url = '', + recover_url = '', + required_fields = [ + '' + ], ), + recover_password_url = '', + imprint_url = '', + privacy_information_url = '', + help_url = '', + whats_new_url = '', + edit_profile_url = '', + edit_profile = True, + workspace_columns = [ + '' + ], + workspace_shared_to_me_default_all = True, + hide_main_menu = [ + '' + ], + logout = edu_sharing_client.models.logout_info.LogoutInfo( + url = '', + destroy_session = True, + ajax = True, + next = '', ), + menu_entries = [ + edu_sharing_client.models.menu_entry.MenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + path = '', + scope = '', ) + ], + custom_options = [ + edu_sharing_client.models.context_menu_entry.ContextMenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + mode = '', + scopes = [ + 'Render' + ], + ajax = True, + group = '', + permission = '', + toolpermission = '', + is_directory = True, + show_as_action = True, + multiple = True, + change_strategy = 'update', ) + ], + user_menu_overrides = [ + edu_sharing_client.models.context_menu_entry.ContextMenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + mode = '', + ajax = True, + group = '', + permission = '', + toolpermission = '', + is_directory = True, + show_as_action = True, + multiple = True, + change_strategy = 'update', ) + ], + allowed_licenses = [ + '' + ], + custom_licenses = [ + edu_sharing_client.models.license.License( + icon = '', + url = '', ) + ], + workflow = edu_sharing_client.models.config_workflow.ConfigWorkflow( + default_receiver = '', + default_status = '', + comment_required = True, + workflows = [ + edu_sharing_client.models.config_workflow_list.ConfigWorkflowList( + id = '', + color = '', + has_receiver = True, + next = [ + '' + ], ) + ], ), + license_dialog_on_upload = True, + node_report = True, + branding = True, + rating = edu_sharing_client.models.config_rating.ConfigRating( + mode = 'none', ), + publishing_notice = True, + site_title = '', + user_display_name = '', + user_secondary_display_name = '', + user_affiliation = True, + default_username = '', + default_password = '', + banner = edu_sharing_client.models.banner.Banner( + url = '', + href = '', + components = [ + '' + ], ), + available_mds = [ + edu_sharing_client.models.available_mds.AvailableMds( + repository = '', + mds = [ + '' + ], ) + ], + available_repositories = [ + '' + ], + search_view_type = 56, + workspace_view_type = 56, + items_per_request = 56, + rendering = edu_sharing_client.models.rendering.Rendering( + show_preview = True, + show_download_button = True, + prerender = True, + gdpr = [ + edu_sharing_client.models.rendering_gdpr.RenderingGdpr( + matcher = '', + name = '', + privacy_information_url = '', ) + ], ), + session_expired_dialog = edu_sharing_client.models.session_expired_dialog.SessionExpiredDialog(), + login_default_location = '', + search_group_results = True, + mainnav = edu_sharing_client.models.mainnav.Mainnav( + icon = edu_sharing_client.models.icon.Icon( + url = '', ), + main_menu_style = '', 
), + search_sidenav_mode = '', + guest = edu_sharing_client.models.guest.Guest( + enabled = True, ), + collections = edu_sharing_client.models.collections.Collections( + colors = [ + '' + ], ), + license_agreement = edu_sharing_client.models.license_agreement.LicenseAgreement( + node_id = [ + edu_sharing_client.models.license_agreement_node.LicenseAgreementNode( + language = '', + value = '', ) + ], ), + services = edu_sharing_client.models.services.Services( + visualization = '', ), + help_menu_options = [ + edu_sharing_client.models.help_menu_options.HelpMenuOptions( + key = '', + url = '', ) + ], + images = [ + edu_sharing_client.models.image.Image( + src = '', + replace = '', ) + ], + icons = [ + edu_sharing_client.models.font_icon.FontIcon( + original = '', + replace = '', + css_class = '', ) + ], + stream = edu_sharing_client.models.stream.Stream( + enabled = True, ), + admin = edu_sharing_client.models.admin.Admin( + statistics = edu_sharing_client.models.statistics.Statistics( + entries = [ + edu_sharing_client.models.statistic_entry.StatisticEntry( + property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ], ) + ], ), + editor_type = 'Textarea', ), + simple_edit = edu_sharing_client.models.simple_edit.SimpleEdit( + global_groups = [ + edu_sharing_client.models.simple_edit_global_groups.SimpleEditGlobalGroups( + toolpermission = '', + groups = [ + '' + ], ) + ], + organization = edu_sharing_client.models.simple_edit_organization.SimpleEditOrganization( + group_types = [ + '' + ], ), + organization_filter = '', + licenses = [ + '' + ], ), + frontpage = edu_sharing_client.models.config_frontpage.ConfigFrontpage( + enabled = True, ), + upload = edu_sharing_client.models.config_upload.ConfigUpload( + post_dialog = 'SimpleEdit', ), + publish = edu_sharing_client.models.config_publish.ConfigPublish( + license_mandatory = True, + author_mandatory = True, ), + remote = edu_sharing_client.models.config_remote.ConfigRemote( + rocketchat = edu_sharing_client.models.config_remote_rocketchat.ConfigRemoteRocketchat(), ), + custom_css = '', + theme_colors = edu_sharing_client.models.config_theme_colors.ConfigThemeColors( + color = [ + edu_sharing_client.models.config_theme_color.ConfigThemeColor( + variable = '', + value = '', ) + ], ), + privacy = edu_sharing_client.models.config_privacy.ConfigPrivacy( + cookie_disclaimer = True, ), + tutorial = edu_sharing_client.models.config_tutorial.ConfigTutorial( + enabled = True, ), ), + language = edu_sharing_client.models.language.Language( + global = { + 'key' : '' + }, + current = { + 'key' : '' + }, + current_language = '', ) + ) + else: + return Config( + ) + """ + + def testConfig(self): + """Test Config""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_frontpage.py b/edu_sharing_openapi/test/test_config_frontpage.py new file mode 100644 index 00000000..83d0a5bd --- /dev/null +++ b/edu_sharing_openapi/test/test_config_frontpage.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_frontpage import ConfigFrontpage + +class TestConfigFrontpage(unittest.TestCase): + """ConfigFrontpage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigFrontpage: + """Test ConfigFrontpage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigFrontpage` + """ + model = ConfigFrontpage() + if include_optional: + return ConfigFrontpage( + enabled = True + ) + else: + return ConfigFrontpage( + ) + """ + + def testConfigFrontpage(self): + """Test ConfigFrontpage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_privacy.py b/edu_sharing_openapi/test/test_config_privacy.py new file mode 100644 index 00000000..dc6a2b1a --- /dev/null +++ b/edu_sharing_openapi/test/test_config_privacy.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_privacy import ConfigPrivacy + +class TestConfigPrivacy(unittest.TestCase): + """ConfigPrivacy unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigPrivacy: + """Test ConfigPrivacy + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigPrivacy` + """ + model = ConfigPrivacy() + if include_optional: + return ConfigPrivacy( + cookie_disclaimer = True + ) + else: + return ConfigPrivacy( + ) + """ + + def testConfigPrivacy(self): + """Test ConfigPrivacy""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_publish.py b/edu_sharing_openapi/test/test_config_publish.py new file mode 100644 index 00000000..7d4135c3 --- /dev/null +++ b/edu_sharing_openapi/test/test_config_publish.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_publish import ConfigPublish + +class TestConfigPublish(unittest.TestCase): + """ConfigPublish unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigPublish: + """Test ConfigPublish + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigPublish` + """ + model = ConfigPublish() + if include_optional: + return ConfigPublish( + license_mandatory = True, + author_mandatory = True + ) + else: + return ConfigPublish( + ) + """ + + def testConfigPublish(self): + """Test ConfigPublish""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_rating.py b/edu_sharing_openapi/test/test_config_rating.py new file mode 100644 index 00000000..d29056f8 --- /dev/null +++ b/edu_sharing_openapi/test/test_config_rating.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_rating import ConfigRating + +class TestConfigRating(unittest.TestCase): + """ConfigRating unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigRating: + """Test ConfigRating + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigRating` + """ + model = ConfigRating() + if include_optional: + return ConfigRating( + mode = 'none' + ) + else: + return ConfigRating( + ) + """ + + def testConfigRating(self): + """Test ConfigRating""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_remote.py b/edu_sharing_openapi/test/test_config_remote.py new file mode 100644 index 00000000..7152692a --- /dev/null +++ b/edu_sharing_openapi/test/test_config_remote.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_remote import ConfigRemote + +class TestConfigRemote(unittest.TestCase): + """ConfigRemote unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigRemote: + """Test ConfigRemote + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigRemote` + """ + model = ConfigRemote() + if include_optional: + return ConfigRemote( + rocketchat = edu_sharing_client.models.config_remote_rocketchat.ConfigRemoteRocketchat() + ) + else: + return ConfigRemote( + ) + """ + + def testConfigRemote(self): + """Test ConfigRemote""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_theme_color.py b/edu_sharing_openapi/test/test_config_theme_color.py new file mode 100644 index 00000000..eb81fc64 --- /dev/null +++ b/edu_sharing_openapi/test/test_config_theme_color.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_theme_color import ConfigThemeColor + +class TestConfigThemeColor(unittest.TestCase): + """ConfigThemeColor unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigThemeColor: + """Test ConfigThemeColor + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigThemeColor` + """ + model = ConfigThemeColor() + if include_optional: + return ConfigThemeColor( + variable = '', + value = '' + ) + else: + return ConfigThemeColor( + ) + """ + + def testConfigThemeColor(self): + """Test ConfigThemeColor""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_theme_colors.py b/edu_sharing_openapi/test/test_config_theme_colors.py new file mode 100644 index 00000000..cc8a8206 --- /dev/null +++ b/edu_sharing_openapi/test/test_config_theme_colors.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_theme_colors import ConfigThemeColors + +class TestConfigThemeColors(unittest.TestCase): + """ConfigThemeColors unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigThemeColors: + """Test ConfigThemeColors + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigThemeColors` + """ + model = ConfigThemeColors() + if include_optional: + return ConfigThemeColors( + color = [ + edu_sharing_client.models.config_theme_color.ConfigThemeColor( + variable = '', + value = '', ) + ] + ) + else: + return ConfigThemeColors( + ) + """ + + def testConfigThemeColors(self): + """Test ConfigThemeColors""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_tutorial.py b/edu_sharing_openapi/test/test_config_tutorial.py new file mode 100644 index 00000000..4fcfa649 --- /dev/null +++ b/edu_sharing_openapi/test/test_config_tutorial.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_tutorial import ConfigTutorial + +class TestConfigTutorial(unittest.TestCase): + """ConfigTutorial unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigTutorial: + """Test ConfigTutorial + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigTutorial` + """ + model = ConfigTutorial() + if include_optional: + return ConfigTutorial( + enabled = True + ) + else: + return ConfigTutorial( + ) + """ + + def testConfigTutorial(self): + """Test ConfigTutorial""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_upload.py b/edu_sharing_openapi/test/test_config_upload.py new file mode 100644 index 00000000..4d0d5c56 --- /dev/null +++ b/edu_sharing_openapi/test/test_config_upload.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_upload import ConfigUpload + +class TestConfigUpload(unittest.TestCase): + """ConfigUpload unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigUpload: + """Test ConfigUpload + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigUpload` + """ + model = ConfigUpload() + if include_optional: + return ConfigUpload( + post_dialog = 'SimpleEdit' + ) + else: + return ConfigUpload( + ) + """ + + def testConfigUpload(self): + """Test ConfigUpload""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_workflow.py b/edu_sharing_openapi/test/test_config_workflow.py new file mode 100644 index 00000000..19d3cf19 --- /dev/null +++ b/edu_sharing_openapi/test/test_config_workflow.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_workflow import ConfigWorkflow + +class TestConfigWorkflow(unittest.TestCase): + """ConfigWorkflow unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigWorkflow: + """Test ConfigWorkflow + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigWorkflow` + """ + model = ConfigWorkflow() + if include_optional: + return ConfigWorkflow( + default_receiver = '', + default_status = '', + comment_required = True, + workflows = [ + edu_sharing_client.models.config_workflow_list.ConfigWorkflowList( + id = '', + color = '', + has_receiver = True, + next = [ + '' + ], ) + ] + ) + else: + return ConfigWorkflow( + ) + """ + + def testConfigWorkflow(self): + """Test ConfigWorkflow""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_config_workflow_list.py b/edu_sharing_openapi/test/test_config_workflow_list.py new file mode 100644 index 00000000..4335197b --- /dev/null +++ b/edu_sharing_openapi/test/test_config_workflow_list.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.config_workflow_list import ConfigWorkflowList + +class TestConfigWorkflowList(unittest.TestCase): + """ConfigWorkflowList unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConfigWorkflowList: + """Test ConfigWorkflowList + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConfigWorkflowList` + """ + model = ConfigWorkflowList() + if include_optional: + return ConfigWorkflowList( + id = '', + color = '', + has_receiver = True, + next = [ + '' + ] + ) + else: + return ConfigWorkflowList( + ) + """ + + def testConfigWorkflowList(self): + """Test ConfigWorkflowList""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_configv1_api.py b/edu_sharing_openapi/test/test_configv1_api.py new file mode 100644 index 00000000..f6d8fd43 --- /dev/null +++ b/edu_sharing_openapi/test/test_configv1_api.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.configv1_api import CONFIGV1Api + + +class TestCONFIGV1Api(unittest.TestCase): + """CONFIGV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = CONFIGV1Api() + + def tearDown(self) -> None: + pass + + def test_get_config1(self) -> None: + """Test case for get_config1 + + get repository config values + """ + pass + + def test_get_dynamic_value(self) -> None: + """Test case for get_dynamic_value + + Get a config entry (appropriate rights for the entry are required) + """ + pass + + def test_get_language(self) -> None: + """Test case for get_language + + get override strings for the current language + """ + pass + + def test_get_language_defaults(self) -> None: + """Test case for get_language_defaults + + get all inital language strings for angular + """ + pass + + def test_get_variables(self) -> None: + """Test case for get_variables + + get global config variables + """ + pass + + def test_set_dynamic_value(self) -> None: + """Test case for set_dynamic_value + + Set a config entry (admin rights required) + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_connector.py b/edu_sharing_openapi/test/test_connector.py new file mode 100644 index 00000000..b234ef58 --- /dev/null +++ b/edu_sharing_openapi/test/test_connector.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.connector import Connector + +class TestConnector(unittest.TestCase): + """Connector unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Connector: + """Test Connector + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Connector` + """ + model = Connector() + if include_optional: + return Connector( + id = '', + icon = '', + show_new = True, + parameters = [ + '' + ], + filetypes = [ + edu_sharing_client.models.connector_file_type.ConnectorFileType( + ccressourceversion = '', + ccressourcetype = '', + ccresourcesubtype = '', + editor_type = '', + mimetype = '', + filetype = '', + creatable = True, + editable = True, ) + ], + only_desktop = True, + has_view_mode = True + ) + else: + return Connector( + show_new = True, + ) + """ + + def testConnector(self): + """Test Connector""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_connector_file_type.py b/edu_sharing_openapi/test/test_connector_file_type.py new file mode 100644 index 00000000..e60589b8 --- /dev/null +++ b/edu_sharing_openapi/test/test_connector_file_type.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.connector_file_type import ConnectorFileType + +class TestConnectorFileType(unittest.TestCase): + """ConnectorFileType unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConnectorFileType: + """Test ConnectorFileType + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConnectorFileType` + """ + model = ConnectorFileType() + if include_optional: + return ConnectorFileType( + ccressourceversion = '', + ccressourcetype = '', + ccresourcesubtype = '', + editor_type = '', + mimetype = '', + filetype = '', + creatable = True, + editable = True + ) + else: + return ConnectorFileType( + ) + """ + + def testConnectorFileType(self): + """Test ConnectorFileType""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_connector_list.py b/edu_sharing_openapi/test/test_connector_list.py new file mode 100644 index 00000000..264a4678 --- /dev/null +++ b/edu_sharing_openapi/test/test_connector_list.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.connector_list import ConnectorList + +class TestConnectorList(unittest.TestCase): + """ConnectorList unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ConnectorList: + """Test ConnectorList + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ConnectorList` + """ + model = ConnectorList() + if include_optional: + return ConnectorList( + url = '', + connectors = [ + edu_sharing_client.models.connector.Connector( + id = '', + icon = '', + show_new = True, + parameters = [ + '' + ], + filetypes = [ + edu_sharing_client.models.connector_file_type.ConnectorFileType( + ccressourceversion = '', + ccressourcetype = '', + ccresourcesubtype = '', + editor_type = '', + mimetype = '', + filetype = '', + creatable = True, + editable = True, ) + ], + only_desktop = True, + has_view_mode = True, ) + ] + ) + else: + return ConnectorList( + ) + """ + + def testConnectorList(self): + """Test ConnectorList""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_connectorv1_api.py b/edu_sharing_openapi/test/test_connectorv1_api.py new file mode 100644 index 00000000..e942283b --- /dev/null +++ b/edu_sharing_openapi/test/test_connectorv1_api.py @@ -0,0 +1,38 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.connectorv1_api import CONNECTORV1Api + + +class TestCONNECTORV1Api(unittest.TestCase): + """CONNECTORV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = CONNECTORV1Api() + + def tearDown(self) -> None: + pass + + def test_list_connectors(self) -> None: + """Test case for list_connectors + + List all available connectors + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_content.py b/edu_sharing_openapi/test/test_content.py new file mode 100644 index 00000000..7661be27 --- /dev/null +++ b/edu_sharing_openapi/test/test_content.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.content import Content + +class TestContent(unittest.TestCase): + """Content unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Content: + """Test Content + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Content` + """ + model = Content() + if include_optional: + return Content( + url = '', + hash = '', + version = '' + ) + else: + return Content( + ) + """ + + def testContent(self): + """Test Content""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_context_menu_entry.py b/edu_sharing_openapi/test/test_context_menu_entry.py new file mode 100644 index 00000000..2efdfa4a --- /dev/null +++ b/edu_sharing_openapi/test/test_context_menu_entry.py @@ -0,0 +1,72 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.context_menu_entry import ContextMenuEntry + +class TestContextMenuEntry(unittest.TestCase): + """ContextMenuEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ContextMenuEntry: + """Test ContextMenuEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ContextMenuEntry` + """ + model = ContextMenuEntry() + if include_optional: + return ContextMenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + mode = '', + scopes = [ + 'Render' + ], + ajax = True, + group = '', + permission = '', + toolpermission = '', + is_directory = True, + show_as_action = True, + multiple = True, + change_strategy = 'update' + ) + else: + return ContextMenuEntry( + ) + """ + + def testContextMenuEntry(self): + """Test ContextMenuEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_contributor.py b/edu_sharing_openapi/test/test_contributor.py new file mode 100644 index 00000000..d5b4344f --- /dev/null +++ b/edu_sharing_openapi/test/test_contributor.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.contributor import Contributor + +class TestContributor(unittest.TestCase): + """Contributor unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Contributor: + """Test Contributor + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Contributor` + """ + model = Contributor() + if include_optional: + return Contributor( + var_property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '' + ) + else: + return Contributor( + ) + """ + + def testContributor(self): + """Test Contributor""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_counts.py b/edu_sharing_openapi/test/test_counts.py new file mode 100644 index 00000000..aba38fcf --- /dev/null +++ b/edu_sharing_openapi/test/test_counts.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.counts import Counts + +class TestCounts(unittest.TestCase): + """Counts unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Counts: + """Test Counts + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Counts` + """ + model = Counts() + if include_optional: + return Counts( + elements = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ] + ) + else: + return Counts( + ) + """ + + def testCounts(self): + """Test Counts""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_create.py b/edu_sharing_openapi/test/test_create.py new file mode 100644 index 00000000..22648ff0 --- /dev/null +++ b/edu_sharing_openapi/test/test_create.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.create import Create + +class TestCreate(unittest.TestCase): + """Create unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Create: + """Test Create + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Create` + """ + model = Create() + if include_optional: + return Create( + only_metadata = True + ) + else: + return Create( + ) + """ + + def testCreate(self): + """Test Create""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_create_usage.py b/edu_sharing_openapi/test/test_create_usage.py new file mode 100644 index 00000000..49f53780 --- /dev/null +++ b/edu_sharing_openapi/test/test_create_usage.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.create_usage import CreateUsage + +class TestCreateUsage(unittest.TestCase): + """CreateUsage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> CreateUsage: + """Test CreateUsage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `CreateUsage` + """ + model = CreateUsage() + if include_optional: + return CreateUsage( + app_id = '', + course_id = '', + resource_id = '', + node_id = '', + node_version = '' + ) + else: + return CreateUsage( + ) + """ + + def testCreateUsage(self): + """Test CreateUsage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_delete_option.py b/edu_sharing_openapi/test/test_delete_option.py new file mode 100644 index 00000000..7dcc5705 --- /dev/null +++ b/edu_sharing_openapi/test/test_delete_option.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.delete_option import DeleteOption + +class TestDeleteOption(unittest.TestCase): + """DeleteOption unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DeleteOption: + """Test DeleteOption + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DeleteOption` + """ + model = DeleteOption() + if include_optional: + return DeleteOption( + delete = True + ) + else: + return DeleteOption( + ) + """ + + def testDeleteOption(self): + """Test DeleteOption""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_dynamic_config.py b/edu_sharing_openapi/test/test_dynamic_config.py new file mode 100644 index 00000000..ebd9b8b3 --- /dev/null +++ b/edu_sharing_openapi/test/test_dynamic_config.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.dynamic_config import DynamicConfig + +class TestDynamicConfig(unittest.TestCase): + """DynamicConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DynamicConfig: + """Test DynamicConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DynamicConfig` + """ + model = DynamicConfig() + if include_optional: + return DynamicConfig( + node_id = '', + value = '' + ) + else: + return DynamicConfig( + ) + """ + + def testDynamicConfig(self): + """Test DynamicConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_dynamic_registration_token.py b/edu_sharing_openapi/test/test_dynamic_registration_token.py new file mode 100644 index 00000000..f10e3a94 --- /dev/null +++ b/edu_sharing_openapi/test/test_dynamic_registration_token.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.dynamic_registration_token import DynamicRegistrationToken + +class TestDynamicRegistrationToken(unittest.TestCase): + """DynamicRegistrationToken unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DynamicRegistrationToken: + """Test DynamicRegistrationToken + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DynamicRegistrationToken` + """ + model = DynamicRegistrationToken() + if include_optional: + return DynamicRegistrationToken( + token = '', + url = '', + registered_app_id = '', + ts_created = 56, + ts_expiry = 56, + valid = True + ) + else: + return DynamicRegistrationToken( + ) + """ + + def testDynamicRegistrationToken(self): + """Test DynamicRegistrationToken""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_dynamic_registration_tokens.py b/edu_sharing_openapi/test/test_dynamic_registration_tokens.py new file mode 100644 index 00000000..8c12f890 --- /dev/null +++ b/edu_sharing_openapi/test/test_dynamic_registration_tokens.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.dynamic_registration_tokens import DynamicRegistrationTokens + +class TestDynamicRegistrationTokens(unittest.TestCase): + """DynamicRegistrationTokens unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> DynamicRegistrationTokens: + """Test DynamicRegistrationTokens + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `DynamicRegistrationTokens` + """ + model = DynamicRegistrationTokens() + if include_optional: + return DynamicRegistrationTokens( + registration_links = [ + edu_sharing_client.models.dynamic_registration_token.DynamicRegistrationToken( + token = '', + url = '', + registered_app_id = '', + ts_created = 56, + ts_expiry = 56, + valid = True, ) + ] + ) + else: + return DynamicRegistrationTokens( + ) + """ + + def testDynamicRegistrationTokens(self): + """Test DynamicRegistrationTokens""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_element.py b/edu_sharing_openapi/test/test_element.py new file mode 100644 index 00000000..9f4bed32 --- /dev/null +++ b/edu_sharing_openapi/test/test_element.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.element import Element + +class TestElement(unittest.TestCase): + """Element unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Element: + """Test Element + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Element` + """ + model = Element() + if include_optional: + return Element( + id = '', + name = '', + type = '' + ) + else: + return Element( + ) + """ + + def testElement(self): + """Test Element""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_error_response.py b/edu_sharing_openapi/test/test_error_response.py new file mode 100644 index 00000000..a70c9d49 --- /dev/null +++ b/edu_sharing_openapi/test/test_error_response.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.error_response import ErrorResponse + +class TestErrorResponse(unittest.TestCase): + """ErrorResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ErrorResponse: + """Test ErrorResponse + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ErrorResponse` + """ + model = ErrorResponse() + if include_optional: + return ErrorResponse( + stacktrace = '', + details = { + 'key' : None + }, + error = '', + message = '', + log_level = '', + stacktrace_array = [ + '' + ] + ) + else: + return ErrorResponse( + error = '', + message = '', + stacktrace_array = [ + '' + ], + ) + """ + + def testErrorResponse(self): + """Test ErrorResponse""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_excel_result.py b/edu_sharing_openapi/test/test_excel_result.py new file mode 100644 index 00000000..9ced241f --- /dev/null +++ b/edu_sharing_openapi/test/test_excel_result.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.excel_result import ExcelResult + +class TestExcelResult(unittest.TestCase): + """ExcelResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ExcelResult: + """Test ExcelResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ExcelResult` + """ + model = ExcelResult() + if include_optional: + return ExcelResult( + rows = 56 + ) + else: + return ExcelResult( + ) + """ + + def testExcelResult(self): + """Test ExcelResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_facet.py b/edu_sharing_openapi/test/test_facet.py new file mode 100644 index 00000000..27b11868 --- /dev/null +++ b/edu_sharing_openapi/test/test_facet.py @@ -0,0 +1,63 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.facet import Facet + +class TestFacet(unittest.TestCase): + """Facet unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Facet: + """Test Facet + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Facet` + """ + model = Facet() + if include_optional: + return Facet( + var_property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56 + ) + else: + return Facet( + var_property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + ) + """ + + def testFacet(self): + """Test Facet""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_feature_info.py b/edu_sharing_openapi/test/test_feature_info.py new file mode 100644 index 00000000..89ebce35 --- /dev/null +++ b/edu_sharing_openapi/test/test_feature_info.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.feature_info import FeatureInfo + +class TestFeatureInfo(unittest.TestCase): + """FeatureInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FeatureInfo: + """Test FeatureInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FeatureInfo` + """ + model = FeatureInfo() + if include_optional: + return FeatureInfo( + id = 'handleService' + ) + else: + return FeatureInfo( + ) + """ + + def testFeatureInfo(self): + """Test FeatureInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_feedback_data.py b/edu_sharing_openapi/test/test_feedback_data.py new file mode 100644 index 00000000..04924b1b --- /dev/null +++ b/edu_sharing_openapi/test/test_feedback_data.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.feedback_data import FeedbackData + +class TestFeedbackData(unittest.TestCase): + """FeedbackData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FeedbackData: + """Test FeedbackData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FeedbackData` + """ + model = FeedbackData() + if include_optional: + return FeedbackData( + authority = '', + data = { + 'key' : [ + '' + ] + }, + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f') + ) + else: + return FeedbackData( + ) + """ + + def testFeedbackData(self): + """Test FeedbackData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_feedback_result.py b/edu_sharing_openapi/test/test_feedback_result.py new file mode 100644 index 00000000..e27c55bb --- /dev/null +++ b/edu_sharing_openapi/test/test_feedback_result.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.feedback_result import FeedbackResult + +class TestFeedbackResult(unittest.TestCase): + """FeedbackResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FeedbackResult: + """Test FeedbackResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FeedbackResult` + """ + model = FeedbackResult() + if include_optional: + return FeedbackResult( + node_id = '', + was_updated = True + ) + else: + return FeedbackResult( + ) + """ + + def testFeedbackResult(self): + """Test FeedbackResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_feedbackv1_api.py b/edu_sharing_openapi/test/test_feedbackv1_api.py new file mode 100644 index 00000000..ce4c9568 --- /dev/null +++ b/edu_sharing_openapi/test/test_feedbackv1_api.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.feedbackv1_api import FEEDBACKV1Api + + +class TestFEEDBACKV1Api(unittest.TestCase): + """FEEDBACKV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = FEEDBACKV1Api() + + def tearDown(self) -> None: + pass + + def test_add_feedback(self) -> None: + """Test case for add_feedback + + Give feedback on a node + """ + pass + + def test_get_feedbacks(self) -> None: + """Test case for get_feedbacks + + Get given feedback on a node + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_filter.py b/edu_sharing_openapi/test/test_filter.py new file mode 100644 index 00000000..ed8aec15 --- /dev/null +++ b/edu_sharing_openapi/test/test_filter.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.filter import Filter + +class TestFilter(unittest.TestCase): + """Filter unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Filter: + """Test Filter + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Filter` + """ + model = Filter() + if include_optional: + return Filter( + entries = [ + edu_sharing_client.models.filter_entry.FilterEntry( + property = '', + values = [ + '' + ], ) + ] + ) + else: + return Filter( + entries = [ + edu_sharing_client.models.filter_entry.FilterEntry( + property = '', + values = [ + '' + ], ) + ], + ) + """ + + def testFilter(self): + """Test Filter""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_filter_entry.py b/edu_sharing_openapi/test/test_filter_entry.py new file mode 100644 index 00000000..a5f2b14a --- /dev/null +++ b/edu_sharing_openapi/test/test_filter_entry.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.filter_entry import FilterEntry + +class TestFilterEntry(unittest.TestCase): + """FilterEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FilterEntry: + """Test FilterEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FilterEntry` + """ + model = FilterEntry() + if include_optional: + return FilterEntry( + var_property = '', + values = [ + '' + ] + ) + else: + return FilterEntry( + var_property = '', + values = [ + '' + ], + ) + """ + + def testFilterEntry(self): + """Test FilterEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_font_icon.py b/edu_sharing_openapi/test/test_font_icon.py new file mode 100644 index 00000000..87348e09 --- /dev/null +++ b/edu_sharing_openapi/test/test_font_icon.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.font_icon import FontIcon + +class TestFontIcon(unittest.TestCase): + """FontIcon unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> FontIcon: + """Test FontIcon + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `FontIcon` + """ + model = FontIcon() + if include_optional: + return FontIcon( + original = '', + replace = '', + css_class = '' + ) + else: + return FontIcon( + ) + """ + + def testFontIcon(self): + """Test FontIcon""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_frontpage.py b/edu_sharing_openapi/test/test_frontpage.py new file mode 100644 index 00000000..3db6d7f9 --- /dev/null +++ b/edu_sharing_openapi/test/test_frontpage.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.frontpage import Frontpage + +class TestFrontpage(unittest.TestCase): + """Frontpage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Frontpage: + """Test Frontpage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Frontpage` + """ + model = Frontpage() + if include_optional: + return Frontpage( + total_count = 56, + display_count = 56, + mode = 'collection', + timespan = 56, + timespan_all = True, + queries = [ + edu_sharing_client.models.query.Query( + condition = edu_sharing_client.models.condition.Condition( + type = 'TOOLPERMISSION', + negate = True, + value = '', ), + query = '', ) + ], + collection = '' + ) + else: + return Frontpage( + ) + """ + + def testFrontpage(self): + """Test Frontpage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_general.py b/edu_sharing_openapi/test/test_general.py new file mode 100644 index 00000000..1582f390 --- /dev/null +++ b/edu_sharing_openapi/test/test_general.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.general import General + +class TestGeneral(unittest.TestCase): + """General unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> General: + """Test General + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `General` + """ + model = General() + if include_optional: + return General( + referenced_in_name = '', + referenced_in_type = '', + referenced_in_instance = '' + ) + else: + return General( + ) + """ + + def testGeneral(self): + """Test General""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_geo.py b/edu_sharing_openapi/test/test_geo.py new file mode 100644 index 00000000..94d5d236 --- /dev/null +++ b/edu_sharing_openapi/test/test_geo.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.geo import Geo + +class TestGeo(unittest.TestCase): + """Geo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Geo: + """Test Geo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Geo` + """ + model = Geo() + if include_optional: + return Geo( + longitude = 1.337, + latitude = 1.337, + address_country = '' + ) + else: + return Geo( + ) + """ + + def testGeo(self): + """Test Geo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_group.py b/edu_sharing_openapi/test/test_group.py new file mode 100644 index 00000000..c6e5a12c --- /dev/null +++ b/edu_sharing_openapi/test/test_group.py @@ -0,0 +1,106 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.group import Group + +class TestGroup(unittest.TestCase): + """Group unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Group: + """Test Group + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Group` + """ + model = Group() + if include_optional: + return Group( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + organizations = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ) + ) + else: + return Group( + authority_name = '', + ) + """ + + def testGroup(self): + """Test Group""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_group_entries.py b/edu_sharing_openapi/test/test_group_entries.py new file mode 100644 index 00000000..e51869a3 --- /dev/null +++ b/edu_sharing_openapi/test/test_group_entries.py @@ -0,0 +1,148 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.group_entries import GroupEntries + +class TestGroupEntries(unittest.TestCase): + """GroupEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GroupEntries: + """Test GroupEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GroupEntries` + """ + model = GroupEntries() + if include_optional: + return GroupEntries( + groups = [ + edu_sharing_client.models.group.Group( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ) + ) + else: + return GroupEntries( + groups = [ + edu_sharing_client.models.group.Group( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + ) + """ + + def testGroupEntries(self): + """Test GroupEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_group_entry.py b/edu_sharing_openapi/test/test_group_entry.py new file mode 100644 index 00000000..0cc6b0a3 --- /dev/null +++ b/edu_sharing_openapi/test/test_group_entry.py @@ -0,0 +1,136 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.group_entry import GroupEntry + +class TestGroupEntry(unittest.TestCase): + """GroupEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GroupEntry: + """Test GroupEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GroupEntry` + """ + model = GroupEntry() + if include_optional: + return GroupEntry( + group = edu_sharing_client.models.group.Group( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), ) + ) + else: + return GroupEntry( + group = edu_sharing_client.models.group.Group( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), ), + ) + """ + + def testGroupEntry(self): + """Test GroupEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_group_profile.py b/edu_sharing_openapi/test/test_group_profile.py new file mode 100644 index 00000000..245fcff3 --- /dev/null +++ b/edu_sharing_openapi/test/test_group_profile.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.group_profile import GroupProfile + +class TestGroupProfile(unittest.TestCase): + """GroupProfile unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GroupProfile: + """Test GroupProfile + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GroupProfile` + """ + model = GroupProfile() + if include_optional: + return GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '' + ) + else: + return GroupProfile( + ) + """ + + def testGroupProfile(self): + """Test GroupProfile""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_group_signup_details.py b/edu_sharing_openapi/test/test_group_signup_details.py new file mode 100644 index 00000000..ca14781c --- /dev/null +++ b/edu_sharing_openapi/test/test_group_signup_details.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.group_signup_details import GroupSignupDetails + +class TestGroupSignupDetails(unittest.TestCase): + """GroupSignupDetails unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> GroupSignupDetails: + """Test GroupSignupDetails + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `GroupSignupDetails` + """ + model = GroupSignupDetails() + if include_optional: + return GroupSignupDetails( + signup_method = 'simple', + signup_password = '' + ) + else: + return GroupSignupDetails( + ) + """ + + def testGroupSignupDetails(self): + """Test GroupSignupDetails""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_guest.py b/edu_sharing_openapi/test/test_guest.py new file mode 100644 index 00000000..ed6caebd --- /dev/null +++ b/edu_sharing_openapi/test/test_guest.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.guest import Guest + +class TestGuest(unittest.TestCase): + """Guest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Guest: + """Test Guest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Guest` + """ + model = Guest() + if include_optional: + return Guest( + enabled = True + ) + else: + return Guest( + ) + """ + + def testGuest(self): + """Test Guest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_handle_param.py b/edu_sharing_openapi/test/test_handle_param.py new file mode 100644 index 00000000..1e977f64 --- /dev/null +++ b/edu_sharing_openapi/test/test_handle_param.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.handle_param import HandleParam + +class TestHandleParam(unittest.TestCase): + """HandleParam unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> HandleParam: + """Test HandleParam + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `HandleParam` + """ + model = HandleParam() + if include_optional: + return HandleParam( + handle_service = 'distinct', + doi_service = 'distinct' + ) + else: + return HandleParam( + ) + """ + + def testHandleParam(self): + """Test HandleParam""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_help_menu_options.py b/edu_sharing_openapi/test/test_help_menu_options.py new file mode 100644 index 00000000..74bb73ac --- /dev/null +++ b/edu_sharing_openapi/test/test_help_menu_options.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.help_menu_options import HelpMenuOptions + +class TestHelpMenuOptions(unittest.TestCase): + """HelpMenuOptions unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> HelpMenuOptions: + """Test HelpMenuOptions + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `HelpMenuOptions` + """ + model = HelpMenuOptions() + if include_optional: + return HelpMenuOptions( + key = '', + icon = '', + url = '' + ) + else: + return HelpMenuOptions( + ) + """ + + def testHelpMenuOptions(self): + """Test HelpMenuOptions""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_home_folder_options.py b/edu_sharing_openapi/test/test_home_folder_options.py new file mode 100644 index 00000000..12e3558e --- /dev/null +++ b/edu_sharing_openapi/test/test_home_folder_options.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.home_folder_options import HomeFolderOptions + +class TestHomeFolderOptions(unittest.TestCase): + """HomeFolderOptions unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> HomeFolderOptions: + """Test HomeFolderOptions + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `HomeFolderOptions` + """ + model = HomeFolderOptions() + if include_optional: + return HomeFolderOptions( + folders = 'none', + private_files = 'none', + cc_files = 'none', + keep_folder_structure = True + ) + else: + return HomeFolderOptions( + ) + """ + + def testHomeFolderOptions(self): + """Test HomeFolderOptions""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_iamv1_api.py b/edu_sharing_openapi/test/test_iamv1_api.py new file mode 100644 index 00000000..bc6fcbb6 --- /dev/null +++ b/edu_sharing_openapi/test/test_iamv1_api.py @@ -0,0 +1,269 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.iamv1_api import IAMV1Api + + +class TestIAMV1Api(unittest.TestCase): + """IAMV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = IAMV1Api() + + def tearDown(self) -> None: + pass + + def test_add_membership(self) -> None: + """Test case for add_membership + + Add member to the group. 
+ """ + pass + + def test_add_node_list(self) -> None: + """Test case for add_node_list + + Add a node to node a list of a user + """ + pass + + def test_change_group_profile(self) -> None: + """Test case for change_group_profile + + Set profile of the group. + """ + pass + + def test_change_user_avatar(self) -> None: + """Test case for change_user_avatar + + Set avatar of the user. + """ + pass + + def test_change_user_password(self) -> None: + """Test case for change_user_password + + Change/Set password of the user. + """ + pass + + def test_change_user_profile(self) -> None: + """Test case for change_user_profile + + Set profile of the user. + """ + pass + + def test_confirm_signup(self) -> None: + """Test case for confirm_signup + + put the pending user into the group + """ + pass + + def test_create_group(self) -> None: + """Test case for create_group + + Create a new group. + """ + pass + + def test_create_user(self) -> None: + """Test case for create_user + + Create a new user. + """ + pass + + def test_delete_group(self) -> None: + """Test case for delete_group + + Delete the group. + """ + pass + + def test_delete_membership(self) -> None: + """Test case for delete_membership + + Delete member from the group. + """ + pass + + def test_delete_user(self) -> None: + """Test case for delete_user + + Delete the user. + """ + pass + + def test_get_group(self) -> None: + """Test case for get_group + + Get the group. + """ + pass + + def test_get_membership(self) -> None: + """Test case for get_membership + + Get all members of the group. + """ + pass + + def test_get_node_list(self) -> None: + """Test case for get_node_list + + Get a specific node list for a user + """ + pass + + def test_get_preferences(self) -> None: + """Test case for get_preferences + + Get preferences stored for user + """ + pass + + def test_get_profile_settings(self) -> None: + """Test case for get_profile_settings + + Get profileSettings configuration + """ + pass + + def test_get_recently_invited(self) -> None: + """Test case for get_recently_invited + + Get recently invited authorities. + """ + pass + + def test_get_subgroup_by_type(self) -> None: + """Test case for get_subgroup_by_type + + Get a subgroup by the specified type + """ + pass + + def test_get_user(self) -> None: + """Test case for get_user + + Get the user. + """ + pass + + def test_get_user_groups(self) -> None: + """Test case for get_user_groups + + Get all groups the given user is member of. + """ + pass + + def test_get_user_stats(self) -> None: + """Test case for get_user_stats + + Get the user stats. + """ + pass + + def test_reject_signup(self) -> None: + """Test case for reject_signup + + reject the pending user + """ + pass + + def test_remove_node_list(self) -> None: + """Test case for remove_node_list + + Delete a node of a node list of a user + """ + pass + + def test_remove_user_avatar(self) -> None: + """Test case for remove_user_avatar + + Remove avatar of the user. + """ + pass + + def test_search_authorities(self) -> None: + """Test case for search_authorities + + Search authorities. + """ + pass + + def test_search_groups(self) -> None: + """Test case for search_groups + + Search groups. + """ + pass + + def test_search_user(self) -> None: + """Test case for search_user + + Search users. 
+ """ + pass + + def test_set_preferences(self) -> None: + """Test case for set_preferences + + Set preferences for user + """ + pass + + def test_set_profile_settings(self) -> None: + """Test case for set_profile_settings + + Set profileSettings Configuration + """ + pass + + def test_signup_group(self) -> None: + """Test case for signup_group + + let the current user signup to the given group + """ + pass + + def test_signup_group_details(self) -> None: + """Test case for signup_group_details + + requires admin rights + """ + pass + + def test_signup_group_list(self) -> None: + """Test case for signup_group_list + + list pending users that want to join this group + """ + pass + + def test_update_user_status(self) -> None: + """Test case for update_user_status + + update the user status. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_icon.py b/edu_sharing_openapi/test/test_icon.py new file mode 100644 index 00000000..6f78d159 --- /dev/null +++ b/edu_sharing_openapi/test/test_icon.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.icon import Icon + +class TestIcon(unittest.TestCase): + """Icon unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Icon: + """Test Icon + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Icon` + """ + model = Icon() + if include_optional: + return Icon( + url = '' + ) + else: + return Icon( + ) + """ + + def testIcon(self): + """Test Icon""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_image.py b/edu_sharing_openapi/test/test_image.py new file mode 100644 index 00000000..9f2b9230 --- /dev/null +++ b/edu_sharing_openapi/test/test_image.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.image import Image + +class TestImage(unittest.TestCase): + """Image unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Image: + """Test Image + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Image` + """ + model = Image() + if include_optional: + return Image( + src = '', + replace = '' + ) + else: + return Image( + ) + """ + + def testImage(self): + """Test Image""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_interface.py b/edu_sharing_openapi/test/test_interface.py new file mode 100644 index 00000000..bc667c6a --- /dev/null +++ b/edu_sharing_openapi/test/test_interface.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.interface import Interface + +class TestInterface(unittest.TestCase): + """Interface unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Interface: + """Test Interface + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Interface` + """ + model = Interface() + if include_optional: + return Interface( + url = '', + set = '', + metadata_prefix = '', + documentation = '', + format = 'Json', + type = 'Search' + ) + else: + return Interface( + ) + """ + + def testInterface(self): + """Test Interface""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_invite_event_dto.py b/edu_sharing_openapi/test/test_invite_event_dto.py new file mode 100644 index 00000000..81a26471 --- /dev/null +++ b/edu_sharing_openapi/test/test_invite_event_dto.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.invite_event_dto import InviteEventDTO + +class TestInviteEventDTO(unittest.TestCase): + """InviteEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> InviteEventDTO: + """Test InviteEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `InviteEventDTO` + """ + model = InviteEventDTO() + if include_optional: + return InviteEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + name = '', + type = '', + user_comment = '', + permissions = [ + '' + ] + ) + else: + return InviteEventDTO( + ) + """ + + def testInviteEventDTO(self): + """Test InviteEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job.py b/edu_sharing_openapi/test/test_job.py new file mode 100644 index 00000000..3ce0835a --- /dev/null +++ b/edu_sharing_openapi/test/test_job.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job import Job + +class TestJob(unittest.TestCase): + """Job unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Job: + """Test Job + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Job` + """ + model = Job() + if include_optional: + return Job( + id = '', + status = '' + ) + else: + return Job( + id = '', + status = '', + ) + """ + + def testJob(self): + """Test Job""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_builder.py b/edu_sharing_openapi/test/test_job_builder.py new file mode 100644 index 00000000..79c16fd5 --- /dev/null +++ b/edu_sharing_openapi/test/test_job_builder.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_builder import JobBuilder + +class TestJobBuilder(unittest.TestCase): + """JobBuilder unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobBuilder: + """Test JobBuilder + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobBuilder` + """ + model = JobBuilder() + if include_optional: + return JobBuilder( + job_data = edu_sharing_client.models.job_builder.JobBuilder( + job_data = edu_sharing_client.models.job_builder.JobBuilder(), ) + ) + else: + return JobBuilder( + ) + """ + + def testJobBuilder(self): + """Test JobBuilder""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_data_map.py b/edu_sharing_openapi/test/test_job_data_map.py new file mode 100644 index 00000000..a4569fd3 --- /dev/null +++ b/edu_sharing_openapi/test/test_job_data_map.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_data_map import JobDataMap + +class TestJobDataMap(unittest.TestCase): + """JobDataMap unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobDataMap: + """Test JobDataMap + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobDataMap` + """ + model = JobDataMap() + if include_optional: + return JobDataMap( + dirty = True, + allows_transient_data = True, + keys = [ + '' + ], + wrapped_map = { + 'key' : None + }, + empty = True + ) + else: + return JobDataMap( + ) + """ + + def testJobDataMap(self): + """Test JobDataMap""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_description.py b/edu_sharing_openapi/test/test_job_description.py new file mode 100644 index 00000000..5469865b --- /dev/null +++ b/edu_sharing_openapi/test/test_job_description.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_description import JobDescription + +class TestJobDescription(unittest.TestCase): + """JobDescription unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobDescription: + """Test JobDescription + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobDescription` + """ + model = JobDescription() + if include_optional: + return JobDescription( + name = '', + description = '', + params = [ + edu_sharing_client.models.job_field_description.JobFieldDescription( + name = '', + description = '', + file = True, + sample_value = '', + is_array = True, + array = True, ) + ], + tags = [ + 'DeletePersonJob' + ] + ) + else: + return JobDescription( + ) + """ + + def testJobDescription(self): + """Test JobDescription""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_detail.py b/edu_sharing_openapi/test/test_job_detail.py new file mode 100644 index 00000000..82aedf26 --- /dev/null +++ b/edu_sharing_openapi/test/test_job_detail.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_detail import JobDetail + +class TestJobDetail(unittest.TestCase): + """JobDetail unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobDetail: + """Test JobDetail + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobDetail` + """ + model = JobDetail() + if include_optional: + return JobDetail( + key = edu_sharing_client.models.job_key.JobKey( + name = '', + group = '', ), + job_data_map = { + 'key' : None + }, + durable = True, + persist_job_data_after_execution = True, + concurrent_exection_disallowed = True, + job_builder = edu_sharing_client.models.job_builder.JobBuilder( + job_data = edu_sharing_client.models.job_builder.JobBuilder(), ), + description = '' + ) + else: + return JobDetail( + ) + """ + + def testJobDetail(self): + """Test JobDetail""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_detail_job_data_map.py b/edu_sharing_openapi/test/test_job_detail_job_data_map.py new file mode 100644 index 00000000..2bcf60db --- /dev/null +++ b/edu_sharing_openapi/test/test_job_detail_job_data_map.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_detail_job_data_map import JobDetailJobDataMap + +class TestJobDetailJobDataMap(unittest.TestCase): + """JobDetailJobDataMap unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobDetailJobDataMap: + """Test JobDetailJobDataMap + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobDetailJobDataMap` + """ + model = JobDetailJobDataMap() + if include_optional: + return JobDetailJobDataMap( + dirty = True, + allows_transient_data = True, + keys = [ + '' + ], + wrapped_map = { + 'key' : None + }, + empty = True + ) + else: + return JobDetailJobDataMap( + ) + """ + + def testJobDetailJobDataMap(self): + """Test JobDetailJobDataMap""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_entry.py b/edu_sharing_openapi/test/test_job_entry.py new file mode 100644 index 00000000..b0398054 --- /dev/null +++ b/edu_sharing_openapi/test/test_job_entry.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_entry import JobEntry + +class TestJobEntry(unittest.TestCase): + """JobEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobEntry: + """Test JobEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobEntry` + """ + model = JobEntry() + if include_optional: + return JobEntry( + data = edu_sharing_client.models.job.Job( + id = '', + status = '', ) + ) + else: + return JobEntry( + data = edu_sharing_client.models.job.Job( + id = '', + status = '', ), + ) + """ + + def testJobEntry(self): + """Test JobEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_field_description.py b/edu_sharing_openapi/test/test_job_field_description.py new file mode 100644 index 00000000..38dabd81 --- /dev/null +++ b/edu_sharing_openapi/test/test_job_field_description.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_field_description import JobFieldDescription + +class TestJobFieldDescription(unittest.TestCase): + """JobFieldDescription unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobFieldDescription: + """Test JobFieldDescription + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobFieldDescription` + """ + model = JobFieldDescription() + if include_optional: + return JobFieldDescription( + name = '', + description = '', + file = True, + sample_value = '', + is_array = True, + array = True + ) + else: + return JobFieldDescription( + ) + """ + + def testJobFieldDescription(self): + """Test JobFieldDescription""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_info.py b/edu_sharing_openapi/test/test_job_info.py new file mode 100644 index 00000000..5f4f71ce --- /dev/null +++ b/edu_sharing_openapi/test/test_job_info.py @@ -0,0 +1,85 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_info import JobInfo + +class TestJobInfo(unittest.TestCase): + """JobInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobInfo: + """Test JobInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobInfo` + """ + model = JobInfo() + if include_optional: + return JobInfo( + job_data_map = { + 'key' : None + }, + job_name = '', + job_group = '', + start_time = 56, + finish_time = 56, + status = 'Running', + worst_level = edu_sharing_client.models.level.Level( + syslog_equivalent = 56, + version2_level = edu_sharing_client.models.level.Level( + syslog_equivalent = 56, ), ), + log = [ + edu_sharing_client.models.log_entry.LogEntry( + class_name = '', + level = edu_sharing_client.models.level.Level( + syslog_equivalent = 56, + version2_level = edu_sharing_client.models.level.Level( + syslog_equivalent = 56, ), ), + date = 56, + message = '', ) + ], + job_detail = edu_sharing_client.models.job_detail.JobDetail( + key = edu_sharing_client.models.job_key.JobKey( + name = '', + group = '', ), + job_data_map = { + 'key' : None + }, + durable = True, + persist_job_data_after_execution = True, + concurrent_exection_disallowed = True, + job_builder = edu_sharing_client.models.job_builder.JobBuilder( + job_data = edu_sharing_client.models.job_builder.JobBuilder(), ), + description = '', ) + ) + else: + return JobInfo( + ) + """ + + def testJobInfo(self): + """Test JobInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_job_key.py b/edu_sharing_openapi/test/test_job_key.py new file mode 
100644 index 00000000..2f4e1c20 --- /dev/null +++ b/edu_sharing_openapi/test/test_job_key.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.job_key import JobKey + +class TestJobKey(unittest.TestCase): + """JobKey unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JobKey: + """Test JobKey + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JobKey` + """ + model = JobKey() + if include_optional: + return JobKey( + name = '', + group = '' + ) + else: + return JobKey( + ) + """ + + def testJobKey(self): + """Test JobKey""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_json_object.py b/edu_sharing_openapi/test/test_json_object.py new file mode 100644 index 00000000..db8f255b --- /dev/null +++ b/edu_sharing_openapi/test/test_json_object.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.json_object import JSONObject + +class TestJSONObject(unittest.TestCase): + """JSONObject unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> JSONObject: + """Test JSONObject + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `JSONObject` + """ + model = JSONObject() + if include_optional: + return JSONObject( + empty = True + ) + else: + return JSONObject( + ) + """ + + def testJSONObject(self): + """Test JSONObject""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_key_value_pair.py b/edu_sharing_openapi/test/test_key_value_pair.py new file mode 100644 index 00000000..0b06ec1f --- /dev/null +++ b/edu_sharing_openapi/test/test_key_value_pair.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.key_value_pair import KeyValuePair + +class TestKeyValuePair(unittest.TestCase): + """KeyValuePair unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> KeyValuePair: + """Test KeyValuePair + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `KeyValuePair` + """ + model = KeyValuePair() + if include_optional: + return KeyValuePair( + key = '', + value = '' + ) + else: + return KeyValuePair( + ) + """ + + def testKeyValuePair(self): + """Test KeyValuePair""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_knowledgev1_api.py b/edu_sharing_openapi/test/test_knowledgev1_api.py new file mode 100644 index 00000000..5387c730 --- /dev/null +++ b/edu_sharing_openapi/test/test_knowledgev1_api.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.knowledgev1_api import KNOWLEDGEV1Api + + +class TestKNOWLEDGEV1Api(unittest.TestCase): + """KNOWLEDGEV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = KNOWLEDGEV1Api() + + def tearDown(self) -> None: + pass + + def test_get_analyzing_job_status(self) -> None: + """Test case for get_analyzing_job_status + + Get analyzing job status. + """ + pass + + def test_run_analyzing_job(self) -> None: + """Test case for run_analyzing_job + + Run analyzing job. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_language.py b/edu_sharing_openapi/test/test_language.py new file mode 100644 index 00000000..65b5bc0b --- /dev/null +++ b/edu_sharing_openapi/test/test_language.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.language import Language + +class TestLanguage(unittest.TestCase): + """Language unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Language: + """Test Language + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Language` + """ + model = Language() + if include_optional: + return Language( + var_global = { + 'key' : '' + }, + current = { + 'key' : '' + }, + current_language = '' + ) + else: + return Language( + ) + """ + + def testLanguage(self): + """Test Language""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_level.py b/edu_sharing_openapi/test/test_level.py new file mode 100644 index 00000000..092dafcf --- /dev/null +++ b/edu_sharing_openapi/test/test_level.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.level import Level + +class TestLevel(unittest.TestCase): + """Level unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Level: + """Test Level + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Level` + """ + model = Level() + if include_optional: + return Level( + syslog_equivalent = 56, + version2_level = edu_sharing_client.models.level.Level( + syslog_equivalent = 56, ) + ) + else: + return Level( + ) + """ + + def testLevel(self): + """Test Level""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_license.py b/edu_sharing_openapi/test/test_license.py new file mode 100644 index 00000000..600bc754 --- /dev/null +++ b/edu_sharing_openapi/test/test_license.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.license import License + +class TestLicense(unittest.TestCase): + """License unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> License: + """Test License + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `License` + """ + model = License() + if include_optional: + return License( + icon = '', + url = '' + ) + else: + return License( + ) + """ + + def testLicense(self): + """Test License""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_license_agreement.py b/edu_sharing_openapi/test/test_license_agreement.py new file mode 100644 index 00000000..c6b754d5 --- /dev/null +++ b/edu_sharing_openapi/test/test_license_agreement.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.license_agreement import LicenseAgreement + +class TestLicenseAgreement(unittest.TestCase): + """LicenseAgreement unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LicenseAgreement: + """Test LicenseAgreement + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LicenseAgreement` + """ + model = LicenseAgreement() + if include_optional: + return LicenseAgreement( + node_id = [ + edu_sharing_client.models.license_agreement_node.LicenseAgreementNode( + language = '', + value = '', ) + ] + ) + else: + return LicenseAgreement( + ) + """ + + def testLicenseAgreement(self): + """Test LicenseAgreement""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_license_agreement_node.py b/edu_sharing_openapi/test/test_license_agreement_node.py new file mode 100644 index 00000000..d0fae04f --- /dev/null +++ b/edu_sharing_openapi/test/test_license_agreement_node.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.license_agreement_node import LicenseAgreementNode + +class TestLicenseAgreementNode(unittest.TestCase): + """LicenseAgreementNode unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LicenseAgreementNode: + """Test LicenseAgreementNode + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LicenseAgreementNode` + """ + model = LicenseAgreementNode() + if include_optional: + return LicenseAgreementNode( + language = '', + value = '' + ) + else: + return LicenseAgreementNode( + ) + """ + + def testLicenseAgreementNode(self): + """Test LicenseAgreementNode""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_licenses.py b/edu_sharing_openapi/test/test_licenses.py new file mode 100644 index 00000000..6815e04b --- /dev/null +++ b/edu_sharing_openapi/test/test_licenses.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.licenses import Licenses + +class TestLicenses(unittest.TestCase): + """Licenses unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Licenses: + """Test Licenses + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Licenses` + """ + model = Licenses() + if include_optional: + return Licenses( + repository = { + 'key' : '' + }, + services = { + 'key' : { + 'key' : '' + } + } + ) + else: + return Licenses( + ) + """ + + def testLicenses(self): + """Test Licenses""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_location.py b/edu_sharing_openapi/test/test_location.py new file mode 100644 index 00000000..1307abc2 --- /dev/null +++ b/edu_sharing_openapi/test/test_location.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.location import Location + +class TestLocation(unittest.TestCase): + """Location unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Location: + """Test Location + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Location` + """ + model = Location() + if include_optional: + return Location( + geo = edu_sharing_client.models.geo.Geo( + longitude = 1.337, + latitude = 1.337, + address_country = '', ) + ) + else: + return Location( + ) + """ + + def testLocation(self): + """Test Location""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_log_entry.py b/edu_sharing_openapi/test/test_log_entry.py new file mode 100644 index 00000000..c971662b --- /dev/null +++ b/edu_sharing_openapi/test/test_log_entry.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.log_entry import LogEntry + +class TestLogEntry(unittest.TestCase): + """LogEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LogEntry: + """Test LogEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LogEntry` + """ + model = LogEntry() + if include_optional: + return LogEntry( + class_name = '', + level = edu_sharing_client.models.level.Level( + syslog_equivalent = 56, + version2_level = edu_sharing_client.models.level.Level( + syslog_equivalent = 56, ), ), + var_date = 56, + message = '' + ) + else: + return LogEntry( + ) + """ + + def testLogEntry(self): + """Test LogEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_logger_config_result.py b/edu_sharing_openapi/test/test_logger_config_result.py new file mode 100644 index 00000000..e47c46b6 --- /dev/null +++ b/edu_sharing_openapi/test/test_logger_config_result.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.logger_config_result import LoggerConfigResult + +class TestLoggerConfigResult(unittest.TestCase): + """LoggerConfigResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LoggerConfigResult: + """Test LoggerConfigResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LoggerConfigResult` + """ + model = LoggerConfigResult() + if include_optional: + return LoggerConfigResult( + name = '', + level = '', + appender = [ + '' + ], + config = True + ) + else: + return LoggerConfigResult( + ) + """ + + def testLoggerConfigResult(self): + """Test LoggerConfigResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_login.py b/edu_sharing_openapi/test/test_login.py new file mode 100644 index 00000000..6e3537e6 --- /dev/null +++ b/edu_sharing_openapi/test/test_login.py @@ -0,0 +1,286 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.login import Login + +class TestLogin(unittest.TestCase): + """Login unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Login: + """Test Login + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Login` + """ + model = Login() + if include_optional: + return Login( + remote_authentications = { + 'key' : edu_sharing_client.models.remote_auth_description.RemoteAuthDescription( + url = '', + token = '', ) + }, + is_valid_login = True, + is_admin = True, + lti_session = edu_sharing_client.models.lti_session.LTISession( + accept_multiple = True, + deeplink_return_url = '', + accept_types = [ + '' + ], + accept_presentation_document_targets = [ + '' + ], + can_confirm = True, + title = '', + text = '', + custom_content_node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + 
edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), ), + current_scope = '', + user_home = '', + session_timeout = 56, + tool_permissions = [ + '' + ], + status_code = '', + authority_name = '', + is_guest = True + ) + else: + return Login( + is_valid_login = True, + 
is_admin = True, + current_scope = '', + session_timeout = 56, + is_guest = True, + ) + """ + + def testLogin(self): + """Test Login""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_login_credentials.py b/edu_sharing_openapi/test/test_login_credentials.py new file mode 100644 index 00000000..88a1c6da --- /dev/null +++ b/edu_sharing_openapi/test/test_login_credentials.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.login_credentials import LoginCredentials + +class TestLoginCredentials(unittest.TestCase): + """LoginCredentials unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LoginCredentials: + """Test LoginCredentials + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LoginCredentials` + """ + model = LoginCredentials() + if include_optional: + return LoginCredentials( + user_name = '', + password = '', + scope = '' + ) + else: + return LoginCredentials( + user_name = '', + password = '', + scope = '', + ) + """ + + def testLoginCredentials(self): + """Test LoginCredentials""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_logout_info.py b/edu_sharing_openapi/test/test_logout_info.py new file mode 100644 index 00000000..0c069414 --- /dev/null +++ b/edu_sharing_openapi/test/test_logout_info.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.logout_info import LogoutInfo + +class TestLogoutInfo(unittest.TestCase): + """LogoutInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LogoutInfo: + """Test LogoutInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LogoutInfo` + """ + model = LogoutInfo() + if include_optional: + return LogoutInfo( + url = '', + destroy_session = True, + ajax = True, + next = '' + ) + else: + return LogoutInfo( + ) + """ + + def testLogoutInfo(self): + """Test LogoutInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_lti_platform_configuration.py b/edu_sharing_openapi/test/test_lti_platform_configuration.py new file mode 100644 index 00000000..c4e595a4 --- /dev/null +++ b/edu_sharing_openapi/test/test_lti_platform_configuration.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.lti_platform_configuration import LTIPlatformConfiguration + +class TestLTIPlatformConfiguration(unittest.TestCase): + """LTIPlatformConfiguration unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LTIPlatformConfiguration: + """Test LTIPlatformConfiguration + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LTIPlatformConfiguration` + """ + model = LTIPlatformConfiguration() + if include_optional: + return LTIPlatformConfiguration( + product_family_code = '', + version = '', + messages_supported = [ + edu_sharing_client.models.message.Message( + type = '', + placements = [ + '' + ], ) + ], + variables = [ + '' + ] + ) + else: + return LTIPlatformConfiguration( + ) + """ + + def testLTIPlatformConfiguration(self): + """Test LTIPlatformConfiguration""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_lti_platform_v13_api.py b/edu_sharing_openapi/test/test_lti_platform_v13_api.py new file mode 100644 index 00000000..0fc4c321 --- /dev/null +++ b/edu_sharing_openapi/test/test_lti_platform_v13_api.py @@ -0,0 +1,136 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.lti_platform_v13_api import LTIPlatformV13Api + + +class TestLTIPlatformV13Api(unittest.TestCase): + """LTIPlatformV13Api unit test stubs""" + + def setUp(self) -> None: + self.api = LTIPlatformV13Api() + + def tearDown(self) -> None: + pass + + def test_auth(self) -> None: + """Test case for auth + + LTI Platform oidc endpoint. responds to a login authentication request + """ + pass + + def test_auth_token_endpoint(self) -> None: + """Test case for auth_token_endpoint + + LTIPlatform auth token endpoint + """ + pass + + def test_change_content(self) -> None: + """Test case for change_content + + Custom edu-sharing endpoint to change content of node. + """ + pass + + def test_convert_to_resourcelink(self) -> None: + """Test case for convert_to_resourcelink + + manual convertion of an io to an resource link without deeplinking + """ + pass + + def test_deep_linking_response(self) -> None: + """Test case for deep_linking_response + + receiving deeplink response messages. + """ + pass + + def test_generate_login_initiation_form(self) -> None: + """Test case for generate_login_initiation_form + + generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti deeplink flow. + """ + pass + + def test_generate_login_initiation_form_resource_link(self) -> None: + """Test case for generate_login_initiation_form_resource_link + + generate a form used for Initiating Login from a Third Party. Use thes endpoint when starting a lti resourcelink flow. + """ + pass + + def test_get_content(self) -> None: + """Test case for get_content + + Custom edu-sharing endpoint to get content of node. + """ + pass + + def test_manual_registration(self) -> None: + """Test case for manual_registration + + manual registration endpoint for registration of tools. + """ + pass + + def test_open_id_registration(self) -> None: + """Test case for open_id_registration + + registration endpoint the tool uses to register at platform. + """ + pass + + def test_openid_configuration(self) -> None: + """Test case for openid_configuration + + LTIPlatform openid configuration + """ + pass + + def test_start_dynamic_registration(self) -> None: + """Test case for start_dynamic_registration + + starts lti dynamic registration. + """ + pass + + def test_start_dynamic_registration_get(self) -> None: + """Test case for start_dynamic_registration_get + + starts lti dynamic registration. + """ + pass + + def test_test_token(self) -> None: + """Test case for test_token + + test creates a token signed with homeapp. + """ + pass + + def test_tools(self) -> None: + """Test case for tools + + List of tools registered + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_lti_session.py b/edu_sharing_openapi/test/test_lti_session.py new file mode 100644 index 00000000..4fe93779 --- /dev/null +++ b/edu_sharing_openapi/test/test_lti_session.py @@ -0,0 +1,264 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.lti_session import LTISession + +class TestLTISession(unittest.TestCase): + """LTISession unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LTISession: + """Test LTISession + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LTISession` + """ + model = LTISession() + if include_optional: + return LTISession( + accept_multiple = True, + deeplink_return_url = '', + accept_types = [ + '' + ], + accept_presentation_document_targets = [ + '' + ], + can_confirm = True, + title = '', + text = '', + custom_content_node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + 
level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ) + else: + return LTISession( + ) + """ + + def testLTISession(self): + """Test LTISession""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_lti_tool_configuration.py b/edu_sharing_openapi/test/test_lti_tool_configuration.py new file mode 100644 index 00000000..029451fc --- /dev/null +++ b/edu_sharing_openapi/test/test_lti_tool_configuration.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.lti_tool_configuration import LTIToolConfiguration + +class TestLTIToolConfiguration(unittest.TestCase): + """LTIToolConfiguration unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> LTIToolConfiguration: + """Test LTIToolConfiguration + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `LTIToolConfiguration` + """ + model = LTIToolConfiguration() + if include_optional: + return LTIToolConfiguration( + version = '', + deployment_id = '', + target_link_uri = '', + domain = '', + description = '', + claims = [ + '' + ] + ) + else: + return LTIToolConfiguration( + ) + """ + + def testLTIToolConfiguration(self): + """Test LTIToolConfiguration""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_ltiv13_api.py b/edu_sharing_openapi/test/test_ltiv13_api.py new file mode 100644 index 00000000..5dca591f --- /dev/null +++ b/edu_sharing_openapi/test/test_ltiv13_api.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.ltiv13_api import LTIV13Api + + +class TestLTIV13Api(unittest.TestCase): + """LTIV13Api unit test stubs""" + + def setUp(self) -> None: + self.api = LTIV13Api() + + def tearDown(self) -> None: + pass + + def test_generate_deep_linking_response(self) -> None: + """Test case for generate_deep_linking_response + + generate DeepLinkingResponse + """ + pass + + def test_get_details_snippet(self) -> None: + """Test case for get_details_snippet + + get a html snippet containing a rendered version of a node. this method can be called from a platform as a xhr request instead of doing the resource link flow + """ + pass + + def test_jwks_uri(self) -> None: + """Test case for jwks_uri + + LTI - returns repository JSON Web Key Sets + """ + pass + + def test_login_initiations(self) -> None: + """Test case for login_initiations + + lti authentication process preparation. + """ + pass + + def test_login_initiations_get(self) -> None: + """Test case for login_initiations_get + + lti authentication process preparation. + """ + pass + + def test_lti(self) -> None: + """Test case for lti + + lti tool redirect. + """ + pass + + def test_lti_registration_dynamic(self) -> None: + """Test case for lti_registration_dynamic + + LTI Dynamic Registration - Initiate registration + """ + pass + + def test_lti_registration_url(self) -> None: + """Test case for lti_registration_url + + LTI Dynamic Registration - generates url for platform + """ + pass + + def test_lti_target(self) -> None: + """Test case for lti_target + + lti tool resource link target. 
+        """
+        pass
+
+    def test_register_by_type(self) -> None:
+        """Test case for register_by_type
+
+        register LTI platform
+        """
+        pass
+
+    def test_register_test(self) -> None:
+        """Test case for register_test
+
+        register LTI platform
+        """
+        pass
+
+    def test_remove_lti_registration_url(self) -> None:
+        """Test case for test_remove_lti_registration_url
+
+        LTI Dynamic Registration - delete url
+        """
+        pass
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/edu_sharing_openapi/test/test_mainnav.py b/edu_sharing_openapi/test/test_mainnav.py
new file mode 100644
index 00000000..ae075e66
--- /dev/null
+++ b/edu_sharing_openapi/test/test_mainnav.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+    edu-sharing Repository REST API
+
+    The public restful API of the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+
+from edu_sharing_client.models.mainnav import Mainnav
+
+class TestMainnav(unittest.TestCase):
+    """Mainnav unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional) -> Mainnav:
+        """Test Mainnav
+            include_optional is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # uncomment below to create an instance of `Mainnav`
+        """
+        model = Mainnav()
+        if include_optional:
+            return Mainnav(
+                icon = edu_sharing_client.models.icon.Icon(
+                    url = '', ),
+                main_menu_style = ''
+            )
+        else:
+            return Mainnav(
+        )
+        """
+
+    def testMainnav(self):
+        """Test Mainnav"""
+        # inst_req_only = self.make_instance(include_optional=False)
+        # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/edu_sharing_openapi/test/test_manual_registration_data.py b/edu_sharing_openapi/test/test_manual_registration_data.py
new file mode 100644
index 00000000..3e7c593a
--- /dev/null
+++ b/edu_sharing_openapi/test/test_manual_registration_data.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+
+"""
+    edu-sharing Repository REST API
+
+    The public restful API of the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.manual_registration_data import ManualRegistrationData + +class TestManualRegistrationData(unittest.TestCase): + """ManualRegistrationData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ManualRegistrationData: + """Test ManualRegistrationData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ManualRegistrationData` + """ + model = ManualRegistrationData() + if include_optional: + return ManualRegistrationData( + tool_name = '', + tool_url = '', + tool_description = '', + keyset_url = '', + login_initiation_url = '', + redirection_urls = [ + '' + ], + custom_parameters = [ + '' + ], + logo_url = '', + target_link_uri = '', + target_link_uri_deep_link = '', + client_name = '' + ) + else: + return ManualRegistrationData( + target_link_uri = '', + client_name = '', + ) + """ + + def testManualRegistrationData(self): + """Test ManualRegistrationData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mc_org_connect_result.py b/edu_sharing_openapi/test/test_mc_org_connect_result.py new file mode 100644 index 00000000..05c37a06 --- /dev/null +++ b/edu_sharing_openapi/test/test_mc_org_connect_result.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mc_org_connect_result import McOrgConnectResult + +class TestMcOrgConnectResult(unittest.TestCase): + """McOrgConnectResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> McOrgConnectResult: + """Test McOrgConnectResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `McOrgConnectResult` + """ + model = McOrgConnectResult() + if include_optional: + return McOrgConnectResult( + rows = 56 + ) + else: + return McOrgConnectResult( + ) + """ + + def testMcOrgConnectResult(self): + """Test McOrgConnectResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds.py b/edu_sharing_openapi/test/test_mds.py new file mode 100644 index 00000000..9c5e630f --- /dev/null +++ b/edu_sharing_openapi/test/test_mds.py @@ -0,0 +1,248 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds import Mds + +class TestMds(unittest.TestCase): + """Mds unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Mds: + """Test Mds + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Mds` + """ + model = Mds() + if include_optional: + return Mds( + name = '', + create = edu_sharing_client.models.create.Create( + only_metadata = True, ), + widgets = [ + edu_sharing_client.models.mds_widget.MdsWidget( + ids = { + 'key' : '' + }, + link = '', + configuration = '', + format = '', + allow_valuespace_suggestions = True, + count_defaultvalue_as_filter = True, + condition = edu_sharing_client.models.mds_widget_condition.MdsWidgetCondition( + type = 'PROPERTY', + value = '', + negate = True, + dynamic = True, + pattern = '', ), + maxlength = 56, + interaction_type = 'Input', + filter_mode = 'disabled', + expandable = 'disabled', + subwidgets = [ + edu_sharing_client.models.mds_subwidget.MdsSubwidget( + id = '', ) + ], + required = 'mandatory', + id = '', + caption = '', + bottom_caption = '', + icon = '', + type = '', + template = '', + has_values = True, + values = [ + edu_sharing_client.models.mds_value.MdsValue( + id = '', + caption = '', + description = '', + parent = '', + url = '', + alternative_ids = [ + '' + ], ) + ], + placeholder = '', + unit = '', + min = 56, + max = 56, + default_min = 56, + default_max = 56, + step = 56, + is_required = 'mandatory', + allowempty = True, + defaultvalue = '', + is_extended = True, + is_searchable = True, + hide_if_empty = True, ) + ], + views = [ + edu_sharing_client.models.mds_view.MdsView( + id = '', + caption = '', + icon = '', + html = '', + rel = 'suggestions', + hide_if_empty = True, + is_extended = True, ) + ], + groups = [ + edu_sharing_client.models.mds_group.MdsGroup( + rendering = 'legacy', + id = '', + views = [ + '' + ], ) + ], + lists = [ + edu_sharing_client.models.mds_list.MdsList( + id = '', + columns = [ + edu_sharing_client.models.mds_column.MdsColumn( + id = '', + format = '', + show_default = True, ) + ], ) + ], + sorts = [ + edu_sharing_client.models.mds_sort.MdsSort( + id = '', + columns = [ + edu_sharing_client.models.mds_sort_column.MdsSortColumn( + id = '', + mode = '', ) + ], + default = edu_sharing_client.models.mds_sort_default.MdsSortDefault( + sort_by = '', + sort_ascending = True, ), ) + ] + ) + else: + return Mds( + name = '', + widgets = [ + edu_sharing_client.models.mds_widget.MdsWidget( + ids = { + 'key' : '' + }, + link = '', + configuration = '', + format = '', + allow_valuespace_suggestions = True, + count_defaultvalue_as_filter = True, + condition = edu_sharing_client.models.mds_widget_condition.MdsWidgetCondition( + type = 'PROPERTY', + value = '', + negate = True, + dynamic = True, + pattern = '', ), + maxlength = 56, + interaction_type = 'Input', + filter_mode = 'disabled', + expandable = 'disabled', + subwidgets = [ + edu_sharing_client.models.mds_subwidget.MdsSubwidget( + id = '', ) + ], + required = 'mandatory', + id = '', + caption = '', + bottom_caption = '', + icon = '', + type = '', + template = '', + has_values = True, + values = [ + edu_sharing_client.models.mds_value.MdsValue( + id = '', + caption = '', + description = '', + parent = '', + url = '', + alternative_ids = [ + '' + ], ) + ], + placeholder = '', + unit = '', 
+ min = 56, + max = 56, + default_min = 56, + default_max = 56, + step = 56, + is_required = 'mandatory', + allowempty = True, + defaultvalue = '', + is_extended = True, + is_searchable = True, + hide_if_empty = True, ) + ], + views = [ + edu_sharing_client.models.mds_view.MdsView( + id = '', + caption = '', + icon = '', + html = '', + rel = 'suggestions', + hide_if_empty = True, + is_extended = True, ) + ], + groups = [ + edu_sharing_client.models.mds_group.MdsGroup( + rendering = 'legacy', + id = '', + views = [ + '' + ], ) + ], + lists = [ + edu_sharing_client.models.mds_list.MdsList( + id = '', + columns = [ + edu_sharing_client.models.mds_column.MdsColumn( + id = '', + format = '', + show_default = True, ) + ], ) + ], + sorts = [ + edu_sharing_client.models.mds_sort.MdsSort( + id = '', + columns = [ + edu_sharing_client.models.mds_sort_column.MdsSortColumn( + id = '', + mode = '', ) + ], + default = edu_sharing_client.models.mds_sort_default.MdsSortDefault( + sort_by = '', + sort_ascending = True, ), ) + ], + ) + """ + + def testMds(self): + """Test Mds""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_column.py b/edu_sharing_openapi/test/test_mds_column.py new file mode 100644 index 00000000..93db8578 --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_column.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_column import MdsColumn + +class TestMdsColumn(unittest.TestCase): + """MdsColumn unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsColumn: + """Test MdsColumn + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsColumn` + """ + model = MdsColumn() + if include_optional: + return MdsColumn( + id = '', + format = '', + show_default = True + ) + else: + return MdsColumn( + ) + """ + + def testMdsColumn(self): + """Test MdsColumn""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_entries.py b/edu_sharing_openapi/test/test_mds_entries.py new file mode 100644 index 00000000..d9708990 --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_entries.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_entries import MdsEntries + +class TestMdsEntries(unittest.TestCase): + """MdsEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsEntries: + """Test MdsEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsEntries` + """ + model = MdsEntries() + if include_optional: + return MdsEntries( + metadatasets = [ + edu_sharing_client.models.metadata_set_info.MetadataSetInfo( + id = '', + name = '', ) + ] + ) + else: + return MdsEntries( + metadatasets = [ + edu_sharing_client.models.metadata_set_info.MetadataSetInfo( + id = '', + name = '', ) + ], + ) + """ + + def testMdsEntries(self): + """Test MdsEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_group.py b/edu_sharing_openapi/test/test_mds_group.py new file mode 100644 index 00000000..bd332203 --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_group.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_group import MdsGroup + +class TestMdsGroup(unittest.TestCase): + """MdsGroup unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsGroup: + """Test MdsGroup + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsGroup` + """ + model = MdsGroup() + if include_optional: + return MdsGroup( + rendering = 'legacy', + id = '', + views = [ + '' + ] + ) + else: + return MdsGroup( + ) + """ + + def testMdsGroup(self): + """Test MdsGroup""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_list.py b/edu_sharing_openapi/test/test_mds_list.py new file mode 100644 index 00000000..6891ac6d --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_list.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_list import MdsList + +class TestMdsList(unittest.TestCase): + """MdsList unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsList: + """Test MdsList + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsList` + """ + model = MdsList() + if include_optional: + return MdsList( + id = '', + columns = [ + edu_sharing_client.models.mds_column.MdsColumn( + id = '', + format = '', + show_default = True, ) + ] + ) + else: + return MdsList( + ) + """ + + def testMdsList(self): + """Test MdsList""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_query_criteria.py b/edu_sharing_openapi/test/test_mds_query_criteria.py new file mode 100644 index 00000000..598e0ade --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_query_criteria.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_query_criteria import MdsQueryCriteria + +class TestMdsQueryCriteria(unittest.TestCase): + """MdsQueryCriteria unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsQueryCriteria: + """Test MdsQueryCriteria + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsQueryCriteria` + """ + model = MdsQueryCriteria() + if include_optional: + return MdsQueryCriteria( + var_property = '', + values = [ + '' + ] + ) + else: + return MdsQueryCriteria( + var_property = '', + values = [ + '' + ], + ) + """ + + def testMdsQueryCriteria(self): + """Test MdsQueryCriteria""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_sort.py b/edu_sharing_openapi/test/test_mds_sort.py new file mode 100644 index 00000000..18380c24 --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_sort.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_sort import MdsSort + +class TestMdsSort(unittest.TestCase): + """MdsSort unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsSort: + """Test MdsSort + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsSort` + """ + model = MdsSort() + if include_optional: + return MdsSort( + id = '', + columns = [ + edu_sharing_client.models.mds_sort_column.MdsSortColumn( + id = '', + mode = '', ) + ], + default = edu_sharing_client.models.mds_sort_default.MdsSortDefault( + sort_by = '', + sort_ascending = True, ) + ) + else: + return MdsSort( + id = '', + ) + """ + + def testMdsSort(self): + """Test MdsSort""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_sort_column.py b/edu_sharing_openapi/test/test_mds_sort_column.py new file mode 100644 index 00000000..868f8c1f --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_sort_column.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_sort_column import MdsSortColumn + +class TestMdsSortColumn(unittest.TestCase): + """MdsSortColumn unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsSortColumn: + """Test MdsSortColumn + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsSortColumn` + """ + model = MdsSortColumn() + if include_optional: + return MdsSortColumn( + id = '', + mode = '' + ) + else: + return MdsSortColumn( + id = '', + ) + """ + + def testMdsSortColumn(self): + """Test MdsSortColumn""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_sort_default.py b/edu_sharing_openapi/test/test_mds_sort_default.py new file mode 100644 index 00000000..d727be0a --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_sort_default.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_sort_default import MdsSortDefault + +class TestMdsSortDefault(unittest.TestCase): + """MdsSortDefault unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsSortDefault: + """Test MdsSortDefault + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsSortDefault` + """ + model = MdsSortDefault() + if include_optional: + return MdsSortDefault( + sort_by = '', + sort_ascending = True + ) + else: + return MdsSortDefault( + sort_by = '', + sort_ascending = True, + ) + """ + + def testMdsSortDefault(self): + """Test MdsSortDefault""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_subwidget.py b/edu_sharing_openapi/test/test_mds_subwidget.py new file mode 100644 index 00000000..2fea3e8c --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_subwidget.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_subwidget import MdsSubwidget + +class TestMdsSubwidget(unittest.TestCase): + """MdsSubwidget unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsSubwidget: + """Test MdsSubwidget + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsSubwidget` + """ + model = MdsSubwidget() + if include_optional: + return MdsSubwidget( + id = '' + ) + else: + return MdsSubwidget( + ) + """ + + def testMdsSubwidget(self): + """Test MdsSubwidget""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_value.py b/edu_sharing_openapi/test/test_mds_value.py new file mode 100644 index 00000000..28c58a40 --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_value.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_value import MdsValue + +class TestMdsValue(unittest.TestCase): + """MdsValue unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsValue: + """Test MdsValue + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsValue` + """ + model = MdsValue() + if include_optional: + return MdsValue( + id = '', + caption = '', + description = '', + parent = '', + url = '', + alternative_ids = [ + '' + ] + ) + else: + return MdsValue( + id = '', + ) + """ + + def testMdsValue(self): + """Test MdsValue""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_view.py b/edu_sharing_openapi/test/test_mds_view.py new file mode 100644 index 00000000..6b4a0a30 --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_view.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_view import MdsView + +class TestMdsView(unittest.TestCase): + """MdsView unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsView: + """Test MdsView + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsView` + """ + model = MdsView() + if include_optional: + return MdsView( + id = '', + caption = '', + icon = '', + html = '', + rel = 'suggestions', + hide_if_empty = True, + is_extended = True + ) + else: + return MdsView( + ) + """ + + def testMdsView(self): + """Test MdsView""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_widget.py b/edu_sharing_openapi/test/test_mds_widget.py new file mode 100644 index 00000000..3834dbdf --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_widget.py @@ -0,0 +1,104 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_widget import MdsWidget + +class TestMdsWidget(unittest.TestCase): + """MdsWidget unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsWidget: + """Test MdsWidget + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsWidget` + """ + model = MdsWidget() + if include_optional: + return MdsWidget( + ids = { + 'key' : '' + }, + link = '', + configuration = '', + format = '', + allow_valuespace_suggestions = True, + count_defaultvalue_as_filter = True, + condition = edu_sharing_client.models.mds_widget_condition.MdsWidgetCondition( + type = 'PROPERTY', + value = '', + negate = True, + dynamic = True, + pattern = '', ), + maxlength = 56, + interaction_type = 'Input', + filter_mode = 'disabled', + expandable = 'disabled', + subwidgets = [ + edu_sharing_client.models.mds_subwidget.MdsSubwidget( + id = '', ) + ], + required = 'mandatory', + id = '', + caption = '', + bottom_caption = '', + icon = '', + type = '', + template = '', + has_values = True, + values = [ + edu_sharing_client.models.mds_value.MdsValue( + id = '', + caption = '', + description = '', + parent = '', + url = '', + alternative_ids = [ + '' + ], ) + ], + placeholder = '', + unit = '', + min = 56, + max = 56, + default_min = 56, + default_max = 56, + step = 56, + is_required = 'mandatory', + allowempty = True, + defaultvalue = '', + is_extended = True, + is_searchable = True, + hide_if_empty = True + ) + else: + return MdsWidget( + ) + """ + + def testMdsWidget(self): + """Test MdsWidget""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mds_widget_condition.py b/edu_sharing_openapi/test/test_mds_widget_condition.py new file mode 100644 index 00000000..6c6c0496 --- /dev/null +++ b/edu_sharing_openapi/test/test_mds_widget_condition.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mds_widget_condition import MdsWidgetCondition + +class TestMdsWidgetCondition(unittest.TestCase): + """MdsWidgetCondition unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MdsWidgetCondition: + """Test MdsWidgetCondition + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MdsWidgetCondition` + """ + model = MdsWidgetCondition() + if include_optional: + return MdsWidgetCondition( + type = 'PROPERTY', + value = '', + negate = True, + dynamic = True, + pattern = '' + ) + else: + return MdsWidgetCondition( + type = 'PROPERTY', + value = '', + negate = True, + dynamic = True, + ) + """ + + def testMdsWidgetCondition(self): + """Test MdsWidgetCondition""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mdsv1_api.py b/edu_sharing_openapi/test/test_mdsv1_api.py new file mode 100644 index 00000000..6a4993d2 --- /dev/null +++ b/edu_sharing_openapi/test/test_mdsv1_api.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.mdsv1_api import MDSV1Api + + +class TestMDSV1Api(unittest.TestCase): + """MDSV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = MDSV1Api() + + def tearDown(self) -> None: + pass + + def test_get_metadata_set(self) -> None: + """Test case for get_metadata_set + + Get metadata set new. + """ + pass + + def test_get_metadata_sets(self) -> None: + """Test case for get_metadata_sets + + Get metadata sets V2 of repository. + """ + pass + + def test_get_values(self) -> None: + """Test case for get_values + + Get values. + """ + pass + + def test_get_values4_keys(self) -> None: + """Test case for get_values4_keys + + Get values for keys. + """ + pass + + def test_suggest_value(self) -> None: + """Test case for suggest_value + + Suggest a value. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mediacenter.py b/edu_sharing_openapi/test/test_mediacenter.py new file mode 100644 index 00000000..d76e09cb --- /dev/null +++ b/edu_sharing_openapi/test/test_mediacenter.py @@ -0,0 +1,107 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mediacenter import Mediacenter + +class TestMediacenter(unittest.TestCase): + """Mediacenter unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Mediacenter: + """Test Mediacenter + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Mediacenter` + """ + model = Mediacenter() + if include_optional: + return Mediacenter( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + organizations = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True + ) + else: + return Mediacenter( + authority_name = '', + ) + """ + + def testMediacenter(self): + """Test Mediacenter""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mediacenter_profile_extension.py b/edu_sharing_openapi/test/test_mediacenter_profile_extension.py new file mode 100644 index 00000000..5182003f --- /dev/null +++ b/edu_sharing_openapi/test/test_mediacenter_profile_extension.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mediacenter_profile_extension import MediacenterProfileExtension + +class TestMediacenterProfileExtension(unittest.TestCase): + """MediacenterProfileExtension unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MediacenterProfileExtension: + """Test MediacenterProfileExtension + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MediacenterProfileExtension` + """ + model = MediacenterProfileExtension() + if include_optional: + return MediacenterProfileExtension( + id = '', + location = '', + district_abbreviation = '', + main_url = '', + catalogs = [ + edu_sharing_client.models.catalog.Catalog( + name = '', + url = '', ) + ], + content_status = 'Activated' + ) + else: + return MediacenterProfileExtension( + ) + """ + + def testMediacenterProfileExtension(self): + """Test MediacenterProfileExtension""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mediacenters_import_result.py b/edu_sharing_openapi/test/test_mediacenters_import_result.py new file mode 100644 index 00000000..0e933c12 --- /dev/null +++ b/edu_sharing_openapi/test/test_mediacenters_import_result.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.mediacenters_import_result import MediacentersImportResult + +class TestMediacentersImportResult(unittest.TestCase): + """MediacentersImportResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MediacentersImportResult: + """Test MediacentersImportResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MediacentersImportResult` + """ + model = MediacentersImportResult() + if include_optional: + return MediacentersImportResult( + rows = 56 + ) + else: + return MediacentersImportResult( + ) + """ + + def testMediacentersImportResult(self): + """Test MediacentersImportResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_mediacenterv1_api.py b/edu_sharing_openapi/test/test_mediacenterv1_api.py new file mode 100644 index 00000000..4dc813f5 --- /dev/null +++ b/edu_sharing_openapi/test/test_mediacenterv1_api.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api + + +class TestMEDIACENTERV1Api(unittest.TestCase): + """MEDIACENTERV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = MEDIACENTERV1Api() + + def tearDown(self) -> None: + pass + + def test_add_mediacenter_group(self) -> None: + """Test case for add_mediacenter_group + + add a group that is managed by the given mediacenter + """ + pass + + def test_create_mediacenter(self) -> None: + """Test case for create_mediacenter + + create new mediacenter in repository. + """ + pass + + def test_delete_mediacenter(self) -> None: + """Test case for delete_mediacenter + + delete a mediacenter group and it's admin group and proxy group + """ + pass + + def test_edit_mediacenter(self) -> None: + """Test case for edit_mediacenter + + edit a mediacenter in repository. + """ + pass + + def test_export_mediacenter_licensed_nodes(self) -> None: + """Test case for export_mediacenter_licensed_nodes + + get nodes that are licensed by the given mediacenter + """ + pass + + def test_get_mediacenter_groups(self) -> None: + """Test case for get_mediacenter_groups + + get groups that are managed by the given mediacenter + """ + pass + + def test_get_mediacenter_licensed_nodes(self) -> None: + """Test case for get_mediacenter_licensed_nodes + + get nodes that are licensed by the given mediacenter + """ + pass + + def test_get_mediacenters(self) -> None: + """Test case for get_mediacenters + + get mediacenters in the repository. + """ + pass + + def test_import_mc_org_connections(self) -> None: + """Test case for import_mc_org_connections + + Import Mediacenter Organisation Connection + """ + pass + + def test_import_mediacenters(self) -> None: + """Test case for import_mediacenters + + Import mediacenters + """ + pass + + def test_import_organisations(self) -> None: + """Test case for import_organisations + + Import Organisations + """ + pass + + def test_remove_mediacenter_group(self) -> None: + """Test case for remove_mediacenter_group + + delete a group that is managed by the given mediacenter + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_menu_entry.py b/edu_sharing_openapi/test/test_menu_entry.py new file mode 100644 index 00000000..48096831 --- /dev/null +++ b/edu_sharing_openapi/test/test_menu_entry.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.menu_entry import MenuEntry + +class TestMenuEntry(unittest.TestCase): + """MenuEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MenuEntry: + """Test MenuEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MenuEntry` + """ + model = MenuEntry() + if include_optional: + return MenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + path = '', + scope = '' + ) + else: + return MenuEntry( + ) + """ + + def testMenuEntry(self): + """Test MenuEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_message.py b/edu_sharing_openapi/test/test_message.py new file mode 100644 index 00000000..915e77c2 --- /dev/null +++ b/edu_sharing_openapi/test/test_message.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.message import Message + +class TestMessage(unittest.TestCase): + """Message unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Message: + """Test Message + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Message` + """ + model = Message() + if include_optional: + return Message( + type = '', + placements = [ + '' + ] + ) + else: + return Message( + ) + """ + + def testMessage(self): + """Test Message""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_metadata_set_info.py b/edu_sharing_openapi/test/test_metadata_set_info.py new file mode 100644 index 00000000..0b6d8836 --- /dev/null +++ b/edu_sharing_openapi/test/test_metadata_set_info.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.metadata_set_info import MetadataSetInfo + +class TestMetadataSetInfo(unittest.TestCase): + """MetadataSetInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MetadataSetInfo: + """Test MetadataSetInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MetadataSetInfo` + """ + model = MetadataSetInfo() + if include_optional: + return MetadataSetInfo( + id = '', + name = '' + ) + else: + return MetadataSetInfo( + id = '', + name = '', + ) + """ + + def testMetadataSetInfo(self): + """Test MetadataSetInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_metadata_suggestion_event_dto.py b/edu_sharing_openapi/test/test_metadata_suggestion_event_dto.py new file mode 100644 index 00000000..d0754b05 --- /dev/null +++ b/edu_sharing_openapi/test/test_metadata_suggestion_event_dto.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.metadata_suggestion_event_dto import MetadataSuggestionEventDTO + +class TestMetadataSuggestionEventDTO(unittest.TestCase): + """MetadataSuggestionEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> MetadataSuggestionEventDTO: + """Test MetadataSuggestionEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `MetadataSuggestionEventDTO` + """ + model = MetadataSuggestionEventDTO() + if include_optional: + return MetadataSuggestionEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + caption_id = '', + caption = '', + parent_id = '', + parent_caption = '', + widget = edu_sharing_client.models.widget_data_dto.WidgetDataDTO( + id = '', + caption = '', ) + ) + else: + return MetadataSuggestionEventDTO( + ) + """ + + def testMetadataSuggestionEventDTO(self): + """Test MetadataSuggestionEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_networkv1_api.py b/edu_sharing_openapi/test/test_networkv1_api.py new file mode 100644 index 00000000..ddd1256c --- /dev/null +++ b/edu_sharing_openapi/test/test_networkv1_api.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.networkv1_api import NETWORKV1Api + + +class TestNETWORKV1Api(unittest.TestCase): + """NETWORKV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = NETWORKV1Api() + + def tearDown(self) -> None: + pass + + def test_add_service(self) -> None: + """Test case for add_service + + Register service. + """ + pass + + def test_get_repositories(self) -> None: + """Test case for get_repositories + + Get repositories. + """ + pass + + def test_get_service(self) -> None: + """Test case for get_service + + Get own service. + """ + pass + + def test_get_services(self) -> None: + """Test case for get_services + + Get services. + """ + pass + + def test_update_service(self) -> None: + """Test case for update_service + + Update a service. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node.py b/edu_sharing_openapi/test/test_node.py new file mode 100644 index 00000000..51994b69 --- /dev/null +++ b/edu_sharing_openapi/test/test_node.py @@ -0,0 +1,692 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node import Node + +class TestNode(unittest.TestCase): + """Node unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Node: + """Test Node + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Node` + """ + model = Node() + if include_optional: + return Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + 
rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, 
+ child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + 
modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + type = '', + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + is_public = True + ) + else: + return Node( + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + name = '', + 
created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + ) + """ + + def testNode(self): + """Test Node""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_collection_proposal_count.py b/edu_sharing_openapi/test/test_node_collection_proposal_count.py new file mode 100644 index 00000000..b7849efc --- /dev/null +++ b/edu_sharing_openapi/test/test_node_collection_proposal_count.py @@ -0,0 +1,696 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_collection_proposal_count import NodeCollectionProposalCount + +class TestNodeCollectionProposalCount(unittest.TestCase): + """NodeCollectionProposalCount unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeCollectionProposalCount: + """Test NodeCollectionProposalCount + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeCollectionProposalCount` + """ + model = NodeCollectionProposalCount() + if include_optional: + return NodeCollectionProposalCount( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + 
primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = 
True, + comment_count = 56, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + proposal_counts = { + 'key' : 56 + }, + proposal_count = { + 'key' : 56 + }, + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + 
archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + type = '', + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + is_public = True + ) + else: + return NodeCollectionProposalCount( + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + name = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = 
'', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + ) + """ + + def testNodeCollectionProposalCount(self): + """Test NodeCollectionProposalCount""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_data.py b/edu_sharing_openapi/test/test_node_data.py new file mode 100644 index 00000000..38080350 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_data.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_data import NodeData + +class TestNodeData(unittest.TestCase): + """NodeData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeData: + """Test NodeData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeData` + """ + model = NodeData() + if include_optional: + return NodeData( + timestamp = '', + counts = { + 'key' : 56 + } + ) + else: + return NodeData( + ) + """ + + def testNodeData(self): + """Test NodeData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_data_dto.py b/edu_sharing_openapi/test/test_node_data_dto.py new file mode 100644 index 00000000..81a7e81b --- /dev/null +++ b/edu_sharing_openapi/test/test_node_data_dto.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_data_dto import NodeDataDTO + +class TestNodeDataDTO(unittest.TestCase): + """NodeDataDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeDataDTO: + """Test NodeDataDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeDataDTO` + """ + model = NodeDataDTO() + if include_optional: + return NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + } + ) + else: + return NodeDataDTO( + ) + """ + + def testNodeDataDTO(self): + """Test NodeDataDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_entries.py b/edu_sharing_openapi/test/test_node_entries.py new file mode 100644 index 00000000..4c2ebdfa --- /dev/null +++ b/edu_sharing_openapi/test/test_node_entries.py @@ -0,0 +1,468 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_entries import NodeEntries + +class TestNodeEntries(unittest.TestCase): + """NodeEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeEntries: + """Test NodeEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeEntries` + """ + model = NodeEntries() + if include_optional: + return NodeEntries( + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', 
+ archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ) + ) + else: + return NodeEntries( + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + 
license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + 
type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + ) + """ + + def testNodeEntries(self): + """Test NodeEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_entry.py b/edu_sharing_openapi/test/test_node_entry.py new file mode 100644 index 00000000..97d7feb5 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_entry.py @@ -0,0 +1,456 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_entry import NodeEntry + +class TestNodeEntry(unittest.TestCase): + """NodeEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeEntry: + """Test NodeEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeEntry` + """ + model = NodeEntry() + if include_optional: + return NodeEntry( + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills 
= [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ) + else: + return NodeEntry( + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 
56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + ) + """ + + def testNodeEntry(self): + """Test NodeEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git 
a/edu_sharing_openapi/test/test_node_issue_event_dto.py b/edu_sharing_openapi/test/test_node_issue_event_dto.py new file mode 100644 index 00000000..1c6ebdc5 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_issue_event_dto.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_issue_event_dto import NodeIssueEventDTO + +class TestNodeIssueEventDTO(unittest.TestCase): + """NodeIssueEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeIssueEventDTO: + """Test NodeIssueEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeIssueEventDTO` + """ + model = NodeIssueEventDTO() + if include_optional: + return NodeIssueEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + reason = '', + user_comment = '' + ) + else: + return NodeIssueEventDTO( + ) + """ + + def testNodeIssueEventDTO(self): + """Test NodeIssueEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_locked.py b/edu_sharing_openapi/test/test_node_locked.py new file mode 100644 index 00000000..398ee0d3 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_locked.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_locked import NodeLocked + +class TestNodeLocked(unittest.TestCase): + """NodeLocked unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeLocked: + """Test NodeLocked + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeLocked` + """ + model = NodeLocked() + if include_optional: + return NodeLocked( + is_locked = True + ) + else: + return NodeLocked( + is_locked = True, + ) + """ + + def testNodeLocked(self): + """Test NodeLocked""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_lti_deep_link.py b/edu_sharing_openapi/test/test_node_lti_deep_link.py new file mode 100644 index 00000000..b3d6d05b --- /dev/null +++ b/edu_sharing_openapi/test/test_node_lti_deep_link.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_lti_deep_link import NodeLTIDeepLink + +class TestNodeLTIDeepLink(unittest.TestCase): + """NodeLTIDeepLink unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeLTIDeepLink: + """Test NodeLTIDeepLink + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeLTIDeepLink` + """ + model = NodeLTIDeepLink() + if include_optional: + return NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '' + ) + else: + return NodeLTIDeepLink( + ) + """ + + def testNodeLTIDeepLink(self): + """Test NodeLTIDeepLink""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_permission_entry.py b/edu_sharing_openapi/test/test_node_permission_entry.py new file mode 100644 index 00000000..021ea3cb --- /dev/null +++ b/edu_sharing_openapi/test/test_node_permission_entry.py @@ -0,0 +1,154 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_permission_entry import NodePermissionEntry + +class TestNodePermissionEntry(unittest.TestCase): + """NodePermissionEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodePermissionEntry: + """Test NodePermissionEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodePermissionEntry` + """ + model = NodePermissionEntry() + if include_optional: + return NodePermissionEntry( + permissions = edu_sharing_client.models.node_permissions.NodePermissions( + local_permissions = edu_sharing_client.models.acl.ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ], ), + inherited_permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + editable = True, + authority_name = '', + authority_type = 'USER', ), + permissions = [ + '' + ], ) + ], ) + ) + else: + return NodePermissionEntry( + permissions = 
edu_sharing_client.models.node_permissions.NodePermissions( + local_permissions = edu_sharing_client.models.acl.ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ], ), + inherited_permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + editable = True, + authority_name = '', + authority_type = 'USER', ), + permissions = [ + '' + ], ) + ], ), + ) + """ + + def testNodePermissionEntry(self): + """Test NodePermissionEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_permissions.py b/edu_sharing_openapi/test/test_node_permissions.py new file mode 100644 index 00000000..18be3004 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_permissions.py @@ -0,0 +1,206 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_permissions import NodePermissions + +class TestNodePermissions(unittest.TestCase): + """NodePermissions unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodePermissions: + """Test NodePermissions + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodePermissions` + """ + model = NodePermissions() + if include_optional: + return NodePermissions( + local_permissions = edu_sharing_client.models.acl.ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ], ), + inherited_permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ] + ) + else: + return NodePermissions( + local_permissions = edu_sharing_client.models.acl.ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ], ), + inherited_permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', 
), + permissions = [ + '' + ], ) + ], + ) + """ + + def testNodePermissions(self): + """Test NodePermissions""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_ref.py b/edu_sharing_openapi/test/test_node_ref.py new file mode 100644 index 00000000..2a7e8835 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_ref.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_ref import NodeRef + +class TestNodeRef(unittest.TestCase): + """NodeRef unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeRef: + """Test NodeRef + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeRef` + """ + model = NodeRef() + if include_optional: + return NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True + ) + else: + return NodeRef( + repo = '', + id = '', + archived = True, + ) + """ + + def testNodeRef(self): + """Test NodeRef""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_relation.py b/edu_sharing_openapi/test/test_node_relation.py new file mode 100644 index 00000000..802000e8 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_relation.py @@ -0,0 +1,486 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_relation import NodeRelation + +class TestNodeRelation(unittest.TestCase): + """NodeRelation unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeRelation: + """Test NodeRelation + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeRelation` + """ + model = NodeRelation() + if include_optional: + return NodeRelation( + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + 
child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + relations = [ + edu_sharing_client.models.relation_data.RelationData( + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = 
'', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + creator = edu_sharing_client.models.user.User( + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + administration_access = True, + shared_folder = , ) + ], + quota = edu_sharing_client.models.user_quota.UserQuota( + enabled = True, + size_current = 56, + size_quota = 56, ), + authority_name = '', + authority_type = 'USER', + user_name = '', + home_folder = , + shared_folders = [ + + ], ), + timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + type = 'isPartOf', ) + ] + ) + else: + return NodeRelation( + ) + """ + + def testNodeRelation(self): + """Test NodeRelation""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_remote.py b/edu_sharing_openapi/test/test_node_remote.py new file mode 100644 index 00000000..4cfe036e --- /dev/null +++ b/edu_sharing_openapi/test/test_node_remote.py @@ -0,0 +1,862 @@ +# coding: utf-8 + +""" + edu-sharing 
Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_remote import NodeRemote + +class TestNodeRemote(unittest.TestCase): + """NodeRemote unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeRemote: + """Test NodeRemote + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeRemote` + """ + model = NodeRemote() + if include_optional: + return NodeRemote( + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + 
order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + remote = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + 
], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ) + else: + return NodeRemote( + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + 
edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + remote = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = 
True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), 
+                icon_url = '',
+                collection = edu_sharing_client.models.collection.Collection(
+                    scope = '',
+                    author_freetext = '',
+                    order_ascending = True,
+                    level0 = True,
+                    title = '',
+                    description = '',
+                    type = '',
+                    viewtype = '',
+                    order_mode = '',
+                    x = 56,
+                    y = 56,
+                    z = 56,
+                    color = '',
+                    from_user = True,
+                    pinned = True,
+                    child_collections_count = 56,
+                    child_references_count = 56, ),
+                owner = ,
+                is_public = True, ),
+        )
+        """
+
+    def testNodeRemote(self):
+        """Test NodeRemote"""
+        # inst_req_only = self.make_instance(include_optional=False)
+        # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/edu_sharing_openapi/test/test_node_share.py b/edu_sharing_openapi/test/test_node_share.py
new file mode 100644
index 00000000..77494ed6
--- /dev/null
+++ b/edu_sharing_openapi/test/test_node_share.py
@@ -0,0 +1,58 @@
+# coding: utf-8
+
+"""
+    edu-sharing Repository REST API
+
+    The public restful API of the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+"""  # noqa: E501
+
+
+import unittest
+
+from edu_sharing_client.models.node_share import NodeShare
+
+class TestNodeShare(unittest.TestCase):
+    """NodeShare unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional) -> NodeShare:
+        """Test NodeShare
+            include_optional is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # uncomment below to create an instance of `NodeShare`
+        """
+        model = NodeShare()
+        if include_optional:
+            return NodeShare(
+                password = True,
+                token = '',
+                email = '',
+                expiry_date = 56,
+                invited_at = 56,
+                download_count = 56,
+                url = '',
+                share_id = ''
+            )
+        else:
+            return NodeShare(
+        )
+        """
+
+    def testNodeShare(self):
+        """Test NodeShare"""
+        # inst_req_only = self.make_instance(include_optional=False)
+        # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/edu_sharing_openapi/test/test_node_stats.py b/edu_sharing_openapi/test/test_node_stats.py
new file mode 100644
index 00000000..76b5e448
--- /dev/null
+++ b/edu_sharing_openapi/test/test_node_stats.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+    edu-sharing Repository REST API
+
+    The public restful API of the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+"""  # noqa: E501
+
+
+import unittest
+
+from edu_sharing_client.models.node_stats import NodeStats
+
+class TestNodeStats(unittest.TestCase):
+    """NodeStats unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional) -> NodeStats:
+        """Test NodeStats
+            include_optional is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # uncomment below to create an instance of `NodeStats`
+        """
+        model = NodeStats()
+        if include_optional:
+            return NodeStats(
+                total = {
+                    'key' : 56
+                    }
+            )
+        else:
+            return NodeStats(
+        )
+        """
+
+    def testNodeStats(self):
+        """Test NodeStats"""
+        # inst_req_only = self.make_instance(include_optional=False)
+        # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/edu_sharing_openapi/test/test_node_text.py b/edu_sharing_openapi/test/test_node_text.py
new file mode 100644
index 00000000..3dfb1864
--- /dev/null
+++ b/edu_sharing_openapi/test/test_node_text.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+    edu-sharing Repository REST API
+
+    The public restful API of the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+"""  # noqa: E501
+
+
+import unittest
+
+from edu_sharing_client.models.node_text import NodeText
+
+class TestNodeText(unittest.TestCase):
+    """NodeText unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional) -> NodeText:
+        """Test NodeText
+            include_optional is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # uncomment below to create an instance of `NodeText`
+        """
+        model = NodeText()
+        if include_optional:
+            return NodeText(
+                text = '',
+                html = '',
+                raw = ''
+            )
+        else:
+            return NodeText(
+        )
+        """
+
+    def testNodeText(self):
+        """Test NodeText"""
+        # inst_req_only = self.make_instance(include_optional=False)
+        # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/edu_sharing_openapi/test/test_node_version.py b/edu_sharing_openapi/test/test_node_version.py
new file mode 100644
index 00000000..8b6924a9
--- /dev/null
+++ b/edu_sharing_openapi/test/test_node_version.py
@@ -0,0 +1,118 @@
+# coding: utf-8
+
+"""
+    edu-sharing Repository REST API
+
+    The public restful API of the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_version import NodeVersion + +class TestNodeVersion(unittest.TestCase): + """NodeVersion unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeVersion: + """Test NodeVersion + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeVersion` + """ + model = NodeVersion() + if include_optional: + return NodeVersion( + properties = { + 'key' : [ + '' + ] + }, + version = edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ), + comment = '', + modified_at = '', + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + content_url = '' + ) + else: + return NodeVersion( + version = edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ), + comment = '', + modified_at = '', + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + ) + """ + + def testNodeVersion(self): + """Test NodeVersion""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_version_entries.py b/edu_sharing_openapi/test/test_node_version_entries.py new file mode 100644 index 00000000..887d4ae9 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_version_entries.py @@ -0,0 +1,130 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_version_entries import NodeVersionEntries + +class TestNodeVersionEntries(unittest.TestCase): + """NodeVersionEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeVersionEntries: + """Test NodeVersionEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeVersionEntries` + """ + model = NodeVersionEntries() + if include_optional: + return NodeVersionEntries( + versions = [ + edu_sharing_client.models.node_version.NodeVersion( + properties = { + 'key' : [ + '' + ] + }, + version = edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ), + comment = '', + modified_at = '', + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + content_url = '', ) + ] + ) + else: + return NodeVersionEntries( + versions = [ + edu_sharing_client.models.node_version.NodeVersion( + properties = { + 'key' : [ + '' + ] + }, + version = edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ), + comment = '', + modified_at = '', + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + content_url = '', ) + ], + ) + """ + + def testNodeVersionEntries(self): + """Test NodeVersionEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_version_entry.py b/edu_sharing_openapi/test/test_node_version_entry.py new file mode 100644 index 00000000..22fab8b6 --- /dev/null +++ b/edu_sharing_openapi/test/test_node_version_entry.py @@ -0,0 +1,126 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_version_entry import NodeVersionEntry + +class TestNodeVersionEntry(unittest.TestCase): + """NodeVersionEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeVersionEntry: + """Test NodeVersionEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeVersionEntry` + """ + model = NodeVersionEntry() + if include_optional: + return NodeVersionEntry( + version = edu_sharing_client.models.node_version.NodeVersion( + properties = { + 'key' : [ + '' + ] + }, + version = edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ), + comment = '', + modified_at = '', + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + content_url = '', ) + ) + else: + return NodeVersionEntry( + version = edu_sharing_client.models.node_version.NodeVersion( + properties = { + 'key' : [ + '' + ] + }, + version = edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ), + comment = '', + modified_at = '', + modified_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + content_url = '', ), + ) + """ + + def testNodeVersionEntry(self): + """Test NodeVersionEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_version_ref.py b/edu_sharing_openapi/test/test_node_version_ref.py new file mode 100644 index 00000000..ae5da75b --- /dev/null +++ b/edu_sharing_openapi/test/test_node_version_ref.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_version_ref import NodeVersionRef + +class TestNodeVersionRef(unittest.TestCase): + """NodeVersionRef unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeVersionRef: + """Test NodeVersionRef + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeVersionRef` + """ + model = NodeVersionRef() + if include_optional: + return NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56 + ) + else: + return NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, + ) + """ + + def testNodeVersionRef(self): + """Test NodeVersionRef""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_node_version_ref_entries.py b/edu_sharing_openapi/test/test_node_version_ref_entries.py new file mode 100644 index 00000000..126ea4fa --- /dev/null +++ b/edu_sharing_openapi/test/test_node_version_ref_entries.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.node_version_ref_entries import NodeVersionRefEntries + +class TestNodeVersionRefEntries(unittest.TestCase): + """NodeVersionRefEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NodeVersionRefEntries: + """Test NodeVersionRefEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NodeVersionRefEntries` + """ + model = NodeVersionRefEntries() + if include_optional: + return NodeVersionRefEntries( + versions = [ + edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ) + ] + ) + else: + return NodeVersionRefEntries( + versions = [ + edu_sharing_client.models.node_version_ref.NodeVersionRef( + node = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + major = 56, + minor = 56, ) + ], + ) + """ + + def testNodeVersionRefEntries(self): + """Test NodeVersionRefEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_nodev1_api.py b/edu_sharing_openapi/test/test_nodev1_api.py new file mode 100644 index 00000000..b042d242 --- /dev/null +++ b/edu_sharing_openapi/test/test_nodev1_api.py @@ -0,0 +1,360 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of 
the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+"""  # noqa: E501
+
+
+import unittest
+
+from edu_sharing_client.api.nodev1_api import NODEV1Api
+
+
+class TestNODEV1Api(unittest.TestCase):
+    """NODEV1Api unit test stubs"""
+
+    def setUp(self) -> None:
+        self.api = NODEV1Api()
+
+    def tearDown(self) -> None:
+        pass
+
+    def test_add_aspects(self) -> None:
+        """Test case for add_aspects
+
+        Add aspect to node.
+        """
+        pass
+
+    def test_add_workflow_history(self) -> None:
+        """Test case for add_workflow_history
+
+        Add workflow.
+        """
+        pass
+
+    def test_change_content1(self) -> None:
+        """Test case for change_content1
+
+        Change content of node.
+        """
+        pass
+
+    def test_change_content_as_text(self) -> None:
+        """Test case for change_content_as_text
+
+        Change content of node as text.
+        """
+        pass
+
+    def test_change_metadata(self) -> None:
+        """Test case for change_metadata
+
+        Change metadata of node.
+        """
+        pass
+
+    def test_change_metadata_with_versioning(self) -> None:
+        """Test case for change_metadata_with_versioning
+
+        Change metadata of node (new version).
+        """
+        pass
+
+    def test_change_preview(self) -> None:
+        """Test case for change_preview
+
+        Change preview of node.
+        """
+        pass
+
+    def test_change_template_metadata(self) -> None:
+        """Test case for change_template_metadata
+
+        Set the metadata template for this folder.
+        """
+        pass
+
+    def test_copy_metadata(self) -> None:
+        """Test case for copy_metadata
+
+        Copy metadata from another node.
+        """
+        pass
+
+    def test_create_child(self) -> None:
+        """Test case for create_child
+
+        Create a new child.
+        """
+        pass
+
+    def test_create_child_by_copying(self) -> None:
+        """Test case for create_child_by_copying
+
+        Create a new child by copying.
+        """
+        pass
+
+    def test_create_child_by_moving(self) -> None:
+        """Test case for create_child_by_moving
+
+        Create a new child by moving.
+        """
+        pass
+
+    def test_create_fork_of_node(self) -> None:
+        """Test case for create_fork_of_node
+
+        Create a copy of a node by creating a forked version (variant).
+        """
+        pass
+
+    def test_create_share(self) -> None:
+        """Test case for create_share
+
+        Create a share for a node.
+        """
+        pass
+
+    def test_delete(self) -> None:
+        """Test case for delete
+
+        Delete node.
+        """
+        pass
+
+    def test_delete_preview(self) -> None:
+        """Test case for delete_preview
+
+        Delete preview of node.
+        """
+        pass
+
+    def test_get_assocs(self) -> None:
+        """Test case for get_assocs
+
+        Get related nodes.
+        """
+        pass
+
+    def test_get_children(self) -> None:
+        """Test case for get_children
+
+        Get children of node.
+        """
+        pass
+
+    def test_get_lrmi_data(self) -> None:
+        """Test case for get_lrmi_data
+
+        Get lrmi data.
+        """
+        pass
+
+    def test_get_metadata(self) -> None:
+        """Test case for get_metadata
+
+        Get metadata of node.
+        """
+        pass
+
+    def test_get_nodes(self) -> None:
+        """Test case for get_nodes
+
+        Searching nodes.
+        """
+        pass
+
+    def test_get_notify_list(self) -> None:
+        """Test case for get_notify_list
+
+        Get notifys (sharing history) of the node.
+        """
+        pass
+
+    def test_get_parents(self) -> None:
+        """Test case for get_parents
+
+        Get parents of node.
+        """
+        pass
+
+    def test_get_permission(self) -> None:
+        """Test case for get_permission
+
+        Get all permissions of node.
+        """
+        pass
+
+    def test_get_published_copies(self) -> None:
+        """Test case for get_published_copies
+
+        Publish
+        """
+        pass
+
+    def test_get_shares(self) -> None:
+        """Test case for get_shares
+
+        Get shares of node.
+        """
+        pass
+
+    def test_get_stats(self) -> None:
+        """Test case for get_stats
+
+        Get statistics of node.
+        """
+        pass
+
+    def test_get_template_metadata(self) -> None:
+        """Test case for get_template_metadata
+
+        Get the metadata template + status for this folder.
+        """
+        pass
+
+    def test_get_text_content(self) -> None:
+        """Test case for get_text_content
+
+        Get the text content of a document.
+        """
+        pass
+
+    def test_get_version_metadata(self) -> None:
+        """Test case for get_version_metadata
+
+        Get metadata of node version.
+        """
+        pass
+
+    def test_get_versions(self) -> None:
+        """Test case for get_versions
+
+        Get all versions of node.
+        """
+        pass
+
+    def test_get_versions1(self) -> None:
+        """Test case for get_versions1
+
+        Get all versions of node, including its metadata.
+        """
+        pass
+
+    def test_get_workflow_history(self) -> None:
+        """Test case for get_workflow_history
+
+        Get workflow history.
+        """
+        pass
+
+    def test_has_permission(self) -> None:
+        """Test case for has_permission
+
+        Which permissions has user/group for node.
+        """
+        pass
+
+    def test_import_node(self) -> None:
+        """Test case for import_node
+
+        Import node
+        """
+        pass
+
+    def test_islocked(self) -> None:
+        """Test case for islocked
+
+        locked status of a node.
+        """
+        pass
+
+    def test_prepare_usage(self) -> None:
+        """Test case for prepare_usage
+
+        create remote object and get properties.
+        """
+        pass
+
+    def test_publish_copy(self) -> None:
+        """Test case for publish_copy
+
+        Publish
+        """
+        pass
+
+    def test_remove_share(self) -> None:
+        """Test case for remove_share
+
+        Remove share of a node.
+        """
+        pass
+
+    def test_report_node(self) -> None:
+        """Test case for report_node
+
+        Report the node.
+        """
+        pass
+
+    def test_revert_version(self) -> None:
+        """Test case for revert_version
+
+        Revert to node version.
+        """
+        pass
+
+    def test_set_owner(self) -> None:
+        """Test case for set_owner
+
+        Set owner of node.
+        """
+        pass
+
+    def test_set_permission(self) -> None:
+        """Test case for set_permission
+
+        Set local permissions of node.
+        """
+        pass
+
+    def test_set_property(self) -> None:
+        """Test case for set_property
+
+        Set single property of node.
+        """
+        pass
+
+    def test_store_x_api_data(self) -> None:
+        """Test case for store_x_api_data
+
+        Store xApi-Conform data for a given node
+        """
+        pass
+
+    def test_unlock(self) -> None:
+        """Test case for unlock
+
+        unlock node.
+        """
+        pass
+
+    def test_update_share(self) -> None:
+        """Test case for update_share
+
+        update share of a node.
+        """
+        pass
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/edu_sharing_openapi/test/test_notification_config.py b/edu_sharing_openapi/test/test_notification_config.py
new file mode 100644
index 00000000..9cf8db8e
--- /dev/null
+++ b/edu_sharing_openapi/test/test_notification_config.py
@@ -0,0 +1,61 @@
+# coding: utf-8
+
+"""
+    edu-sharing Repository REST API
+
+    The public restful API of the edu-sharing repository.
+
+    The version of the OpenAPI document: 1.1
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.notification_config import NotificationConfig + +class TestNotificationConfig(unittest.TestCase): + """NotificationConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotificationConfig: + """Test NotificationConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotificationConfig` + """ + model = NotificationConfig() + if include_optional: + return NotificationConfig( + config_mode = 'uniformly', + default_interval = 'immediately', + intervals = edu_sharing_client.models.notification_intervals.NotificationIntervals( + add_to_collection_event = 'immediately', + propose_for_collection_event = 'immediately', + comment_event = 'immediately', + invite_event = 'immediately', + node_issue_event = 'immediately', + rating_event = 'immediately', + workflow_event = 'immediately', + metadata_suggestion_event = 'immediately', ) + ) + else: + return NotificationConfig( + ) + """ + + def testNotificationConfig(self): + """Test NotificationConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_notification_event_dto.py b/edu_sharing_openapi/test/test_notification_event_dto.py new file mode 100644 index 00000000..9d123d6a --- /dev/null +++ b/edu_sharing_openapi/test/test_notification_event_dto.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.notification_event_dto import NotificationEventDTO + +class TestNotificationEventDTO(unittest.TestCase): + """NotificationEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotificationEventDTO: + """Test NotificationEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotificationEventDTO` + """ + model = NotificationEventDTO() + if include_optional: + return NotificationEventDTO( + timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + creator = edu_sharing_client.models.user_data_dto.UserDataDTO( + id = '', + first_name = '', + last_name = '', + mailbox = '', ), + receiver = edu_sharing_client.models.user_data_dto.UserDataDTO( + id = '', + first_name = '', + last_name = '', + mailbox = '', ), + status = 'PENDING', + id = '', + var_class = '' + ) + else: + return NotificationEventDTO( + var_class = '', + ) + """ + + def testNotificationEventDTO(self): + """Test NotificationEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_notification_intervals.py b/edu_sharing_openapi/test/test_notification_intervals.py new file mode 100644 index 00000000..a5fe447c --- /dev/null +++ b/edu_sharing_openapi/test/test_notification_intervals.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.notification_intervals import NotificationIntervals + +class TestNotificationIntervals(unittest.TestCase): + """NotificationIntervals unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotificationIntervals: + """Test NotificationIntervals + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotificationIntervals` + """ + model = NotificationIntervals() + if include_optional: + return NotificationIntervals( + add_to_collection_event = 'immediately', + propose_for_collection_event = 'immediately', + comment_event = 'immediately', + invite_event = 'immediately', + node_issue_event = 'immediately', + rating_event = 'immediately', + workflow_event = 'immediately', + metadata_suggestion_event = 'immediately' + ) + else: + return NotificationIntervals( + ) + """ + + def testNotificationIntervals(self): + """Test NotificationIntervals""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_notification_response_page.py b/edu_sharing_openapi/test/test_notification_response_page.py new file mode 100644 index 00000000..da4bab8c --- /dev/null +++ b/edu_sharing_openapi/test/test_notification_response_page.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.notification_response_page import NotificationResponsePage + +class TestNotificationResponsePage(unittest.TestCase): + """NotificationResponsePage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotificationResponsePage: + """Test NotificationResponsePage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotificationResponsePage` + """ + model = NotificationResponsePage() + if include_optional: + return NotificationResponsePage( + content = [ + edu_sharing_client.models.notification_event_dto.NotificationEventDTO( + timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + creator = edu_sharing_client.models.user_data_dto.UserDataDTO( + id = '', + first_name = '', + last_name = '', + mailbox = '', ), + receiver = edu_sharing_client.models.user_data_dto.UserDataDTO( + id = '', + first_name = '', + last_name = '', + mailbox = '', ), + status = 'PENDING', + _id = '', + _class = '', ) + ], + pageable = edu_sharing_client.models.pageable.Pageable( + page_number = 56, + unpaged = True, + offset = 56, + sort = edu_sharing_client.models.sort.Sort( + sorted = True, + empty = True, + unsorted = True, ), + paged = True, + page_size = 56, ), + total_elements = 56, + total_pages = 56, + last = True, + number_of_elements = 56, + first = True, + size = 56, + number = 56, + sort = edu_sharing_client.models.sort.Sort( + sorted = True, + empty = True, + unsorted = True, ), + empty = True + ) + else: + return NotificationResponsePage( + ) + """ + + def testNotificationResponsePage(self): + """Test NotificationResponsePage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_notificationv1_api.py b/edu_sharing_openapi/test/test_notificationv1_api.py new file mode 100644 index 00000000..e2631772 --- /dev/null +++ b/edu_sharing_openapi/test/test_notificationv1_api.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.notificationv1_api import NOTIFICATIONV1Api + + +class TestNOTIFICATIONV1Api(unittest.TestCase): + """NOTIFICATIONV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = NOTIFICATIONV1Api() + + def tearDown(self) -> None: + pass + + def test_delete_notification(self) -> None: + """Test case for delete_notification + + Endpoint to delete notification by id + """ + pass + + def test_get_config2(self) -> None: + """Test case for get_config2 + + get the config for notifications of the current user + """ + pass + + def test_get_notifications(self) -> None: + """Test case for get_notifications + + Retrieve stored notification, filtered by receiver and status + """ + pass + + def test_set_config1(self) -> None: + """Test case for set_config1 + + Update the config for notifications of the current user + """ + pass + + def test_update_notification_status(self) -> None: + """Test case for update_notification_status + + Endpoint to update the notification status + """ + pass + + def test_update_notification_status_by_receiver_id(self) -> None: + """Test case for update_notification_status_by_receiver_id + + Endpoint to update the notification status + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_notify_entry.py b/edu_sharing_openapi/test/test_notify_entry.py new file mode 100644 index 00000000..2828e461 --- /dev/null +++ b/edu_sharing_openapi/test/test_notify_entry.py @@ -0,0 +1,262 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.notify_entry import NotifyEntry + +class TestNotifyEntry(unittest.TestCase): + """NotifyEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> NotifyEntry: + """Test NotifyEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `NotifyEntry` + """ + model = NotifyEntry() + if include_optional: + return NotifyEntry( + var_date = 56, + permissions = edu_sharing_client.models.acl.ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ], ), + user = edu_sharing_client.models.user.User( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + quota = edu_sharing_client.models.user_quota.UserQuota( + enabled = True, + size_current = 56, + size_quota = 56, ), + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + home_folder = , + shared_folders = [ + + ], ), + action = '' + ) + else: + return NotifyEntry( + var_date = 56, + permissions = edu_sharing_client.models.acl.ACL( + inherited = True, + permissions = [ + edu_sharing_client.models.ace.ACE( + editable = True, + authority = edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ), + user = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + group = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + permissions = [ + '' + ], ) + ], ), + user = 
edu_sharing_client.models.user.User( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + quota = edu_sharing_client.models.user_quota.UserQuota( + enabled = True, + size_current = 56, + size_quota = 56, ), + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + home_folder = , + shared_folders = [ + + ], ), + action = '', + ) + """ + + def testNotifyEntry(self): + """Test NotifyEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_open_id_configuration.py b/edu_sharing_openapi/test/test_open_id_configuration.py new file mode 100644 index 00000000..3ba0026d --- /dev/null +++ b/edu_sharing_openapi/test/test_open_id_configuration.py @@ -0,0 +1,89 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.open_id_configuration import OpenIdConfiguration + +class TestOpenIdConfiguration(unittest.TestCase): + """OpenIdConfiguration unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OpenIdConfiguration: + """Test OpenIdConfiguration + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OpenIdConfiguration` + """ + model = OpenIdConfiguration() + if include_optional: + return OpenIdConfiguration( + issuer = '', + token_endpoint = '', + token_endpoint_auth_methods_supported = [ + '' + ], + token_endpoint_auth_signing_alg_values_supported = [ + '' + ], + jwks_uri = '', + authorization_endpoint = '', + registration_endpoint = '', + scopes_supported = [ + '' + ], + response_types_supported = [ + '' + ], + subject_types_supported = [ + '' + ], + id_token_signing_alg_values_supported = [ + '' + ], + claims_supported = [ + '' + ], + https__purl_imsglobal_org_spec_lti_platform_configuration = edu_sharing_client.models.lti_platform_configuration.LTIPlatformConfiguration( + product_family_code = '', + version = '', + messages_supported = [ + edu_sharing_client.models.message.Message( + type = '', + placements = [ + '' + ], ) + ], + variables = [ + '' + ], ) + ) + else: + return OpenIdConfiguration( + ) + """ + + def testOpenIdConfiguration(self): + """Test OpenIdConfiguration""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_open_id_registration_result.py b/edu_sharing_openapi/test/test_open_id_registration_result.py new file mode 100644 index 00000000..0b525513 --- /dev/null +++ b/edu_sharing_openapi/test/test_open_id_registration_result.py @@ -0,0 +1,76 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.open_id_registration_result import OpenIdRegistrationResult + +class TestOpenIdRegistrationResult(unittest.TestCase): + """OpenIdRegistrationResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OpenIdRegistrationResult: + """Test OpenIdRegistrationResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OpenIdRegistrationResult` + """ + model = OpenIdRegistrationResult() + if include_optional: + return OpenIdRegistrationResult( + client_id = '', + response_types = [ + '' + ], + jwks_uri = '', + initiate_login_uri = '', + grant_types = [ + '' + ], + redirect_uris = [ + '' + ], + application_type = '', + token_endpoint_auth_method = '', + client_name = '', + logo_uri = '', + scope = '', + https__purl_imsglobal_org_spec_lti_tool_configuration = edu_sharing_client.models.lti_tool_configuration.LTIToolConfiguration( + version = '', + deployment_id = '', + target_link_uri = '', + domain = '', + description = '', + claims = [ + '' + ], ) + ) + else: + return OpenIdRegistrationResult( + ) + """ + + def testOpenIdRegistrationResult(self): + """Test OpenIdRegistrationResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_organisations_import_result.py b/edu_sharing_openapi/test/test_organisations_import_result.py new file mode 100644 index 00000000..4bd62404 --- /dev/null +++ b/edu_sharing_openapi/test/test_organisations_import_result.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.organisations_import_result import OrganisationsImportResult + +class TestOrganisationsImportResult(unittest.TestCase): + """OrganisationsImportResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OrganisationsImportResult: + """Test OrganisationsImportResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OrganisationsImportResult` + """ + model = OrganisationsImportResult() + if include_optional: + return OrganisationsImportResult( + rows = 56 + ) + else: + return OrganisationsImportResult( + ) + """ + + def testOrganisationsImportResult(self): + """Test OrganisationsImportResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_organization.py b/edu_sharing_openapi/test/test_organization.py new file mode 100644 index 00000000..cb57b438 --- /dev/null +++ b/edu_sharing_openapi/test/test_organization.py @@ -0,0 +1,80 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.organization import Organization + +class TestOrganization(unittest.TestCase): + """Organization unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Organization: + """Test Organization + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Organization` + """ + model = Organization() + if include_optional: + return Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ) + ) + else: + return Organization( + authority_name = '', + ) + """ + + def testOrganization(self): + """Test Organization""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_organization_entries.py b/edu_sharing_openapi/test/test_organization_entries.py new file mode 100644 index 00000000..667e49c3 --- /dev/null +++ b/edu_sharing_openapi/test/test_organization_entries.py @@ -0,0 +1,123 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.organization_entries import OrganizationEntries + +class TestOrganizationEntries(unittest.TestCase): + """OrganizationEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> OrganizationEntries: + """Test OrganizationEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `OrganizationEntries` + """ + model = OrganizationEntries() + if include_optional: + return OrganizationEntries( + organizations = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + can_create = True + ) + else: + return OrganizationEntries( + organizations = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + ) + """ + + def testOrganizationEntries(self): + """Test OrganizationEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_organizationv1_api.py b/edu_sharing_openapi/test/test_organizationv1_api.py new file mode 100644 index 00000000..83ce6879 --- /dev/null +++ b/edu_sharing_openapi/test/test_organizationv1_api.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.organizationv1_api import ORGANIZATIONV1Api + + +class TestORGANIZATIONV1Api(unittest.TestCase): + """ORGANIZATIONV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = ORGANIZATIONV1Api() + + def tearDown(self) -> None: + pass + + def test_create_organizations(self) -> None: + """Test case for create_organizations + + create organization in repository. + """ + pass + + def test_delete_organizations(self) -> None: + """Test case for delete_organizations + + Delete organization of repository. + """ + pass + + def test_get_organization(self) -> None: + """Test case for get_organization + + Get organization by id. + """ + pass + + def test_get_organizations(self) -> None: + """Test case for get_organizations + + Get organizations of repository. + """ + pass + + def test_remove_from_organization(self) -> None: + """Test case for remove_from_organization + + Remove member from organization. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_pageable.py b/edu_sharing_openapi/test/test_pageable.py new file mode 100644 index 00000000..79b672d6 --- /dev/null +++ b/edu_sharing_openapi/test/test_pageable.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.pageable import Pageable + +class TestPageable(unittest.TestCase): + """Pageable unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Pageable: + """Test Pageable + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Pageable` + """ + model = Pageable() + if include_optional: + return Pageable( + page_number = 56, + unpaged = True, + offset = 56, + sort = edu_sharing_client.models.sort.Sort( + sorted = True, + empty = True, + unsorted = True, ), + paged = True, + page_size = 56 + ) + else: + return Pageable( + ) + """ + + def testPageable(self): + """Test Pageable""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_pagination.py b/edu_sharing_openapi/test/test_pagination.py new file mode 100644 index 00000000..512a0dc7 --- /dev/null +++ b/edu_sharing_openapi/test/test_pagination.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.pagination import Pagination + +class TestPagination(unittest.TestCase): + """Pagination unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Pagination: + """Test Pagination + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Pagination` + """ + model = Pagination() + if include_optional: + return Pagination( + total = 56, + var_from = 56, + count = 56 + ) + else: + return Pagination( + total = 56, + var_from = 56, + count = 56, + ) + """ + + def testPagination(self): + """Test Pagination""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_parameters.py b/edu_sharing_openapi/test/test_parameters.py new file mode 100644 index 00000000..93147c0b --- /dev/null +++ b/edu_sharing_openapi/test/test_parameters.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.parameters import Parameters + +class TestParameters(unittest.TestCase): + """Parameters unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Parameters: + """Test Parameters + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Parameters` + """ + model = Parameters() + if include_optional: + return Parameters( + general = edu_sharing_client.models.general.General( + referenced_in_name = '', + referenced_in_type = '', + referenced_in_instance = '', ) + ) + else: + return Parameters( + ) + """ + + def testParameters(self): + """Test Parameters""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_parent_entries.py b/edu_sharing_openapi/test/test_parent_entries.py new file mode 100644 index 00000000..864204f5 --- /dev/null +++ b/edu_sharing_openapi/test/test_parent_entries.py @@ -0,0 +1,469 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.parent_entries import ParentEntries + +class TestParentEntries(unittest.TestCase): + """ParentEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ParentEntries: + """Test ParentEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ParentEntries` + """ + model = ParentEntries() + if include_optional: + return ParentEntries( + scope = '', + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user 
= True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ) + ) + else: + return ParentEntries( + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + 
last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + ) + """ + + def testParentEntries(self): + """Test ParentEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_person.py b/edu_sharing_openapi/test/test_person.py new file mode 100644 index 00000000..bc512624 --- /dev/null +++ b/edu_sharing_openapi/test/test_person.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.person import Person + +class TestPerson(unittest.TestCase): + """Person unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Person: + """Test Person + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Person` + """ + model = Person() + if include_optional: + return Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '' + ) + else: + return Person( + ) + """ + + def testPerson(self): + """Test Person""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_person_delete_options.py b/edu_sharing_openapi/test/test_person_delete_options.py new file mode 100644 index 00000000..857be60e --- /dev/null +++ b/edu_sharing_openapi/test/test_person_delete_options.py @@ -0,0 +1,76 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.person_delete_options import PersonDeleteOptions + +class TestPersonDeleteOptions(unittest.TestCase): + """PersonDeleteOptions unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PersonDeleteOptions: + """Test PersonDeleteOptions + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PersonDeleteOptions` + """ + model = PersonDeleteOptions() + if include_optional: + return PersonDeleteOptions( + cleanup_metadata = True, + home_folder = edu_sharing_client.models.home_folder_options.HomeFolderOptions( + folders = 'none', + private_files = 'none', + cc_files = 'none', + keep_folder_structure = True, ), + shared_folders = edu_sharing_client.models.shared_folder_options.SharedFolderOptions( + folders = 'none', + private_files = 'none', + cc_files = 'none', + move = True, ), + collections = edu_sharing_client.models.collection_options.CollectionOptions( + private_collections = 'none', + public_collections = 'none', ), + ratings = edu_sharing_client.models.delete_option.DeleteOption( + delete = True, ), + comments = edu_sharing_client.models.delete_option.DeleteOption( + delete = True, ), + collection_feedback = edu_sharing_client.models.delete_option.DeleteOption( + delete = True, ), + statistics = edu_sharing_client.models.delete_option.DeleteOption( + delete = True, ), + stream = edu_sharing_client.models.delete_option.DeleteOption( + delete = True, ), + receiver = '', + receiver_group = '' + ) + else: + return PersonDeleteOptions( + ) + """ + + def testPersonDeleteOptions(self): + """Test PersonDeleteOptions""" + # inst_req_only = self.make_instance(include_optional=False) + # 
inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_person_delete_result.py b/edu_sharing_openapi/test/test_person_delete_result.py new file mode 100644 index 00000000..c10320be --- /dev/null +++ b/edu_sharing_openapi/test/test_person_delete_result.py @@ -0,0 +1,87 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.person_delete_result import PersonDeleteResult + +class TestPersonDeleteResult(unittest.TestCase): + """PersonDeleteResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PersonDeleteResult: + """Test PersonDeleteResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PersonDeleteResult` + """ + model = PersonDeleteResult() + if include_optional: + return PersonDeleteResult( + authority_name = '', + deleted_name = '', + home_folder = { + 'key' : edu_sharing_client.models.counts.Counts( + elements = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ], ) + }, + shared_folders = { + 'key' : edu_sharing_client.models.counts.Counts( + elements = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ], ) + }, + collections = edu_sharing_client.models.collection_counts.CollectionCounts( + refs = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ], + collections = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ], ), + comments = 56, + ratings = 56, + collection_feedback = 56, + stream = 56 + ) + else: + return PersonDeleteResult( + ) + """ + + def testPersonDeleteResult(self): + """Test PersonDeleteResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_person_report.py b/edu_sharing_openapi/test/test_person_report.py new file mode 100644 index 00000000..a3cb9bbd --- /dev/null +++ b/edu_sharing_openapi/test/test_person_report.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.person_report import PersonReport + +class TestPersonReport(unittest.TestCase): + """PersonReport unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PersonReport: + """Test PersonReport + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PersonReport` + """ + model = PersonReport() + if include_optional: + return PersonReport( + options = edu_sharing_client.models.person_delete_options.PersonDeleteOptions( + cleanup_metadata = True, + home_folder = edu_sharing_client.models.home_folder_options.HomeFolderOptions( + folders = 'none', + private_files = 'none', + cc_files = 'none', + keep_folder_structure = True, ), + shared_folders = edu_sharing_client.models.shared_folder_options.SharedFolderOptions( + folders = 'none', + private_files = 'none', + cc_files = 'none', + move = True, ), + collections = edu_sharing_client.models.collection_options.CollectionOptions( + private_collections = 'none', + public_collections = 'none', ), + ratings = edu_sharing_client.models.delete_option.DeleteOption( + delete = True, ), + comments = edu_sharing_client.models.delete_option.DeleteOption( + delete = True, ), + collection_feedback = , + statistics = , + stream = , + receiver = '', + receiver_group = '', ), + results = [ + edu_sharing_client.models.person_delete_result.PersonDeleteResult( + authority_name = '', + deleted_name = '', + home_folder = { + 'key' : edu_sharing_client.models.counts.Counts( + elements = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ], ) + }, + shared_folders = { + 'key' : edu_sharing_client.models.counts.Counts() + }, + collections = edu_sharing_client.models.collection_counts.CollectionCounts( + refs = [ + edu_sharing_client.models.element.Element( + id = '', + name = '', + type = '', ) + ], ), + comments = 56, + ratings = 56, + collection_feedback = 56, + stream = 56, ) + ] + ) + else: + return PersonReport( + ) + """ + + def testPersonReport(self): + """Test PersonReport""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_plugin_info.py b/edu_sharing_openapi/test/test_plugin_info.py new file mode 100644 index 00000000..ef6ed2ab --- /dev/null +++ b/edu_sharing_openapi/test/test_plugin_info.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.plugin_info import PluginInfo + +class TestPluginInfo(unittest.TestCase): + """PluginInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PluginInfo: + """Test PluginInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PluginInfo` + """ + model = PluginInfo() + if include_optional: + return PluginInfo( + id = '' + ) + else: + return PluginInfo( + ) + """ + + def testPluginInfo(self): + """Test PluginInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_plugin_status.py b/edu_sharing_openapi/test/test_plugin_status.py new file mode 100644 index 00000000..be4d36b1 --- /dev/null +++ b/edu_sharing_openapi/test/test_plugin_status.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.plugin_status import PluginStatus + +class TestPluginStatus(unittest.TestCase): + """PluginStatus unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> PluginStatus: + """Test PluginStatus + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `PluginStatus` + """ + model = PluginStatus() + if include_optional: + return PluginStatus( + version = '', + name = '', + enabled = True + ) + else: + return PluginStatus( + ) + """ + + def testPluginStatus(self): + """Test PluginStatus""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_preferences.py b/edu_sharing_openapi/test/test_preferences.py new file mode 100644 index 00000000..bdf3ebda --- /dev/null +++ b/edu_sharing_openapi/test/test_preferences.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.preferences import Preferences + +class TestPreferences(unittest.TestCase): + """Preferences unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Preferences: + """Test Preferences + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Preferences` + """ + model = Preferences() + if include_optional: + return Preferences( + preferences = '' + ) + else: + return Preferences( + ) + """ + + def testPreferences(self): + """Test Preferences""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_preview.py b/edu_sharing_openapi/test/test_preview.py new file mode 100644 index 00000000..70ecc290 --- /dev/null +++ b/edu_sharing_openapi/test/test_preview.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.preview import Preview + +class TestPreview(unittest.TestCase): + """Preview unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Preview: + """Test Preview + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Preview` + """ + model = Preview() + if include_optional: + return Preview( + is_icon = True, + is_generated = True, + type = '', + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56 + ) + else: + return Preview( + is_icon = True, + url = '', + width = 56, + height = 56, + ) + """ + + def testPreview(self): + """Test Preview""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_profile.py b/edu_sharing_openapi/test/test_profile.py new file mode 100644 index 00000000..f9b4621f --- /dev/null +++ b/edu_sharing_openapi/test/test_profile.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.profile import Profile + +class TestProfile(unittest.TestCase): + """Profile unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Profile: + """Test Profile + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Profile` + """ + model = Profile() + if include_optional: + return Profile( + group_email = '', + mediacenter = edu_sharing_client.models.mediacenter_profile_extension.MediacenterProfileExtension( + id = '', + location = '', + district_abbreviation = '', + main_url = '', + catalogs = [ + edu_sharing_client.models.catalog.Catalog( + name = '', + url = '', ) + ], + content_status = 'Activated', ), + display_name = '', + group_type = '', + scope_type = '' + ) + else: + return Profile( + ) + """ + + def testProfile(self): + """Test Profile""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_profile_settings.py b/edu_sharing_openapi/test/test_profile_settings.py new file mode 100644 index 00000000..12ee010d --- /dev/null +++ b/edu_sharing_openapi/test/test_profile_settings.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.profile_settings import ProfileSettings + +class TestProfileSettings(unittest.TestCase): + """ProfileSettings unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ProfileSettings: + """Test ProfileSettings + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ProfileSettings` + """ + model = ProfileSettings() + if include_optional: + return ProfileSettings( + show_email = True + ) + else: + return ProfileSettings( + show_email = True, + ) + """ + + def testProfileSettings(self): + """Test ProfileSettings""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_propose_for_collection_event_dto.py b/edu_sharing_openapi/test/test_propose_for_collection_event_dto.py new file mode 100644 index 00000000..d99dd993 --- /dev/null +++ b/edu_sharing_openapi/test/test_propose_for_collection_event_dto.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.propose_for_collection_event_dto import ProposeForCollectionEventDTO + +class TestProposeForCollectionEventDTO(unittest.TestCase): + """ProposeForCollectionEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ProposeForCollectionEventDTO: + """Test ProposeForCollectionEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ProposeForCollectionEventDTO` + """ + model = ProposeForCollectionEventDTO() + if include_optional: + return ProposeForCollectionEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + collection = edu_sharing_client.models.collection_dto.CollectionDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ) + ) + else: + return ProposeForCollectionEventDTO( + ) + """ + + def testProposeForCollectionEventDTO(self): + """Test ProposeForCollectionEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_provider.py b/edu_sharing_openapi/test/test_provider.py new file mode 100644 index 00000000..424acf6d --- /dev/null +++ b/edu_sharing_openapi/test/test_provider.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.provider import Provider + +class TestProvider(unittest.TestCase): + """Provider unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Provider: + """Test Provider + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Provider` + """ + model = Provider() + if include_optional: + return Provider( + legal_name = '', + url = '', + email = '', + area_served = 'Organization', + location = edu_sharing_client.models.location.Location( + geo = edu_sharing_client.models.geo.Geo( + longitude = 1.337, + latitude = 1.337, + address_country = '', ), ) + ) + else: + return Provider( + ) + """ + + def testProvider(self): + """Test Provider""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_query.py b/edu_sharing_openapi/test/test_query.py new file mode 100644 index 00000000..ca554181 --- /dev/null +++ b/edu_sharing_openapi/test/test_query.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.query import Query + +class TestQuery(unittest.TestCase): + """Query unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Query: + """Test Query + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Query` + """ + model = Query() + if include_optional: + return Query( + condition = edu_sharing_client.models.condition.Condition( + type = 'TOOLPERMISSION', + negate = True, + value = '', ), + query = '' + ) + else: + return Query( + ) + """ + + def testQuery(self): + """Test Query""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_rating_data.py b/edu_sharing_openapi/test/test_rating_data.py new file mode 100644 index 00000000..6888de10 --- /dev/null +++ b/edu_sharing_openapi/test/test_rating_data.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.rating_data import RatingData + +class TestRatingData(unittest.TestCase): + """RatingData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RatingData: + """Test RatingData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RatingData` + """ + model = RatingData() + if include_optional: + return RatingData( + sum = 1.337, + count = 56, + rating = 1.337 + ) + else: + return RatingData( + ) + """ + + def testRatingData(self): + """Test RatingData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_rating_details.py b/edu_sharing_openapi/test/test_rating_details.py new file mode 100644 index 00000000..5548405f --- /dev/null +++ b/edu_sharing_openapi/test/test_rating_details.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.rating_details import RatingDetails + +class TestRatingDetails(unittest.TestCase): + """RatingDetails unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RatingDetails: + """Test RatingDetails + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RatingDetails` + """ + model = RatingDetails() + if include_optional: + return RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ) + }, + user = 1.337 + ) + else: + return RatingDetails( + ) + """ + + def testRatingDetails(self): + """Test RatingDetails""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_rating_event_dto.py b/edu_sharing_openapi/test/test_rating_event_dto.py new file mode 100644 index 00000000..b425f832 --- /dev/null +++ b/edu_sharing_openapi/test/test_rating_event_dto.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.rating_event_dto import RatingEventDTO + +class TestRatingEventDTO(unittest.TestCase): + """RatingEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RatingEventDTO: + """Test RatingEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RatingEventDTO` + """ + model = RatingEventDTO() + if include_optional: + return RatingEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + new_rating = 1.337, + rating_sum = 1.337, + rating_count = 56 + ) + else: + return RatingEventDTO( + ) + """ + + def testRatingEventDTO(self): + """Test RatingEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_rating_history.py b/edu_sharing_openapi/test/test_rating_history.py new file mode 100644 index 00000000..9d71fab8 --- /dev/null +++ b/edu_sharing_openapi/test/test_rating_history.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.rating_history import RatingHistory + +class TestRatingHistory(unittest.TestCase): + """RatingHistory unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RatingHistory: + """Test RatingHistory + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RatingHistory` + """ + model = RatingHistory() + if include_optional: + return RatingHistory( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, + rating = 1.337, ) + }, + timestamp = '' + ) + else: + return RatingHistory( + ) + """ + + def testRatingHistory(self): + """Test RatingHistory""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_ratingv1_api.py b/edu_sharing_openapi/test/test_ratingv1_api.py new file mode 100644 index 00000000..b1c67638 --- /dev/null +++ b/edu_sharing_openapi/test/test_ratingv1_api.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.ratingv1_api import RATINGV1Api + + +class TestRATINGV1Api(unittest.TestCase): + """RATINGV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = RATINGV1Api() + + def tearDown(self) -> None: + pass + + def test_add_or_update_rating(self) -> None: + """Test case for add_or_update_rating + + create or update a rating + """ + pass + + def test_delete_rating(self) -> None: + """Test case for delete_rating + + delete a comment + """ + pass + + def test_get_accumulated_ratings(self) -> None: + """Test case for get_accumulated_ratings + + get the range of nodes which had tracked actions since a given timestamp + """ + pass + + def test_get_nodes_altered_in_range(self) -> None: + """Test case for get_nodes_altered_in_range + + get the range of nodes which had tracked actions since a given timestamp + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_reference_entries.py b/edu_sharing_openapi/test/test_reference_entries.py new file mode 100644 index 00000000..cc933950 --- /dev/null +++ b/edu_sharing_openapi/test/test_reference_entries.py @@ -0,0 +1,526 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.reference_entries import ReferenceEntries + +class TestReferenceEntries(unittest.TestCase): + """ReferenceEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ReferenceEntries: + """Test ReferenceEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ReferenceEntries` + """ + model = ReferenceEntries() + if include_optional: + return ReferenceEntries( + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + references = [ + edu_sharing_client.models.collection_reference.CollectionReference( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = 
edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = , + access_original = [ + '' + ], + original_restricted_access = True, + ref = , + parent = , + type = '', + aspects = , + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = , + download_url = '', + properties = , + mimetype = '', + mediatype = '', + size = '', + preview = , + icon_url = '', + collection = , + owner = , + original_id = '', + is_public = True, ) + ] + ) + else: + return ReferenceEntries( + references = [ + edu_sharing_client.models.collection_reference.CollectionReference( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : edu_sharing_client.models.node.Node( + 
is_directory = True, + comment_count = 56, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = , + access_original = [ + '' + ], + original_restricted_access = True, + ref = , + parent = , + type = '', + aspects = , + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', 
'%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = , + download_url = '', + properties = , + mimetype = '', + mediatype = '', + size = '', + preview = , + icon_url = '', + collection = , + owner = , + original_id = '', + is_public = True, ) + ], + ) + """ + + def testReferenceEntries(self): + """Test ReferenceEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_register.py b/edu_sharing_openapi/test/test_register.py new file mode 100644 index 00000000..3df5d920 --- /dev/null +++ b/edu_sharing_openapi/test/test_register.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.register import Register + +class TestRegister(unittest.TestCase): + """Register unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Register: + """Test Register + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Register` + """ + model = Register() + if include_optional: + return Register( + local = True, + recover_password = True, + login_url = '', + recover_url = '', + required_fields = [ + '' + ] + ) + else: + return Register( + ) + """ + + def testRegister(self): + """Test Register""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_register_exists.py b/edu_sharing_openapi/test/test_register_exists.py new file mode 100644 index 00000000..61eac022 --- /dev/null +++ b/edu_sharing_openapi/test/test_register_exists.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.register_exists import RegisterExists + +class TestRegisterExists(unittest.TestCase): + """RegisterExists unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RegisterExists: + """Test RegisterExists + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RegisterExists` + """ + model = RegisterExists() + if include_optional: + return RegisterExists( + exists = True + ) + else: + return RegisterExists( + ) + """ + + def testRegisterExists(self): + """Test RegisterExists""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_register_information.py b/edu_sharing_openapi/test/test_register_information.py new file mode 100644 index 00000000..3e21330d --- /dev/null +++ b/edu_sharing_openapi/test/test_register_information.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.register_information import RegisterInformation + +class TestRegisterInformation(unittest.TestCase): + """RegisterInformation unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RegisterInformation: + """Test RegisterInformation + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RegisterInformation` + """ + model = RegisterInformation() + if include_optional: + return RegisterInformation( + vcard = '', + first_name = '', + last_name = '', + email = '', + password = '', + organization = '', + allow_notifications = True, + authority_name = '' + ) + else: + return RegisterInformation( + ) + """ + + def testRegisterInformation(self): + """Test RegisterInformation""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_registerv1_api.py b/edu_sharing_openapi/test/test_registerv1_api.py new file mode 100644 index 00000000..84c3936c --- /dev/null +++ b/edu_sharing_openapi/test/test_registerv1_api.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.registerv1_api import REGISTERV1Api + + +class TestREGISTERV1Api(unittest.TestCase): + """REGISTERV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = REGISTERV1Api() + + def tearDown(self) -> None: + pass + + def test_activate(self) -> None: + """Test case for activate + + Activate a new user (by using a supplied key) + """ + pass + + def test_mail_exists(self) -> None: + """Test case for mail_exists + + Check if the given mail is already successfully registered + """ + pass + + def test_recover_password(self) -> None: + """Test case for recover_password + + Send a mail to recover/reset password + """ + pass + + def test_register(self) -> None: + """Test case for register + + Register a new user + """ + pass + + def test_resend_mail(self) -> None: + """Test case for resend_mail + + Resend a registration mail for a given mail address + """ + pass + + def test_reset_password(self) -> None: + """Test case for reset_password + + Send a mail to recover/reset password + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_registration_url.py b/edu_sharing_openapi/test/test_registration_url.py new file mode 100644 index 00000000..3a1cee1a --- /dev/null +++ b/edu_sharing_openapi/test/test_registration_url.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.registration_url import RegistrationUrl + +class TestRegistrationUrl(unittest.TestCase): + """RegistrationUrl unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RegistrationUrl: + """Test RegistrationUrl + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RegistrationUrl` + """ + model = RegistrationUrl() + if include_optional: + return RegistrationUrl( + url = '' + ) + else: + return RegistrationUrl( + ) + """ + + def testRegistrationUrl(self): + """Test RegistrationUrl""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_relation_data.py b/edu_sharing_openapi/test/test_relation_data.py new file mode 100644 index 00000000..91d881ac --- /dev/null +++ b/edu_sharing_openapi/test/test_relation_data.py @@ -0,0 +1,319 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.relation_data import RelationData + +class TestRelationData(unittest.TestCase): + """RelationData unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RelationData: + """Test RelationData + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RelationData` + """ + model = RelationData() + if include_optional: + return RelationData( + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + 
child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + creator = edu_sharing_client.models.user.User( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + quota = edu_sharing_client.models.user_quota.UserQuota( + enabled = True, + size_current = 56, + size_quota = 56, ), + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + home_folder = , + shared_folders = [ + + ], ), + timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + type = 'isPartOf' + ) + else: + return RelationData( + ) + """ + + def testRelationData(self): + """Test RelationData""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_relationv1_api.py b/edu_sharing_openapi/test/test_relationv1_api.py new file mode 100644 index 00000000..c0dc732c --- /dev/null +++ b/edu_sharing_openapi/test/test_relationv1_api.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.relationv1_api import RELATIONV1Api + + +class TestRELATIONV1Api(unittest.TestCase): + """RELATIONV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = RELATIONV1Api() + + def tearDown(self) -> None: + pass + + def test_create_relation(self) -> None: + """Test case for create_relation + + create a relation between nodes + """ + pass + + def test_delete_relation(self) -> None: + """Test case for delete_relation + + delete a relation between nodes + """ + pass + + def test_get_relations(self) -> None: + """Test case for get_relations + + get all relation of the node + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_remote.py b/edu_sharing_openapi/test/test_remote.py new file mode 100644 index 00000000..3169ec52 --- /dev/null +++ b/edu_sharing_openapi/test/test_remote.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.remote import Remote + +class TestRemote(unittest.TestCase): + """Remote unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Remote: + """Test Remote + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Remote` + """ + model = Remote() + if include_optional: + return Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '' + ) + else: + return Remote( + ) + """ + + def testRemote(self): + """Test Remote""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_remote_auth_description.py b/edu_sharing_openapi/test/test_remote_auth_description.py new file mode 100644 index 00000000..7e36a943 --- /dev/null +++ b/edu_sharing_openapi/test/test_remote_auth_description.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.remote_auth_description import RemoteAuthDescription + +class TestRemoteAuthDescription(unittest.TestCase): + """RemoteAuthDescription unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RemoteAuthDescription: + """Test RemoteAuthDescription + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RemoteAuthDescription` + """ + model = RemoteAuthDescription() + if include_optional: + return RemoteAuthDescription( + url = '', + token = '' + ) + else: + return RemoteAuthDescription( + ) + """ + + def testRemoteAuthDescription(self): + """Test RemoteAuthDescription""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_rendering.py b/edu_sharing_openapi/test/test_rendering.py new file mode 100644 index 00000000..737f51c4 --- /dev/null +++ b/edu_sharing_openapi/test/test_rendering.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.rendering import Rendering + +class TestRendering(unittest.TestCase): + """Rendering unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Rendering: + """Test Rendering + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Rendering` + """ + model = Rendering() + if include_optional: + return Rendering( + show_preview = True, + show_download_button = True, + prerender = True, + gdpr = [ + edu_sharing_client.models.rendering_gdpr.RenderingGdpr( + matcher = '', + name = '', + privacy_information_url = '', ) + ] + ) + else: + return Rendering( + ) + """ + + def testRendering(self): + """Test Rendering""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_rendering_details_entry.py b/edu_sharing_openapi/test/test_rendering_details_entry.py new file mode 100644 index 00000000..81006448 --- /dev/null +++ b/edu_sharing_openapi/test/test_rendering_details_entry.py @@ -0,0 +1,460 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.rendering_details_entry import RenderingDetailsEntry + +class TestRenderingDetailsEntry(unittest.TestCase): + """RenderingDetailsEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RenderingDetailsEntry: + """Test RenderingDetailsEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RenderingDetailsEntry` + """ + model = RenderingDetailsEntry() + if include_optional: + return RenderingDetailsEntry( + details_snippet = '', + mime_type = '', + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = 
'', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ) + else: + return RenderingDetailsEntry( + details_snippet = '', + mime_type = '', + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + 
], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + ) + """ + + def testRenderingDetailsEntry(self): + """Test RenderingDetailsEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_rendering_gdpr.py b/edu_sharing_openapi/test/test_rendering_gdpr.py new file mode 100644 index 00000000..800d1491 --- /dev/null +++ b/edu_sharing_openapi/test/test_rendering_gdpr.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.rendering_gdpr import RenderingGdpr + +class TestRenderingGdpr(unittest.TestCase): + """RenderingGdpr unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RenderingGdpr: + """Test RenderingGdpr + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RenderingGdpr` + """ + model = RenderingGdpr() + if include_optional: + return RenderingGdpr( + matcher = '', + name = '', + privacy_information_url = '' + ) + else: + return RenderingGdpr( + ) + """ + + def testRenderingGdpr(self): + """Test RenderingGdpr""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_renderingv1_api.py b/edu_sharing_openapi/test/test_renderingv1_api.py new file mode 100644 index 00000000..66a303c2 --- /dev/null +++ b/edu_sharing_openapi/test/test_renderingv1_api.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.renderingv1_api import RENDERINGV1Api + + +class TestRENDERINGV1Api(unittest.TestCase): + """RENDERINGV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = RENDERINGV1Api() + + def tearDown(self) -> None: + pass + + def test_get_details_snippet1(self) -> None: + """Test case for get_details_snippet1 + + Get metadata of node. + """ + pass + + def test_get_details_snippet_with_parameters(self) -> None: + """Test case for get_details_snippet_with_parameters + + Get metadata of node. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_repo.py b/edu_sharing_openapi/test/test_repo.py new file mode 100644 index 00000000..b9bab59b --- /dev/null +++ b/edu_sharing_openapi/test/test_repo.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.repo import Repo + +class TestRepo(unittest.TestCase): + """Repo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Repo: + """Test Repo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Repo` + """ + model = Repo() + if include_optional: + return Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True + ) + else: + return Repo( + ) + """ + + def testRepo(self): + """Test Repo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_repo_entries.py b/edu_sharing_openapi/test/test_repo_entries.py new file mode 100644 index 00000000..6799987b --- /dev/null +++ b/edu_sharing_openapi/test/test_repo_entries.py @@ -0,0 +1,70 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.repo_entries import RepoEntries + +class TestRepoEntries(unittest.TestCase): + """RepoEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RepoEntries: + """Test RepoEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RepoEntries` + """ + model = RepoEntries() + if include_optional: + return RepoEntries( + repositories = [ + edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ) + ] + ) + else: + return RepoEntries( + repositories = [ + edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ) + ], + ) + """ + + def testRepoEntries(self): + """Test RepoEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_repository_config.py b/edu_sharing_openapi/test/test_repository_config.py new file mode 100644 index 00000000..7aaff05c --- /dev/null +++ b/edu_sharing_openapi/test/test_repository_config.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.repository_config import RepositoryConfig + +class TestRepositoryConfig(unittest.TestCase): + """RepositoryConfig unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RepositoryConfig: + """Test RepositoryConfig + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RepositoryConfig` + """ + model = RepositoryConfig() + if include_optional: + return RepositoryConfig( + frontpage = edu_sharing_client.models.frontpage.Frontpage( + total_count = 56, + display_count = 56, + mode = 'collection', + timespan = 56, + timespan_all = True, + queries = [ + edu_sharing_client.models.query.Query( + condition = edu_sharing_client.models.condition.Condition( + type = 'TOOLPERMISSION', + negate = True, + value = '', ), + query = '', ) + ], + collection = '', ) + ) + else: + return RepositoryConfig( + ) + """ + + def testRepositoryConfig(self): + """Test RepositoryConfig""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_repository_version_info.py b/edu_sharing_openapi/test/test_repository_version_info.py new file mode 100644 index 00000000..7c990b65 --- /dev/null +++ b/edu_sharing_openapi/test/test_repository_version_info.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.repository_version_info import RepositoryVersionInfo + +class TestRepositoryVersionInfo(unittest.TestCase): + """RepositoryVersionInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RepositoryVersionInfo: + """Test RepositoryVersionInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RepositoryVersionInfo` + """ + model = RepositoryVersionInfo() + if include_optional: + return RepositoryVersionInfo( + version = edu_sharing_client.models.version.Version( + full = '', + major = '', + minor = '', + patch = '', + qualifier = '', + build = '', ), + maven = edu_sharing_client.models.version_maven.VersionMaven( + bom = { + 'key' : '' + }, + project = edu_sharing_client.models.version_project.VersionProject( + artifact_id = '', + group_id = '', + version = '', ), ), + git = edu_sharing_client.models.version_git.VersionGit( + branch = '', + commit = edu_sharing_client.models.version_git_commit.VersionGitCommit( + id = '', + timestamp = edu_sharing_client.models.version_timestamp.VersionTimestamp( + datetime = '', ), ), ), + build = edu_sharing_client.models.version_build.VersionBuild( + timestamp = '', ) + ) + else: + return RepositoryVersionInfo( + ) + """ + + def testRepositoryVersionInfo(self): + """Test RepositoryVersionInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_restore_result.py b/edu_sharing_openapi/test/test_restore_result.py new file mode 100644 index 00000000..fa362a0e --- /dev/null +++ b/edu_sharing_openapi/test/test_restore_result.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.restore_result import RestoreResult + +class TestRestoreResult(unittest.TestCase): + """RestoreResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RestoreResult: + """Test RestoreResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RestoreResult` + """ + model = RestoreResult() + if include_optional: + return RestoreResult( + archive_node_id = '', + node_id = '', + parent = '', + path = '', + name = '', + restore_status = '' + ) + else: + return RestoreResult( + archive_node_id = '', + node_id = '', + parent = '', + path = '', + name = '', + restore_status = '', + ) + """ + + def testRestoreResult(self): + """Test RestoreResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_restore_results.py b/edu_sharing_openapi/test/test_restore_results.py new file mode 100644 index 00000000..90ec4850 --- /dev/null +++ b/edu_sharing_openapi/test/test_restore_results.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.restore_results import RestoreResults + +class TestRestoreResults(unittest.TestCase): + """RestoreResults unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> RestoreResults: + """Test RestoreResults + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `RestoreResults` + """ + model = RestoreResults() + if include_optional: + return RestoreResults( + results = [ + edu_sharing_client.models.restore_result.RestoreResult( + archive_node_id = '', + node_id = '', + parent = '', + path = '', + name = '', + restore_status = '', ) + ] + ) + else: + return RestoreResults( + results = [ + edu_sharing_client.models.restore_result.RestoreResult( + archive_node_id = '', + node_id = '', + parent = '', + path = '', + name = '', + restore_status = '', ) + ], + ) + """ + + def testRestoreResults(self): + """Test RestoreResults""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_search_parameters.py b/edu_sharing_openapi/test/test_search_parameters.py new file mode 100644 index 00000000..5877c463 --- /dev/null +++ b/edu_sharing_openapi/test/test_search_parameters.py @@ -0,0 +1,79 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.search_parameters import SearchParameters + +class TestSearchParameters(unittest.TestCase): + """SearchParameters unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SearchParameters: + """Test SearchParameters + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SearchParameters` + """ + model = SearchParameters() + if include_optional: + return SearchParameters( + permissions = [ + '' + ], + resolve_collections = True, + resolve_usernames = True, + return_suggestions = True, + excludes = [ + '' + ], + facets = [ + '' + ], + facet_min_count = 56, + facet_limit = 56, + facet_suggest = '', + criteria = [ + edu_sharing_client.models.mds_query_criteria.MdsQueryCriteria( + property = '', + values = [ + '' + ], ) + ] + ) + else: + return SearchParameters( + criteria = [ + edu_sharing_client.models.mds_query_criteria.MdsQueryCriteria( + property = '', + values = [ + '' + ], ) + ], + ) + """ + + def testSearchParameters(self): + """Test SearchParameters""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_search_parameters_facets.py b/edu_sharing_openapi/test/test_search_parameters_facets.py new file mode 100644 index 00000000..d9e5aa8c --- /dev/null +++ b/edu_sharing_openapi/test/test_search_parameters_facets.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.search_parameters_facets import SearchParametersFacets + +class TestSearchParametersFacets(unittest.TestCase): + """SearchParametersFacets unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SearchParametersFacets: + """Test SearchParametersFacets + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SearchParametersFacets` + """ + model = SearchParametersFacets() + if include_optional: + return SearchParametersFacets( + facets = [ + '' + ], + facet_min_count = 56, + facet_limit = 56, + facet_suggest = '', + criteria = [ + edu_sharing_client.models.mds_query_criteria.MdsQueryCriteria( + property = '', + values = [ + '' + ], ) + ] + ) + else: + return SearchParametersFacets( + facets = [ + '' + ], + criteria = [ + edu_sharing_client.models.mds_query_criteria.MdsQueryCriteria( + property = '', + values = [ + '' + ], ) + ], + ) + """ + + def testSearchParametersFacets(self): + """Test SearchParametersFacets""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_search_result.py b/edu_sharing_openapi/test/test_search_result.py new file mode 100644 index 00000000..ce4ef0fc --- /dev/null +++ b/edu_sharing_openapi/test/test_search_result.py @@ -0,0 +1,488 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.search_result import SearchResult + +class TestSearchResult(unittest.TestCase): + """SearchResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SearchResult: + """Test SearchResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SearchResult` + """ + model = SearchResult() + if include_optional: + return SearchResult( + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = 
True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ] + ) + else: + return SearchResult( + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = 
edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ], + ) + """ + + def testSearchResult(self): + """Test SearchResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_search_result_elastic.py b/edu_sharing_openapi/test/test_search_result_elastic.py new file mode 100644 index 00000000..d67f8c97 --- /dev/null +++ b/edu_sharing_openapi/test/test_search_result_elastic.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.search_result_elastic import SearchResultElastic + +class TestSearchResultElastic(unittest.TestCase): + """SearchResultElastic unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SearchResultElastic: + """Test SearchResultElastic + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SearchResultElastic` + """ + model = SearchResultElastic() + if include_optional: + return SearchResultElastic( + suggests = [ + edu_sharing_client.models.suggest.Suggest( + text = '', + highlighted = '', + score = 1.337, ) + ], + elastic_response = '', + nodes = [ + None + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ], + ignored = [ + '' + ] + ) + else: + return SearchResultElastic( + nodes = [ + None + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ], + ) + """ + + def testSearchResultElastic(self): + """Test SearchResultElastic""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_search_result_lrmi.py b/edu_sharing_openapi/test/test_search_result_lrmi.py new file mode 100644 index 00000000..3e87f910 --- /dev/null +++ b/edu_sharing_openapi/test/test_search_result_lrmi.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.search_result_lrmi import SearchResultLrmi + +class TestSearchResultLrmi(unittest.TestCase): + """SearchResultLrmi unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SearchResultLrmi: + """Test SearchResultLrmi + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SearchResultLrmi` + """ + model = SearchResultLrmi() + if include_optional: + return SearchResultLrmi( + suggests = [ + edu_sharing_client.models.suggest.Suggest( + text = '', + highlighted = '', + score = 1.337, ) + ], + nodes = [ + '' + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ], + ignored = [ + '' + ] + ) + else: + return SearchResultLrmi( + nodes = [ + '' + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ], + ) + """ + + def testSearchResultLrmi(self): + """Test SearchResultLrmi""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_search_result_node.py b/edu_sharing_openapi/test/test_search_result_node.py new file mode 100644 index 00000000..4b161efd --- /dev/null +++ b/edu_sharing_openapi/test/test_search_result_node.py @@ -0,0 +1,497 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.search_result_node import SearchResultNode + +class TestSearchResultNode(unittest.TestCase): + """SearchResultNode unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SearchResultNode: + """Test SearchResultNode + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SearchResultNode` + """ + model = SearchResultNode() + if include_optional: + return SearchResultNode( + suggests = [ + edu_sharing_client.models.suggest.Suggest( + text = '', + highlighted = '', + score = 1.337, ) + ], + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title 
= '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ], + ignored = [ + '' + ] + ) + else: + return SearchResultNode( + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = 
'', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + facets = [ + edu_sharing_client.models.facet.Facet( + property = '', + values = [ + edu_sharing_client.models.value.Value( + value = '', + count = 56, ) + ], + sum_other_doc_count = 56, ) + ], + ) + """ + + def testSearchResultNode(self): + """Test SearchResultNode""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_search_v_card.py b/edu_sharing_openapi/test/test_search_v_card.py new file mode 100644 index 00000000..37ca0d31 --- /dev/null +++ b/edu_sharing_openapi/test/test_search_v_card.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository 
REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.search_v_card import SearchVCard + +class TestSearchVCard(unittest.TestCase): + """SearchVCard unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SearchVCard: + """Test SearchVCard + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SearchVCard` + """ + model = SearchVCard() + if include_optional: + return SearchVCard( + vcard = '' + ) + else: + return SearchVCard( + ) + """ + + def testSearchVCard(self): + """Test SearchVCard""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_searchv1_api.py b/edu_sharing_openapi/test/test_searchv1_api.py new file mode 100644 index 00000000..399775a5 --- /dev/null +++ b/edu_sharing_openapi/test/test_searchv1_api.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.searchv1_api import SEARCHV1Api + + +class TestSEARCHV1Api(unittest.TestCase): + """SEARCHV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = SEARCHV1Api() + + def tearDown(self) -> None: + pass + + def test_get_metdata(self) -> None: + """Test case for get_metdata + + get nodes with metadata and collections + """ + pass + + def test_get_relevant_nodes(self) -> None: + """Test case for get_relevant_nodes + + Get relevant nodes for the current user + """ + pass + + def test_load_save_search(self) -> None: + """Test case for load_save_search + + Load a saved search query. + """ + pass + + def test_save_search(self) -> None: + """Test case for save_search + + Save a search query. + """ + pass + + def test_search(self) -> None: + """Test case for search + + Perform queries based on metadata sets. + """ + pass + + def test_search_by_property(self) -> None: + """Test case for search_by_property + + Search for custom properties with custom values + """ + pass + + def test_search_contributor(self) -> None: + """Test case for search_contributor + + Search for contributors + """ + pass + + def test_search_facets(self) -> None: + """Test case for search_facets + + Search in facets. + """ + pass + + def test_search_fingerprint(self) -> None: + """Test case for search_fingerprint + + Perform queries based on metadata sets. + """ + pass + + def test_search_lrmi(self) -> None: + """Test case for search_lrmi + + Perform queries based on metadata sets. 
+ """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_server_update_info.py b/edu_sharing_openapi/test/test_server_update_info.py new file mode 100644 index 00000000..3f5f6bee --- /dev/null +++ b/edu_sharing_openapi/test/test_server_update_info.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.server_update_info import ServerUpdateInfo + +class TestServerUpdateInfo(unittest.TestCase): + """ServerUpdateInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ServerUpdateInfo: + """Test ServerUpdateInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ServerUpdateInfo` + """ + model = ServerUpdateInfo() + if include_optional: + return ServerUpdateInfo( + id = '', + description = '', + order = 56, + auto = True, + testable = True, + executed_at = 56 + ) + else: + return ServerUpdateInfo( + ) + """ + + def testServerUpdateInfo(self): + """Test ServerUpdateInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_service.py b/edu_sharing_openapi/test/test_service.py new file mode 100644 index 00000000..04e15355 --- /dev/null +++ b/edu_sharing_openapi/test/test_service.py @@ -0,0 +1,85 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.service import Service + +class TestService(unittest.TestCase): + """Service unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Service: + """Test Service + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Service` + """ + model = Service() + if include_optional: + return Service( + name = '', + url = '', + icon = '', + logo = '', + in_language = '', + type = '', + description = '', + audience = [ + edu_sharing_client.models.audience.Audience( + name = '', ) + ], + provider = edu_sharing_client.models.provider.Provider( + legal_name = '', + url = '', + email = '', + area_served = 'Organization', + location = edu_sharing_client.models.location.Location( + geo = edu_sharing_client.models.geo.Geo( + longitude = 1.337, + latitude = 1.337, + address_country = '', ), ), ), + start_date = '', + interfaces = [ + edu_sharing_client.models.interface.Interface( + url = '', + set = '', + metadata_prefix = '', + documentation = '', + format = 'Json', + type = 'Search', ) + ], + about = [ + '' + ], + is_accessible_for_free = True + ) + else: + return Service( + ) + """ + + def testService(self): + """Test Service""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_service_instance.py b/edu_sharing_openapi/test/test_service_instance.py new file mode 100644 index 00000000..f38deb53 --- /dev/null +++ b/edu_sharing_openapi/test/test_service_instance.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.service_instance import ServiceInstance + +class TestServiceInstance(unittest.TestCase): + """ServiceInstance unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ServiceInstance: + """Test ServiceInstance + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ServiceInstance` + """ + model = ServiceInstance() + if include_optional: + return ServiceInstance( + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + endpoint = '' + ) + else: + return ServiceInstance( + version = edu_sharing_client.models.service_version.ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56, ), + endpoint = '', + ) + """ + + def testServiceInstance(self): + """Test ServiceInstance""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_service_version.py b/edu_sharing_openapi/test/test_service_version.py new file mode 100644 index 00000000..e8ff4af6 --- /dev/null +++ b/edu_sharing_openapi/test/test_service_version.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.service_version import ServiceVersion + +class TestServiceVersion(unittest.TestCase): + """ServiceVersion unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ServiceVersion: + """Test ServiceVersion + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ServiceVersion` + """ + model = ServiceVersion() + if include_optional: + return ServiceVersion( + repository = '', + renderservice = '', + major = 56, + minor = 56 + ) + else: + return ServiceVersion( + major = 56, + minor = 56, + ) + """ + + def testServiceVersion(self): + """Test ServiceVersion""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_services.py b/edu_sharing_openapi/test/test_services.py new file mode 100644 index 00000000..da6a2bc4 --- /dev/null +++ b/edu_sharing_openapi/test/test_services.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.services import Services + +class TestServices(unittest.TestCase): + """Services unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Services: + """Test Services + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Services` + """ + model = Services() + if include_optional: + return Services( + visualization = '' + ) + else: + return Services( + ) + """ + + def testServices(self): + """Test Services""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_shared_folder_options.py b/edu_sharing_openapi/test/test_shared_folder_options.py new file mode 100644 index 00000000..4a786e7b --- /dev/null +++ b/edu_sharing_openapi/test/test_shared_folder_options.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.shared_folder_options import SharedFolderOptions + +class TestSharedFolderOptions(unittest.TestCase): + """SharedFolderOptions unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SharedFolderOptions: + """Test SharedFolderOptions + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SharedFolderOptions` + """ + model = SharedFolderOptions() + if include_optional: + return SharedFolderOptions( + folders = 'none', + private_files = 'none', + cc_files = 'none', + move = True + ) + else: + return SharedFolderOptions( + ) + """ + + def testSharedFolderOptions(self): + """Test SharedFolderOptions""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_sharing_info.py b/edu_sharing_openapi/test/test_sharing_info.py new file mode 100644 index 00000000..edf34795 --- /dev/null +++ b/edu_sharing_openapi/test/test_sharing_info.py @@ -0,0 +1,277 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.sharing_info import SharingInfo + +class TestSharingInfo(unittest.TestCase): + """SharingInfo unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SharingInfo: + """Test SharingInfo + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SharingInfo` + """ + model = SharingInfo() + if include_optional: + return SharingInfo( + password_matches = True, + password = True, + expired = True, + invited_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + 
is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ) + else: + return SharingInfo( + ) + """ + + def testSharingInfo(self): + """Test SharingInfo""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_sharingv1_api.py b/edu_sharing_openapi/test/test_sharingv1_api.py new file mode 100644 index 00000000..908884c0 --- /dev/null +++ b/edu_sharing_openapi/test/test_sharingv1_api.py @@ -0,0 +1,45 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.sharingv1_api import SHARINGV1Api + + +class TestSHARINGV1Api(unittest.TestCase): + """SHARINGV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = SHARINGV1Api() + + def tearDown(self) -> None: + pass + + def test_get_children1(self) -> None: + """Test case for get_children1 + + Get all children of this share. + """ + pass + + def test_get_info(self) -> None: + """Test case for get_info + + Get general info of a share. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_simple_edit.py b/edu_sharing_openapi/test/test_simple_edit.py new file mode 100644 index 00000000..a6c05f6a --- /dev/null +++ b/edu_sharing_openapi/test/test_simple_edit.py @@ -0,0 +1,65 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.simple_edit import SimpleEdit + +class TestSimpleEdit(unittest.TestCase): + """SimpleEdit unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SimpleEdit: + """Test SimpleEdit + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SimpleEdit` + """ + model = SimpleEdit() + if include_optional: + return SimpleEdit( + global_groups = [ + edu_sharing_client.models.simple_edit_global_groups.SimpleEditGlobalGroups( + toolpermission = '', + groups = [ + '' + ], ) + ], + organization = edu_sharing_client.models.simple_edit_organization.SimpleEditOrganization( + group_types = [ + '' + ], ), + organization_filter = '', + licenses = [ + '' + ] + ) + else: + return SimpleEdit( + ) + """ + + def testSimpleEdit(self): + """Test SimpleEdit""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_simple_edit_global_groups.py b/edu_sharing_openapi/test/test_simple_edit_global_groups.py new file mode 100644 index 00000000..a9a7b39a --- /dev/null +++ b/edu_sharing_openapi/test/test_simple_edit_global_groups.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.simple_edit_global_groups import SimpleEditGlobalGroups + +class TestSimpleEditGlobalGroups(unittest.TestCase): + """SimpleEditGlobalGroups unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SimpleEditGlobalGroups: + """Test SimpleEditGlobalGroups + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SimpleEditGlobalGroups` + """ + model = SimpleEditGlobalGroups() + if include_optional: + return SimpleEditGlobalGroups( + toolpermission = '', + groups = [ + '' + ] + ) + else: + return SimpleEditGlobalGroups( + ) + """ + + def testSimpleEditGlobalGroups(self): + """Test SimpleEditGlobalGroups""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_simple_edit_organization.py b/edu_sharing_openapi/test/test_simple_edit_organization.py new file mode 100644 index 00000000..ecf7836f --- /dev/null +++ b/edu_sharing_openapi/test/test_simple_edit_organization.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.simple_edit_organization import SimpleEditOrganization + +class TestSimpleEditOrganization(unittest.TestCase): + """SimpleEditOrganization unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SimpleEditOrganization: + """Test SimpleEditOrganization + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SimpleEditOrganization` + """ + model = SimpleEditOrganization() + if include_optional: + return SimpleEditOrganization( + group_types = [ + '' + ] + ) + else: + return SimpleEditOrganization( + ) + """ + + def testSimpleEditOrganization(self): + """Test SimpleEditOrganization""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_sort.py b/edu_sharing_openapi/test/test_sort.py new file mode 100644 index 00000000..5c52d0ed --- /dev/null +++ b/edu_sharing_openapi/test/test_sort.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.sort import Sort + +class TestSort(unittest.TestCase): + """Sort unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Sort: + """Test Sort + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Sort` + """ + model = Sort() + if include_optional: + return Sort( + sorted = True, + empty = True, + unsorted = True + ) + else: + return Sort( + ) + """ + + def testSort(self): + """Test Sort""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistic_entity.py b/edu_sharing_openapi/test/test_statistic_entity.py new file mode 100644 index 00000000..0147355f --- /dev/null +++ b/edu_sharing_openapi/test/test_statistic_entity.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistic_entity import StatisticEntity + +class TestStatisticEntity(unittest.TestCase): + """StatisticEntity unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StatisticEntity: + """Test StatisticEntity + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StatisticEntity` + """ + model = StatisticEntity() + if include_optional: + return StatisticEntity( + value = '', + count = 56 + ) + else: + return StatisticEntity( + value = '', + count = 56, + ) + """ + + def testStatisticEntity(self): + """Test StatisticEntity""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistic_entry.py b/edu_sharing_openapi/test/test_statistic_entry.py new file mode 100644 index 00000000..d963d51e --- /dev/null +++ b/edu_sharing_openapi/test/test_statistic_entry.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistic_entry import StatisticEntry + +class TestStatisticEntry(unittest.TestCase): + """StatisticEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StatisticEntry: + """Test StatisticEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StatisticEntry` + """ + model = StatisticEntry() + if include_optional: + return StatisticEntry( + var_property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ] + ) + else: + return StatisticEntry( + var_property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ], + ) + """ + + def testStatisticEntry(self): + """Test StatisticEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistics.py b/edu_sharing_openapi/test/test_statistics.py new file mode 100644 index 00000000..2f129138 --- /dev/null +++ b/edu_sharing_openapi/test/test_statistics.py @@ -0,0 +1,68 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistics import Statistics + +class TestStatistics(unittest.TestCase): + """Statistics unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Statistics: + """Test Statistics + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Statistics` + """ + model = Statistics() + if include_optional: + return Statistics( + entries = [ + edu_sharing_client.models.statistic_entry.StatisticEntry( + property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ], ) + ] + ) + else: + return Statistics( + entries = [ + edu_sharing_client.models.statistic_entry.StatisticEntry( + property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ], ) + ], + ) + """ + + def testStatistics(self): + """Test Statistics""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistics_global.py b/edu_sharing_openapi/test/test_statistics_global.py new file mode 100644 index 00000000..8ef190b7 --- /dev/null +++ b/edu_sharing_openapi/test/test_statistics_global.py @@ -0,0 +1,78 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistics_global import StatisticsGlobal + +class TestStatisticsGlobal(unittest.TestCase): + """StatisticsGlobal unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StatisticsGlobal: + """Test StatisticsGlobal + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StatisticsGlobal` + """ + model = StatisticsGlobal() + if include_optional: + return StatisticsGlobal( + overall = edu_sharing_client.models.statistics_group.StatisticsGroup( + count = 56, + sub_groups = [ + edu_sharing_client.models.statistics_sub_group.StatisticsSubGroup( + id = '', + count = [ + edu_sharing_client.models.sub_group_item.SubGroupItem( + key = '', + display_name = '', ) + ], ) + ], ), + groups = [ + edu_sharing_client.models.statistics_key_group.StatisticsKeyGroup( + key = '', + display_name = '', + count = 56, + sub_groups = [ + edu_sharing_client.models.statistics_sub_group.StatisticsSubGroup( + id = '', + count = [ + edu_sharing_client.models.sub_group_item.SubGroupItem( + key = '', + display_name = '', ) + ], ) + ], ) + ], + user = edu_sharing_client.models.statistics_user.StatisticsUser( + count = 56, ) + ) + else: + return StatisticsGlobal( + ) + """ + + def testStatisticsGlobal(self): + """Test StatisticsGlobal""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistics_group.py b/edu_sharing_openapi/test/test_statistics_group.py new file mode 100644 index 00000000..51bb332d --- /dev/null +++ b/edu_sharing_openapi/test/test_statistics_group.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistics_group import StatisticsGroup + +class TestStatisticsGroup(unittest.TestCase): + """StatisticsGroup unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StatisticsGroup: + """Test StatisticsGroup + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StatisticsGroup` + """ + model = StatisticsGroup() + if include_optional: + return StatisticsGroup( + count = 56, + sub_groups = [ + edu_sharing_client.models.statistics_sub_group.StatisticsSubGroup( + id = '', + count = [ + edu_sharing_client.models.sub_group_item.SubGroupItem( + key = '', + display_name = '', ) + ], ) + ] + ) + else: + return StatisticsGroup( + ) + """ + + def testStatisticsGroup(self): + """Test StatisticsGroup""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistics_key_group.py b/edu_sharing_openapi/test/test_statistics_key_group.py new file mode 100644 index 00000000..ae676690 --- /dev/null +++ b/edu_sharing_openapi/test/test_statistics_key_group.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistics_key_group import StatisticsKeyGroup + +class TestStatisticsKeyGroup(unittest.TestCase): + """StatisticsKeyGroup unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StatisticsKeyGroup: + """Test StatisticsKeyGroup + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StatisticsKeyGroup` + """ + model = StatisticsKeyGroup() + if include_optional: + return StatisticsKeyGroup( + key = '', + display_name = '', + count = 56, + sub_groups = [ + edu_sharing_client.models.statistics_sub_group.StatisticsSubGroup( + id = '', + count = [ + edu_sharing_client.models.sub_group_item.SubGroupItem( + key = '', + display_name = '', ) + ], ) + ] + ) + else: + return StatisticsKeyGroup( + ) + """ + + def testStatisticsKeyGroup(self): + """Test StatisticsKeyGroup""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistics_sub_group.py b/edu_sharing_openapi/test/test_statistics_sub_group.py new file mode 100644 index 00000000..28d48ee8 --- /dev/null +++ b/edu_sharing_openapi/test/test_statistics_sub_group.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistics_sub_group import StatisticsSubGroup + +class TestStatisticsSubGroup(unittest.TestCase): + """StatisticsSubGroup unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StatisticsSubGroup: + """Test StatisticsSubGroup + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StatisticsSubGroup` + """ + model = StatisticsSubGroup() + if include_optional: + return StatisticsSubGroup( + id = '', + count = [ + edu_sharing_client.models.sub_group_item.SubGroupItem( + key = '', + display_name = '', ) + ] + ) + else: + return StatisticsSubGroup( + ) + """ + + def testStatisticsSubGroup(self): + """Test StatisticsSubGroup""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statistics_user.py b/edu_sharing_openapi/test/test_statistics_user.py new file mode 100644 index 00000000..b682d56f --- /dev/null +++ b/edu_sharing_openapi/test/test_statistics_user.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.statistics_user import StatisticsUser + +class TestStatisticsUser(unittest.TestCase): + """StatisticsUser unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StatisticsUser: + """Test StatisticsUser + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StatisticsUser` + """ + model = StatisticsUser() + if include_optional: + return StatisticsUser( + count = 56 + ) + else: + return StatisticsUser( + ) + """ + + def testStatisticsUser(self): + """Test StatisticsUser""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_statisticv1_api.py b/edu_sharing_openapi/test/test_statisticv1_api.py new file mode 100644 index 00000000..5b2948bd --- /dev/null +++ b/edu_sharing_openapi/test/test_statisticv1_api.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.statisticv1_api import STATISTICV1Api + + +class TestSTATISTICV1Api(unittest.TestCase): + """STATISTICV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = STATISTICV1Api() + + def tearDown(self) -> None: + pass + + def test_get(self) -> None: + """Test case for get + + Get statistics of repository. + """ + pass + + def test_get_global_statistics(self) -> None: + """Test case for get_global_statistics + + Get stats. 
+ """ + pass + + def test_get_node_data(self) -> None: + """Test case for get_node_data + + get the range of nodes which had tracked actions since a given timestamp + """ + pass + + def test_get_nodes_altered_in_range1(self) -> None: + """Test case for get_nodes_altered_in_range1 + + get the range of nodes which had tracked actions since a given timestamp + """ + pass + + def test_get_statistics_node(self) -> None: + """Test case for get_statistics_node + + get statistics for node actions + """ + pass + + def test_get_statistics_user(self) -> None: + """Test case for get_statistics_user + + get statistics for user actions (login, logout) + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_stored_service.py b/edu_sharing_openapi/test/test_stored_service.py new file mode 100644 index 00000000..16586f09 --- /dev/null +++ b/edu_sharing_openapi/test/test_stored_service.py @@ -0,0 +1,86 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.stored_service import StoredService + +class TestStoredService(unittest.TestCase): + """StoredService unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StoredService: + """Test StoredService + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StoredService` + """ + model = StoredService() + if include_optional: + return StoredService( + name = '', + url = '', + icon = '', + logo = '', + in_language = '', + type = '', + description = '', + audience = [ + edu_sharing_client.models.audience.Audience( + name = '', ) + ], + provider = edu_sharing_client.models.provider.Provider( + legal_name = '', + url = '', + email = '', + area_served = 'Organization', + location = edu_sharing_client.models.location.Location( + geo = edu_sharing_client.models.geo.Geo( + longitude = 1.337, + latitude = 1.337, + address_country = '', ), ), ), + start_date = '', + interfaces = [ + edu_sharing_client.models.interface.Interface( + url = '', + set = '', + metadata_prefix = '', + documentation = '', + format = 'Json', + type = 'Search', ) + ], + about = [ + '' + ], + id = '', + is_accessible_for_free = True + ) + else: + return StoredService( + ) + """ + + def testStoredService(self): + """Test StoredService""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_stream.py b/edu_sharing_openapi/test/test_stream.py new file mode 100644 index 00000000..c6b0da23 --- /dev/null +++ b/edu_sharing_openapi/test/test_stream.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.stream import Stream + +class TestStream(unittest.TestCase): + """Stream unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Stream: + """Test Stream + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Stream` + """ + model = Stream() + if include_optional: + return Stream( + enabled = True + ) + else: + return Stream( + ) + """ + + def testStream(self): + """Test Stream""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_stream_entry.py b/edu_sharing_openapi/test/test_stream_entry.py new file mode 100644 index 00000000..7fbe6875 --- /dev/null +++ b/edu_sharing_openapi/test/test_stream_entry.py @@ -0,0 +1,319 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.stream_entry import StreamEntry + +class TestStreamEntry(unittest.TestCase): + """StreamEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StreamEntry: + """Test StreamEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StreamEntry` + """ + model = StreamEntry() + if include_optional: + return StreamEntry( + id = '', + description = '', + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + 
name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + properties = { + 'key' : None + }, + priority = 56, + author = edu_sharing_client.models.user_simple.UserSimple( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + 
shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), ), + created = 56, + modified = 56 + ) + else: + return StreamEntry( + ) + """ + + def testStreamEntry(self): + """Test StreamEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_stream_entry_input.py b/edu_sharing_openapi/test/test_stream_entry_input.py new file mode 100644 index 00000000..6c0ec268 --- /dev/null +++ b/edu_sharing_openapi/test/test_stream_entry_input.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.stream_entry_input import StreamEntryInput + +class TestStreamEntryInput(unittest.TestCase): + """StreamEntryInput unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StreamEntryInput: + """Test StreamEntryInput + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StreamEntryInput` + """ + model = StreamEntryInput() + if include_optional: + return StreamEntryInput( + id = '', + title = '', + description = '', + nodes = [ + '' + ], + properties = { + 'key' : None + }, + priority = 56 + ) + else: + return StreamEntryInput( + ) + """ + + def testStreamEntryInput(self): + """Test StreamEntryInput""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_stream_list.py b/edu_sharing_openapi/test/test_stream_list.py new file mode 100644 index 00000000..4485939b --- /dev/null +++ b/edu_sharing_openapi/test/test_stream_list.py @@ -0,0 +1,287 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.stream_list import StreamList + +class TestStreamList(unittest.TestCase): + """StreamList unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> StreamList: + """Test StreamList + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `StreamList` + """ + model = StreamList() + if include_optional: + return StreamList( + stream = [ + edu_sharing_client.models.stream_entry.StreamEntry( + id = '', + description = '', + nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', 
+ x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + properties = { + 'key' : None + }, + priority = 56, + author = edu_sharing_client.models.user_simple.UserSimple( + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + administration_access = True, + shared_folder = , ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', ), + created = 56, + modified = 56, ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ) + ) + else: + return StreamList( + ) + """ + + def testStreamList(self): + """Test StreamList""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_streamv1_api.py b/edu_sharing_openapi/test/test_streamv1_api.py new file mode 100644 index 00000000..77c83c91 --- /dev/null +++ b/edu_sharing_openapi/test/test_streamv1_api.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.streamv1_api import STREAMV1Api + + +class TestSTREAMV1Api(unittest.TestCase): + """STREAMV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = STREAMV1Api() + + def tearDown(self) -> None: + pass + + def test_add_entry(self) -> None: + """Test case for add_entry + + add a new stream object. 
+ """ + pass + + def test_can_access(self) -> None: + """Test case for can_access + + test + """ + pass + + def test_delete_entry(self) -> None: + """Test case for delete_entry + + delete a stream object + """ + pass + + def test_get_property_values(self) -> None: + """Test case for get_property_values + + Get top values for a property + """ + pass + + def test_search1(self) -> None: + """Test case for search1 + + Get the stream content for the current user with the given status. + """ + pass + + def test_update_entry(self) -> None: + """Test case for update_entry + + update status for a stream object and authority + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_sub_group_item.py b/edu_sharing_openapi/test/test_sub_group_item.py new file mode 100644 index 00000000..58ad1dce --- /dev/null +++ b/edu_sharing_openapi/test/test_sub_group_item.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.sub_group_item import SubGroupItem + +class TestSubGroupItem(unittest.TestCase): + """SubGroupItem unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SubGroupItem: + """Test SubGroupItem + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SubGroupItem` + """ + model = SubGroupItem() + if include_optional: + return SubGroupItem( + key = '', + display_name = '', + count = 56 + ) + else: + return SubGroupItem( + ) + """ + + def testSubGroupItem(self): + """Test SubGroupItem""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_suggest.py b/edu_sharing_openapi/test/test_suggest.py new file mode 100644 index 00000000..e5e5f8ef --- /dev/null +++ b/edu_sharing_openapi/test/test_suggest.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.suggest import Suggest + +class TestSuggest(unittest.TestCase): + """Suggest unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Suggest: + """Test Suggest + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Suggest` + """ + model = Suggest() + if include_optional: + return Suggest( + text = '', + highlighted = '', + score = 1.337 + ) + else: + return Suggest( + text = '', + score = 1.337, + ) + """ + + def testSuggest(self): + """Test Suggest""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_suggestion.py b/edu_sharing_openapi/test/test_suggestion.py new file mode 100644 index 00000000..4372e6b3 --- /dev/null +++ b/edu_sharing_openapi/test/test_suggestion.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.suggestion import Suggestion + +class TestSuggestion(unittest.TestCase): + """Suggestion unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Suggestion: + """Test Suggestion + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Suggestion` + """ + model = Suggestion() + if include_optional: + return Suggestion( + replacement_string = '', + display_string = '', + key = '' + ) + else: + return Suggestion( + replacement_string = '', + display_string = '', + ) + """ + + def testSuggestion(self): + """Test Suggestion""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_suggestion_param.py b/edu_sharing_openapi/test/test_suggestion_param.py new file mode 100644 index 00000000..f9dd210c --- /dev/null +++ b/edu_sharing_openapi/test/test_suggestion_param.py @@ -0,0 +1,61 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.suggestion_param import SuggestionParam + +class TestSuggestionParam(unittest.TestCase): + """SuggestionParam unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> SuggestionParam: + """Test SuggestionParam + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `SuggestionParam` + """ + model = SuggestionParam() + if include_optional: + return SuggestionParam( + value_parameters = edu_sharing_client.models.value_parameters.ValueParameters( + query = '', + property = '', + pattern = '', ), + criteria = [ + edu_sharing_client.models.mds_query_criteria.MdsQueryCriteria( + property = '', + values = [ + '' + ], ) + ] + ) + else: + return SuggestionParam( + ) + """ + + def testSuggestionParam(self): + """Test SuggestionParam""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_suggestions.py b/edu_sharing_openapi/test/test_suggestions.py new file mode 100644 index 00000000..3c89d924 --- /dev/null +++ b/edu_sharing_openapi/test/test_suggestions.py @@ -0,0 +1,62 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.suggestions import Suggestions + +class TestSuggestions(unittest.TestCase): + """Suggestions unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Suggestions: + """Test Suggestions + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Suggestions` + """ + model = Suggestions() + if include_optional: + return Suggestions( + values = [ + edu_sharing_client.models.suggestion.Suggestion( + replacement_string = '', + display_string = '', + key = '', ) + ] + ) + else: + return Suggestions( + values = [ + edu_sharing_client.models.suggestion.Suggestion( + replacement_string = '', + display_string = '', + key = '', ) + ], + ) + """ + + def testSuggestions(self): + """Test Suggestions""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_tool.py b/edu_sharing_openapi/test/test_tool.py new file mode 100644 index 00000000..84ab2ad2 --- /dev/null +++ b/edu_sharing_openapi/test/test_tool.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.tool import Tool + +class TestTool(unittest.TestCase): + """Tool unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Tool: + """Test Tool + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Tool` + """ + model = Tool() + if include_optional: + return Tool( + domain = '', + description = '', + app_id = '', + name = '', + logo = '', + custom_content_option = True + ) + else: + return Tool( + ) + """ + + def testTool(self): + """Test Tool""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_tools.py b/edu_sharing_openapi/test/test_tools.py new file mode 100644 index 00000000..e395d991 --- /dev/null +++ b/edu_sharing_openapi/test/test_tools.py @@ -0,0 +1,59 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.tools import Tools + +class TestTools(unittest.TestCase): + """Tools unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Tools: + """Test Tools + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Tools` + """ + model = Tools() + if include_optional: + return Tools( + tools = [ + edu_sharing_client.models.tool.Tool( + domain = '', + description = '', + app_id = '', + name = '', + logo = '', + custom_content_option = True, ) + ] + ) + else: + return Tools( + ) + """ + + def testTools(self): + """Test Tools""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_toolv1_api.py b/edu_sharing_openapi/test/test_toolv1_api.py new file mode 100644 index 00000000..57465940 --- /dev/null +++ b/edu_sharing_openapi/test/test_toolv1_api.py @@ -0,0 +1,73 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.toolv1_api import TOOLV1Api + + +class TestTOOLV1Api(unittest.TestCase): + """TOOLV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = TOOLV1Api() + + def tearDown(self) -> None: + pass + + def test_create_tool_defintition(self) -> None: + """Test case for create_tool_defintition + + Create a new tool definition object. + """ + pass + + def test_create_tool_instance(self) -> None: + """Test case for create_tool_instance + + Create a new tool Instance object. 
+ """ + pass + + def test_create_tool_object(self) -> None: + """Test case for create_tool_object + + Create a new tool object for a given tool instance. + """ + pass + + def test_get_all_tool_definitions(self) -> None: + """Test case for get_all_tool_definitions + + Get all ToolDefinitions. + """ + pass + + def test_get_instance(self) -> None: + """Test case for get_instance + + Get Instances of a ToolDefinition. + """ + pass + + def test_get_instances(self) -> None: + """Test case for get_instances + + Get Instances of a ToolDefinition. + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_tracking.py b/edu_sharing_openapi/test/test_tracking.py new file mode 100644 index 00000000..ea0f471a --- /dev/null +++ b/edu_sharing_openapi/test/test_tracking.py @@ -0,0 +1,115 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.tracking import Tracking + +class TestTracking(unittest.TestCase): + """Tracking unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Tracking: + """Test Tracking + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Tracking` + """ + model = Tracking() + if include_optional: + return Tracking( + counts = { + 'key' : 56 + }, + var_date = '', + fields = { + 'key' : None + }, + groups = { + 'key' : { + 'key' : { + 'key' : 56 + } + } + }, + authority = edu_sharing_client.models.tracking_authority.TrackingAuthority( + hash = '', + organization = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + mediacenter = [ + edu_sharing_client.models.group.Group( + editable = True, + signup_method = 'simple', + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + administration_access = True, ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', ) + ], ) + ) + else: + return Tracking( + ) + """ + + def testTracking(self): + """Test Tracking""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_tracking_authority.py b/edu_sharing_openapi/test/test_tracking_authority.py new file mode 100644 index 00000000..7075cea6 --- /dev/null +++ b/edu_sharing_openapi/test/test_tracking_authority.py @@ -0,0 +1,128 @@ +# 
coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.tracking_authority import TrackingAuthority + +class TestTrackingAuthority(unittest.TestCase): + """TrackingAuthority unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> TrackingAuthority: + """Test TrackingAuthority + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `TrackingAuthority` + """ + model = TrackingAuthority() + if include_optional: + return TrackingAuthority( + hash = '', + organization = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + mediacenter = [ + edu_sharing_client.models.group.Group( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), ) + ] + ) + else: + return TrackingAuthority( + ) + """ + + def testTrackingAuthority(self): + """Test TrackingAuthority""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_tracking_node.py b/edu_sharing_openapi/test/test_tracking_node.py new file mode 100644 index 00000000..007d8006 --- /dev/null +++ b/edu_sharing_openapi/test/test_tracking_node.py @@ -0,0 +1,318 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.tracking_node import TrackingNode + +class TestTrackingNode(unittest.TestCase): + """TrackingNode unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> TrackingNode: + """Test TrackingNode + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `TrackingNode` + """ + model = TrackingNode() + if include_optional: + return TrackingNode( + counts = { + 'key' : 56 + }, + var_date = '', + fields = { + 'key' : None + }, + groups = { + 'key' : { + 'key' : { + 'key' : 56 + } + } + }, + node = edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + 
description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ), + authority = edu_sharing_client.models.tracking_authority.TrackingAuthority( + hash = '', + organization = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + mediacenter = [ + edu_sharing_client.models.group.Group( + editable = True, + signup_method = 'simple', + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + authority_name = '', + authority_type = 'USER', + group_name = '', + administration_access = True, ) + ], + authority_name = '', + authority_type = 'USER', + group_name = '', ) + ], ) + ) + else: + return TrackingNode( + ) + """ + + def testTrackingNode(self): + """Test TrackingNode""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_trackingv1_api.py b/edu_sharing_openapi/test/test_trackingv1_api.py new file mode 100644 index 00000000..a2b4e74a --- /dev/null +++ b/edu_sharing_openapi/test/test_trackingv1_api.py @@ -0,0 +1,38 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.trackingv1_api import TRACKINGV1Api + + +class TestTRACKINGV1Api(unittest.TestCase): + """TRACKINGV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = TRACKINGV1Api() + + def tearDown(self) -> None: + pass + + def test_track_event(self) -> None: + """Test case for track_event + + Track a user interaction + """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_upload_result.py b/edu_sharing_openapi/test/test_upload_result.py new file mode 100644 index 00000000..6d4045bb --- /dev/null +++ b/edu_sharing_openapi/test/test_upload_result.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.upload_result import UploadResult + +class TestUploadResult(unittest.TestCase): + """UploadResult unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UploadResult: + """Test UploadResult + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UploadResult` + """ + model = UploadResult() + if include_optional: + return UploadResult( + file = '' + ) + else: + return UploadResult( + ) + """ + + def testUploadResult(self): + """Test UploadResult""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_usage.py b/edu_sharing_openapi/test/test_usage.py new file mode 100644 index 00000000..c6b88425 --- /dev/null +++ b/edu_sharing_openapi/test/test_usage.py @@ -0,0 +1,82 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.usage import Usage + +class TestUsage(unittest.TestCase): + """Usage unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Usage: + """Test Usage + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Usage` + """ + model = Usage() + if include_optional: + return Usage( + from_used = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + to_used = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + usage_counter = 56, + app_subtype = '', + app_type = '', + type = '', + created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + app_user = '', + app_user_mail = '', + course_id = '', + distinct_persons = 56, + app_id = '', + node_id = '', + parent_node_id = '', + usage_version = '', + usage_xml_params = edu_sharing_client.models.parameters.Parameters( + general = edu_sharing_client.models.general.General( + referenced_in_name = '', + referenced_in_type = '', + referenced_in_instance = '', ), ), + usage_xml_params_raw = '', + resource_id = '', + guid = '' + ) + else: + return Usage( + app_user = '', + app_user_mail = '', + course_id = '', + app_id = '', + node_id = '', + parent_node_id = '', + usage_version = '', + resource_id = '', + ) + """ + + def testUsage(self): + """Test Usage""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_usages.py b/edu_sharing_openapi/test/test_usages.py new file mode 100644 index 00000000..ab6c23f8 --- /dev/null +++ b/edu_sharing_openapi/test/test_usages.py @@ -0,0 +1,77 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.usages import Usages + +class TestUsages(unittest.TestCase): + """Usages unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Usages: + """Test Usages + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Usages` + """ + model = Usages() + if include_optional: + return Usages( + usages = [ + edu_sharing_client.models.usage.Usage( + from_used = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + to_used = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + usage_counter = 56, + app_subtype = '', + app_type = '', + type = '', + created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + app_user = '', + app_user_mail = '', + course_id = '', + distinct_persons = 56, + app_id = '', + node_id = '', + parent_node_id = '', + usage_version = '', + usage_xml_params = edu_sharing_client.models.parameters.Parameters( + general = edu_sharing_client.models.general.General( + referenced_in_name = '', + referenced_in_type = '', + referenced_in_instance = '', ), ), + usage_xml_params_raw = '', + resource_id = '', + guid = '', ) + ] + ) + else: + return Usages( + ) + """ + + def testUsages(self): + """Test Usages""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_usagev1_api.py b/edu_sharing_openapi/test/test_usagev1_api.py new file mode 100644 index 00000000..c90af974 --- /dev/null +++ b/edu_sharing_openapi/test/test_usagev1_api.py @@ -0,0 +1,79 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.api.usagev1_api import USAGEV1Api + + +class TestUSAGEV1Api(unittest.TestCase): + """USAGEV1Api unit test stubs""" + + def setUp(self) -> None: + self.api = USAGEV1Api() + + def tearDown(self) -> None: + pass + + def test_delete_usage(self) -> None: + """Test case for delete_usage + + Delete an usage of a node. + """ + pass + + def test_get_usages(self) -> None: + """Test case for get_usages + + Get all usages of an application. + """ + pass + + def test_get_usages1(self) -> None: + """Test case for get_usages1 + + """ + pass + + def test_get_usages_by_course(self) -> None: + """Test case for get_usages_by_course + + Get all usages of an course. + """ + pass + + def test_get_usages_by_node(self) -> None: + """Test case for get_usages_by_node + + Get all usages of an node. + """ + pass + + def test_get_usages_by_node_collections(self) -> None: + """Test case for get_usages_by_node_collections + + Get all collections where this node is used. + """ + pass + + def test_set_usage(self) -> None: + """Test case for set_usage + + Set a usage for a node. app signature headers and authenticated user required. 
+ """ + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user.py b/edu_sharing_openapi/test/test_user.py new file mode 100644 index 00000000..221326a8 --- /dev/null +++ b/edu_sharing_openapi/test/test_user.py @@ -0,0 +1,133 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user import User + +class TestUser(unittest.TestCase): + """User unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> User: + """Test User + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `User` + """ + model = User() + if include_optional: + return User( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + status = 'active', + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + quota = edu_sharing_client.models.user_quota.UserQuota( + enabled = True, + size_current = 56, + size_quota = 56, ), + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + home_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + shared_folders = [ + edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ) + ] + ) + else: + return User( + authority_name = '', + home_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + ) + """ + + def testUser(self): + """Test User""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_credential.py b/edu_sharing_openapi/test/test_user_credential.py new file mode 100644 index 00000000..883f289e --- /dev/null +++ b/edu_sharing_openapi/test/test_user_credential.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. 
+ + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_credential import UserCredential + +class TestUserCredential(unittest.TestCase): + """UserCredential unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserCredential: + """Test UserCredential + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserCredential` + """ + model = UserCredential() + if include_optional: + return UserCredential( + old_password = '', + new_password = '' + ) + else: + return UserCredential( + new_password = '', + ) + """ + + def testUserCredential(self): + """Test UserCredential""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_data_dto.py b/edu_sharing_openapi/test/test_user_data_dto.py new file mode 100644 index 00000000..c7a0e0ab --- /dev/null +++ b/edu_sharing_openapi/test/test_user_data_dto.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_data_dto import UserDataDTO + +class TestUserDataDTO(unittest.TestCase): + """UserDataDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserDataDTO: + """Test UserDataDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserDataDTO` + """ + model = UserDataDTO() + if include_optional: + return UserDataDTO( + id = '', + first_name = '', + last_name = '', + mailbox = '' + ) + else: + return UserDataDTO( + ) + """ + + def testUserDataDTO(self): + """Test UserDataDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_entries.py b/edu_sharing_openapi/test/test_user_entries.py new file mode 100644 index 00000000..f4a59c68 --- /dev/null +++ b/edu_sharing_openapi/test/test_user_entries.py @@ -0,0 +1,174 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_entries import UserEntries + +class TestUserEntries(unittest.TestCase): + """UserEntries unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserEntries: + """Test UserEntries + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserEntries` + """ + model = UserEntries() + if include_optional: + return UserEntries( + users = [ + edu_sharing_client.models.user_simple.UserSimple( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ) + ) + else: + return UserEntries( + users = [ + edu_sharing_client.models.user_simple.UserSimple( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), ) + ], + pagination = edu_sharing_client.models.pagination.Pagination( + total = 56, + from = 56, + count = 56, ), + ) + """ + + def testUserEntries(self): + """Test UserEntries""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_entry.py 
b/edu_sharing_openapi/test/test_user_entry.py new file mode 100644 index 00000000..baeb3e35 --- /dev/null +++ b/edu_sharing_openapi/test/test_user_entry.py @@ -0,0 +1,179 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_entry import UserEntry + +class TestUserEntry(unittest.TestCase): + """UserEntry unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserEntry: + """Test UserEntry + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserEntry` + """ + model = UserEntry() + if include_optional: + return UserEntry( + edit_profile = True, + person = edu_sharing_client.models.user.User( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + quota = edu_sharing_client.models.user_quota.UserQuota( + enabled = True, + size_current = 56, + size_quota = 56, ), + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + home_folder = , + shared_folders = [ + + ], ) + ) + else: + return UserEntry( + person = edu_sharing_client.models.user.User( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + quota = edu_sharing_client.models.user_quota.UserQuota( + enabled = True, + size_current = 56, + size_quota = 56, ), + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], 
+ types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + home_folder = , + shared_folders = [ + + ], ), + ) + """ + + def testUserEntry(self): + """Test UserEntry""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_profile.py b/edu_sharing_openapi/test/test_user_profile.py new file mode 100644 index 00000000..3f484706 --- /dev/null +++ b/edu_sharing_openapi/test/test_user_profile.py @@ -0,0 +1,66 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_profile import UserProfile + +class TestUserProfile(unittest.TestCase): + """UserProfile unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserProfile: + """Test UserProfile + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserProfile` + """ + model = UserProfile() + if include_optional: + return UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '' + ) + else: + return UserProfile( + ) + """ + + def testUserProfile(self): + """Test UserProfile""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_profile_app_auth.py b/edu_sharing_openapi/test/test_user_profile_app_auth.py new file mode 100644 index 00000000..154c3b6f --- /dev/null +++ b/edu_sharing_openapi/test/test_user_profile_app_auth.py @@ -0,0 +1,71 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_profile_app_auth import UserProfileAppAuth + +class TestUserProfileAppAuth(unittest.TestCase): + """UserProfileAppAuth unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserProfileAppAuth: + """Test UserProfileAppAuth + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserProfileAppAuth` + """ + model = UserProfileAppAuth() + if include_optional: + return UserProfileAppAuth( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + extended_attributes = { + 'key' : [ + '' + ] + }, + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '' + ) + else: + return UserProfileAppAuth( + ) + """ + + def testUserProfileAppAuth(self): + """Test UserProfileAppAuth""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_profile_edit.py b/edu_sharing_openapi/test/test_user_profile_edit.py new file mode 100644 index 00000000..8a589a1f --- /dev/null +++ b/edu_sharing_openapi/test/test_user_profile_edit.py @@ -0,0 +1,67 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_profile_edit import UserProfileEdit + +class TestUserProfileEdit(unittest.TestCase): + """UserProfileEdit unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserProfileEdit: + """Test UserProfileEdit + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserProfileEdit` + """ + model = UserProfileEdit() + if include_optional: + return UserProfileEdit( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + size_quota = 56, + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '' + ) + else: + return UserProfileEdit( + ) + """ + + def testUserProfileEdit(self): + """Test UserProfileEdit""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_quota.py b/edu_sharing_openapi/test/test_user_quota.py new file mode 100644 index 00000000..a3d2df22 --- /dev/null +++ b/edu_sharing_openapi/test/test_user_quota.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_quota import UserQuota + +class TestUserQuota(unittest.TestCase): + """UserQuota unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserQuota: + """Test UserQuota + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserQuota` + """ + model = UserQuota() + if include_optional: + return UserQuota( + enabled = True, + size_current = 56, + size_quota = 56 + ) + else: + return UserQuota( + ) + """ + + def testUserQuota(self): + """Test UserQuota""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_simple.py b/edu_sharing_openapi/test/test_user_simple.py new file mode 100644 index 00000000..6335b656 --- /dev/null +++ b/edu_sharing_openapi/test/test_user_simple.py @@ -0,0 +1,111 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_simple import UserSimple + +class TestUserSimple(unittest.TestCase): + """UserSimple unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserSimple: + """Test UserSimple + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserSimple` + """ + model = UserSimple() + if include_optional: + return UserSimple( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ) + ) + else: + return UserSimple( + authority_name = '', + ) + """ + + def testUserSimple(self): + """Test UserSimple""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_stats.py 
b/edu_sharing_openapi/test/test_user_stats.py new file mode 100644 index 00000000..521ce96d --- /dev/null +++ b/edu_sharing_openapi/test/test_user_stats.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_stats import UserStats + +class TestUserStats(unittest.TestCase): + """UserStats unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserStats: + """Test UserStats + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserStats` + """ + model = UserStats() + if include_optional: + return UserStats( + node_count = 56, + node_count_cc = 56, + collection_count = 56 + ) + else: + return UserStats( + ) + """ + + def testUserStats(self): + """Test UserStats""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_user_status.py b/edu_sharing_openapi/test/test_user_status.py new file mode 100644 index 00000000..adb36cb7 --- /dev/null +++ b/edu_sharing_openapi/test/test_user_status.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.user_status import UserStatus + +class TestUserStatus(unittest.TestCase): + """UserStatus unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> UserStatus: + """Test UserStatus + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `UserStatus` + """ + model = UserStatus() + if include_optional: + return UserStatus( + status = 'active', + var_date = 56 + ) + else: + return UserStatus( + ) + """ + + def testUserStatus(self): + """Test UserStatus""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_value.py b/edu_sharing_openapi/test/test_value.py new file mode 100644 index 00000000..f7f28347 --- /dev/null +++ b/edu_sharing_openapi/test/test_value.py @@ -0,0 +1,54 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.value import Value + +class TestValue(unittest.TestCase): + """Value unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Value: + """Test Value + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Value` + """ + model = Value() + if include_optional: + return Value( + value = '', + count = 56 + ) + else: + return Value( + value = '', + count = 56, + ) + """ + + def testValue(self): + """Test Value""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_value_parameters.py b/edu_sharing_openapi/test/test_value_parameters.py new file mode 100644 index 00000000..508b46e3 --- /dev/null +++ b/edu_sharing_openapi/test/test_value_parameters.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.value_parameters import ValueParameters + +class TestValueParameters(unittest.TestCase): + """ValueParameters unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> ValueParameters: + """Test ValueParameters + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `ValueParameters` + """ + model = ValueParameters() + if include_optional: + return ValueParameters( + query = '', + var_property = '', + pattern = '' + ) + else: + return ValueParameters( + query = '', + var_property = '', + pattern = '', + ) + """ + + def testValueParameters(self): + """Test ValueParameters""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_values.py b/edu_sharing_openapi/test/test_values.py new file mode 100644 index 00000000..a44479f4 --- /dev/null +++ b/edu_sharing_openapi/test/test_values.py @@ -0,0 +1,301 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.values import Values + +class TestValues(unittest.TestCase): + """Values unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Values: + """Test Values + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Values` + """ + model = Values() + if include_optional: + return Values( + supported_languages = [ + '' + ], + extension = '', + login_url = '', + login_allow_local = True, + login_providers_url = '', + login_provider_target_url = '', + register = edu_sharing_client.models.register.Register( + local = True, + recover_password = True, + login_url = '', + recover_url = '', + required_fields = [ + '' + ], ), + recover_password_url = '', + imprint_url = '', + privacy_information_url = '', + help_url = '', + whats_new_url = '', + edit_profile_url = '', + edit_profile = True, + workspace_columns = [ + '' + ], + workspace_shared_to_me_default_all = True, + hide_main_menu = [ + '' + ], + logout = edu_sharing_client.models.logout_info.LogoutInfo( + url = '', + destroy_session = True, + ajax = True, + next = '', ), + menu_entries = [ + edu_sharing_client.models.menu_entry.MenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + path = '', + scope = '', ) + ], + custom_options = [ + edu_sharing_client.models.context_menu_entry.ContextMenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + mode = '', + scopes = [ + 'Render' + ], + ajax = True, + group = '', + permission = '', + toolpermission = '', + is_directory = True, + show_as_action = True, + multiple = True, + change_strategy = 'update', ) + ], + user_menu_overrides = [ + edu_sharing_client.models.context_menu_entry.ContextMenuEntry( + position = 56, + icon = '', + name = '', + url = '', + is_disabled = True, + open_in_new = True, + is_separate = True, + is_separate_bottom = True, + only_desktop = True, + only_web = True, + mode = '', + scopes = [ + 'Render' + ], + ajax = True, + group = '', + permission = '', + toolpermission = '', + is_directory = True, + show_as_action = True, + multiple = True, + change_strategy = 'update', ) + ], + allowed_licenses = [ + '' + ], + custom_licenses = [ + edu_sharing_client.models.license.License( + icon = '', + url = '', ) + ], + workflow = edu_sharing_client.models.config_workflow.ConfigWorkflow( + default_receiver = '', + default_status = '', + comment_required = True, + workflows = [ + edu_sharing_client.models.config_workflow_list.ConfigWorkflowList( + id = '', + color = '', + has_receiver = True, + next = [ + '' + ], ) + ], ), + license_dialog_on_upload = True, + node_report = True, + branding = True, + rating = edu_sharing_client.models.config_rating.ConfigRating( + mode = 'none', ), + publishing_notice = True, + site_title = '', + user_display_name = '', + user_secondary_display_name = '', + user_affiliation = True, + default_username = '', + default_password = '', + banner = edu_sharing_client.models.banner.Banner( + url = '', + href = '', + components = [ + '' + ], ), + available_mds = [ + 
edu_sharing_client.models.available_mds.AvailableMds( + repository = '', + mds = [ + '' + ], ) + ], + available_repositories = [ + '' + ], + search_view_type = 56, + workspace_view_type = 56, + items_per_request = 56, + rendering = edu_sharing_client.models.rendering.Rendering( + show_preview = True, + show_download_button = True, + prerender = True, + gdpr = [ + edu_sharing_client.models.rendering_gdpr.RenderingGdpr( + matcher = '', + name = '', + privacy_information_url = '', ) + ], ), + session_expired_dialog = edu_sharing_client.models.session_expired_dialog.SessionExpiredDialog(), + login_default_location = '', + search_group_results = True, + mainnav = edu_sharing_client.models.mainnav.Mainnav( + icon = edu_sharing_client.models.icon.Icon( + url = '', ), + main_menu_style = '', ), + search_sidenav_mode = '', + guest = edu_sharing_client.models.guest.Guest( + enabled = True, ), + collections = edu_sharing_client.models.collections.Collections( + colors = [ + '' + ], ), + license_agreement = edu_sharing_client.models.license_agreement.LicenseAgreement( + node_id = [ + edu_sharing_client.models.license_agreement_node.LicenseAgreementNode( + language = '', + value = '', ) + ], ), + services = edu_sharing_client.models.services.Services( + visualization = '', ), + help_menu_options = [ + edu_sharing_client.models.help_menu_options.HelpMenuOptions( + key = '', + icon = '', + url = '', ) + ], + images = [ + edu_sharing_client.models.image.Image( + src = '', + replace = '', ) + ], + icons = [ + edu_sharing_client.models.font_icon.FontIcon( + original = '', + replace = '', + css_class = '', ) + ], + stream = edu_sharing_client.models.stream.Stream( + enabled = True, ), + admin = edu_sharing_client.models.admin.Admin( + statistics = edu_sharing_client.models.statistics.Statistics( + entries = [ + edu_sharing_client.models.statistic_entry.StatisticEntry( + property = '', + entities = [ + edu_sharing_client.models.statistic_entity.StatisticEntity( + value = '', + count = 56, ) + ], ) + ], ), + editor_type = 'Textarea', ), + simple_edit = edu_sharing_client.models.simple_edit.SimpleEdit( + global_groups = [ + edu_sharing_client.models.simple_edit_global_groups.SimpleEditGlobalGroups( + toolpermission = '', + groups = [ + '' + ], ) + ], + organization = edu_sharing_client.models.simple_edit_organization.SimpleEditOrganization( + group_types = [ + '' + ], ), + organization_filter = '', + licenses = [ + '' + ], ), + frontpage = edu_sharing_client.models.config_frontpage.ConfigFrontpage( + enabled = True, ), + upload = edu_sharing_client.models.config_upload.ConfigUpload( + post_dialog = 'SimpleEdit', ), + publish = edu_sharing_client.models.config_publish.ConfigPublish( + license_mandatory = True, + author_mandatory = True, ), + remote = edu_sharing_client.models.config_remote.ConfigRemote( + rocketchat = edu_sharing_client.models.config_remote_rocketchat.ConfigRemoteRocketchat(), ), + custom_css = '', + theme_colors = edu_sharing_client.models.config_theme_colors.ConfigThemeColors( + color = [ + edu_sharing_client.models.config_theme_color.ConfigThemeColor( + variable = '', + value = '', ) + ], ), + privacy = edu_sharing_client.models.config_privacy.ConfigPrivacy( + cookie_disclaimer = True, ), + tutorial = edu_sharing_client.models.config_tutorial.ConfigTutorial( + enabled = True, ) + ) + else: + return Values( + ) + """ + + def testValues(self): + """Test Values""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) 
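+        #
+        # A minimal sketch of what an activated stub could assert, assuming the
+        # generated `Values` model keeps the no-argument constructor shown in the
+        # commented template above and the usual openapi-generator round-trip
+        # helpers (`to_dict()` / `from_dict()`); the names below are illustrative
+        # additions, not part of the generated file:
+        #
+        # inst = Values()
+        # self.assertIsInstance(inst, Values)
+        # round_tripped = Values.from_dict(inst.to_dict())
+        # self.assertEqual(round_tripped.to_dict(), inst.to_dict())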
+ +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_variables.py b/edu_sharing_openapi/test/test_variables.py new file mode 100644 index 00000000..9fae75f4 --- /dev/null +++ b/edu_sharing_openapi/test/test_variables.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.variables import Variables + +class TestVariables(unittest.TestCase): + """Variables unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Variables: + """Test Variables + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Variables` + """ + model = Variables() + if include_optional: + return Variables( + var_global = { + 'key' : '' + }, + current = { + 'key' : '' + } + ) + else: + return Variables( + ) + """ + + def testVariables(self): + """Test Variables""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_version.py b/edu_sharing_openapi/test/test_version.py new file mode 100644 index 00000000..0dfe17e8 --- /dev/null +++ b/edu_sharing_openapi/test/test_version.py @@ -0,0 +1,56 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.version import Version + +class TestVersion(unittest.TestCase): + """Version unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> Version: + """Test Version + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `Version` + """ + model = Version() + if include_optional: + return Version( + full = '', + major = '', + minor = '', + patch = '', + qualifier = '', + build = '' + ) + else: + return Version( + ) + """ + + def testVersion(self): + """Test Version""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_version_build.py b/edu_sharing_openapi/test/test_version_build.py new file mode 100644 index 00000000..5ce4e99a --- /dev/null +++ b/edu_sharing_openapi/test/test_version_build.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.version_build import VersionBuild + +class TestVersionBuild(unittest.TestCase): + """VersionBuild unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VersionBuild: + """Test VersionBuild + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VersionBuild` + """ + model = VersionBuild() + if include_optional: + return VersionBuild( + timestamp = '' + ) + else: + return VersionBuild( + ) + """ + + def testVersionBuild(self): + """Test VersionBuild""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_version_git.py b/edu_sharing_openapi/test/test_version_git.py new file mode 100644 index 00000000..96157114 --- /dev/null +++ b/edu_sharing_openapi/test/test_version_git.py @@ -0,0 +1,55 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.version_git import VersionGit + +class TestVersionGit(unittest.TestCase): + """VersionGit unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VersionGit: + """Test VersionGit + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VersionGit` + """ + model = VersionGit() + if include_optional: + return VersionGit( + branch = '', + commit = edu_sharing_client.models.version_git_commit.VersionGitCommit( + id = '', + timestamp = edu_sharing_client.models.version_timestamp.VersionTimestamp( + datetime = '', ), ) + ) + else: + return VersionGit( + ) + """ + + def testVersionGit(self): + """Test VersionGit""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_version_git_commit.py b/edu_sharing_openapi/test/test_version_git_commit.py new file mode 100644 index 00000000..b5bb2405 --- /dev/null +++ b/edu_sharing_openapi/test/test_version_git_commit.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.version_git_commit import VersionGitCommit + +class TestVersionGitCommit(unittest.TestCase): + """VersionGitCommit unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VersionGitCommit: + """Test VersionGitCommit + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VersionGitCommit` + """ + model = VersionGitCommit() + if include_optional: + return VersionGitCommit( + id = '', + timestamp = edu_sharing_client.models.version_timestamp.VersionTimestamp( + datetime = '', ) + ) + else: + return VersionGitCommit( + ) + """ + + def testVersionGitCommit(self): + """Test VersionGitCommit""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_version_maven.py b/edu_sharing_openapi/test/test_version_maven.py new file mode 100644 index 00000000..344a605d --- /dev/null +++ b/edu_sharing_openapi/test/test_version_maven.py @@ -0,0 +1,57 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.version_maven import VersionMaven + +class TestVersionMaven(unittest.TestCase): + """VersionMaven unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VersionMaven: + """Test VersionMaven + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VersionMaven` + """ + model = VersionMaven() + if include_optional: + return VersionMaven( + bom = { + 'key' : '' + }, + project = edu_sharing_client.models.version_project.VersionProject( + artifact_id = '', + group_id = '', + version = '', ) + ) + else: + return VersionMaven( + ) + """ + + def testVersionMaven(self): + """Test VersionMaven""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_version_project.py b/edu_sharing_openapi/test/test_version_project.py new file mode 100644 index 00000000..842df29f --- /dev/null +++ b/edu_sharing_openapi/test/test_version_project.py @@ -0,0 +1,53 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.version_project import VersionProject + +class TestVersionProject(unittest.TestCase): + """VersionProject unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VersionProject: + """Test VersionProject + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VersionProject` + """ + model = VersionProject() + if include_optional: + return VersionProject( + artifact_id = '', + group_id = '', + version = '' + ) + else: + return VersionProject( + ) + """ + + def testVersionProject(self): + """Test VersionProject""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_version_timestamp.py b/edu_sharing_openapi/test/test_version_timestamp.py new file mode 100644 index 00000000..40bc66f5 --- /dev/null +++ b/edu_sharing_openapi/test/test_version_timestamp.py @@ -0,0 +1,51 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.version_timestamp import VersionTimestamp + +class TestVersionTimestamp(unittest.TestCase): + """VersionTimestamp unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> VersionTimestamp: + """Test VersionTimestamp + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `VersionTimestamp` + """ + model = VersionTimestamp() + if include_optional: + return VersionTimestamp( + datetime = '' + ) + else: + return VersionTimestamp( + ) + """ + + def testVersionTimestamp(self): + """Test VersionTimestamp""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_website_information.py b/edu_sharing_openapi/test/test_website_information.py new file mode 100644 index 00000000..549d6dfa --- /dev/null +++ b/edu_sharing_openapi/test/test_website_information.py @@ -0,0 +1,262 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.website_information import WebsiteInformation + +class TestWebsiteInformation(unittest.TestCase): + """WebsiteInformation unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WebsiteInformation: + """Test WebsiteInformation + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WebsiteInformation` + """ + model = WebsiteInformation() + if include_optional: + return WebsiteInformation( + duplicate_nodes = [ + edu_sharing_client.models.node.Node( + node_lti_deep_link = edu_sharing_client.models.node_lti_deep_link.NodeLTIDeepLink( + lti_deep_link_return_url = '', + jwt_deep_link_response = '', ), + remote = edu_sharing_client.models.remote.Remote( + repository = edu_sharing_client.models.repo.Repo( + repository_type = '', + rendering_supported = True, + id = '', + title = '', + icon = '', + logo = '', + is_home_repo = True, ), + id = '', ), + content = edu_sharing_client.models.content.Content( + url = '', + hash = '', + version = '', ), + license = edu_sharing_client.models.license.License( + icon = '', + url = '', ), + is_directory = True, + comment_count = 56, + rating = edu_sharing_client.models.rating_details.RatingDetails( + overall = edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ), + affiliation = { + 'key' : edu_sharing_client.models.rating_data.RatingData( + sum = 1.337, + count = 56, ) + }, + user = 1.337, ), + used_in_collections = [ + edu_sharing_client.models.node.Node( + is_directory = True, + comment_count = 56, + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + parent = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = edu_sharing_client.models.person.Person( + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), + first_name = '', + last_name = '', + mailbox = '', ), + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = edu_sharing_client.models.person.Person( + first_name = '', + last_name = '', + mailbox = '', ), + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 
56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + relations = { + 'key' : + }, + contributors = [ + edu_sharing_client.models.contributor.Contributor( + property = '', + firstname = '', + lastname = '', + email = '', + vcard = '', + org = '', ) + ], + ref = , + parent = , + type = '', + aspects = [ + '' + ], + name = '', + title = '', + metadataset = '', + repository_type = '', + created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + created_by = , + modified_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), + modified_by = , + access = [ + '' + ], + download_url = '', + properties = { + 'key' : [ + '' + ] + }, + mimetype = '', + mediatype = '', + size = '', + preview = edu_sharing_client.models.preview.Preview( + is_icon = True, + is_generated = True, + mimetype = '', + data = 'YQ==', + url = '', + width = 56, + height = 56, ), + icon_url = '', + collection = edu_sharing_client.models.collection.Collection( + scope = '', + author_freetext = '', + order_ascending = True, + level0 = True, + title = '', + description = '', + type = '', + viewtype = '', + order_mode = '', + x = 56, + y = 56, + z = 56, + color = '', + from_user = True, + pinned = True, + child_collections_count = 56, + child_references_count = 56, ), + owner = , + is_public = True, ) + ], + title = '', + page = '', + description = '', + license = '', + keywords = [ + '' + ] + ) + else: + return WebsiteInformation( + ) + """ + + def testWebsiteInformation(self): + """Test WebsiteInformation""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_widget_data_dto.py b/edu_sharing_openapi/test/test_widget_data_dto.py new file mode 100644 index 00000000..33e78b89 --- /dev/null +++ b/edu_sharing_openapi/test/test_widget_data_dto.py @@ -0,0 +1,52 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.widget_data_dto import WidgetDataDTO + +class TestWidgetDataDTO(unittest.TestCase): + """WidgetDataDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WidgetDataDTO: + """Test WidgetDataDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WidgetDataDTO` + """ + model = WidgetDataDTO() + if include_optional: + return WidgetDataDTO( + id = '', + caption = '' + ) + else: + return WidgetDataDTO( + ) + """ + + def testWidgetDataDTO(self): + """Test WidgetDataDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_workflow_event_dto.py b/edu_sharing_openapi/test/test_workflow_event_dto.py new file mode 100644 index 00000000..28ca468b --- /dev/null +++ b/edu_sharing_openapi/test/test_workflow_event_dto.py @@ -0,0 +1,60 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.workflow_event_dto import WorkflowEventDTO + +class TestWorkflowEventDTO(unittest.TestCase): + """WorkflowEventDTO unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WorkflowEventDTO: + """Test WorkflowEventDTO + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WorkflowEventDTO` + """ + model = WorkflowEventDTO() + if include_optional: + return WorkflowEventDTO( + node = edu_sharing_client.models.node_data_dto.NodeDataDTO( + type = '', + aspects = [ + '' + ], + properties = { + 'key' : None + }, ), + workflow_status = '', + user_comment = '' + ) + else: + return WorkflowEventDTO( + ) + """ + + def testWorkflowEventDTO(self): + """Test WorkflowEventDTO""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/test/test_workflow_history.py b/edu_sharing_openapi/test/test_workflow_history.py new file mode 100644 index 00000000..2e979584 --- /dev/null +++ b/edu_sharing_openapi/test/test_workflow_history.py @@ -0,0 +1,120 @@ +# coding: utf-8 + +""" + edu-sharing Repository REST API + + The public restful API of the edu-sharing repository. + + The version of the OpenAPI document: 1.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +import unittest + +from edu_sharing_client.models.workflow_history import WorkflowHistory + +class TestWorkflowHistory(unittest.TestCase): + """WorkflowHistory unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def make_instance(self, include_optional) -> WorkflowHistory: + """Test WorkflowHistory + include_optional is a boolean, when False only required + params are included, when True both required and + optional params are included """ + # uncomment below to create an instance of `WorkflowHistory` + """ + model = WorkflowHistory() + if include_optional: + return WorkflowHistory( + time = 56, + editor = edu_sharing_client.models.user_simple.UserSimple( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + status = edu_sharing_client.models.user_status.UserStatus( + date = 56, ), + organizations = [ + edu_sharing_client.models.organization.Organization( + editable = True, + signup_method = 'simple', + ref = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), + aspects = [ + '' + ], + authority_name = '', + authority_type = 'USER', + group_name = '', + profile = edu_sharing_client.models.group_profile.GroupProfile( + group_email = '', + display_name = '', + group_type = '', + scope_type = '', ), + administration_access = True, + shared_folder = edu_sharing_client.models.node_ref.NodeRef( + repo = '', + id = '', + archived = True, + is_home_repo = True, ), ) + ], + authority_name = '', + authority_type = 'USER', + user_name = '', + profile = edu_sharing_client.models.user_profile.UserProfile( + primary_affiliation = '', + skills = [ + '' + ], + types = [ + '' + ], + vcard = '', + type = [ + '' + ], + first_name = '', + last_name = '', + email = '', + avatar = '', + about = '', ), ), + receiver = [ + edu_sharing_client.models.authority.Authority( + properties = { + 'key' : [ + '' + ] + }, + editable = True, + authority_name = '', + authority_type = 'USER', ) + ], + status = '', + comment = '' + ) + else: + return WorkflowHistory( + ) + """ + + def testWorkflowHistory(self): + """Test WorkflowHistory""" + # inst_req_only = self.make_instance(include_optional=False) + # inst_req_and_optional = self.make_instance(include_optional=True) + +if __name__ == '__main__': + unittest.main() diff --git a/edu_sharing_openapi/tox.ini b/edu_sharing_openapi/tox.ini new file mode 100644 index 00000000..f8c0ba73 --- /dev/null +++ b/edu_sharing_openapi/tox.ini @@ -0,0 +1,9 @@ +[tox] +envlist = py3 + +[testenv] +deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + +commands= + pytest --cov=edu_sharing_client From 68974e92b2afc6dc6ad6d896c696af2778fdd2c1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:57:36 +0200 Subject: [PATCH 539/590] change: rework es_connector for edu-sharing v9 (work-in-progress) --- converter/es_connector.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 0ec916b9..b02412fe 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -7,7 +7,7 @@ import uuid from asyncio import Semaphore from enum import Enum -from typing import List +from typing import List, Optional import httpx import requests @@ -18,14 +18,14 @@ from converter import env from converter.constants import Constants -from edu_sharing_client import ABOUTApi -from 
edu_sharing_client.api.bulk_v1_api import BULKV1Api -from edu_sharing_client.api.iam_v1_api import IAMV1Api -from edu_sharing_client.api.mediacenter_v1_api import MEDIACENTERV1Api -from edu_sharing_client.api.node_v1_api import NODEV1Api -from edu_sharing_client.api_client import ApiClient -from edu_sharing_client.configuration import Configuration -from edu_sharing_client.rest import ApiException +from edu_sharing_openapi.edu_sharing_client.api.about_api import ABOUTApi +from edu_sharing_openapi.edu_sharing_client.api.bulkv1_api import BULKV1Api +from edu_sharing_openapi.edu_sharing_client.api.iamv1_api import IAMV1Api +from edu_sharing_openapi.edu_sharing_client.api.mediacenterv1_api import MEDIACENTERV1Api +from edu_sharing_openapi.edu_sharing_client.api.nodev1_api import NODEV1Api +from edu_sharing_openapi.edu_sharing_client.api_client import ApiClient +from edu_sharing_openapi.edu_sharing_client.configuration import Configuration +from edu_sharing_openapi.edu_sharing_client.exceptions import ApiException log = logging.getLogger(__name__) @@ -49,31 +49,31 @@ class EduSharingConstants: "unknown": "ccm:lifecyclecontributer_unknown", # (= contributor in an unknown capacity ("Mitarbeiter")) } - # creating the swagger client: java -jar swagger-codegen-cli-3.0.20.jar generate -l python -i http://localhost:8080/edu-sharing/rest/swagger.json -o edu_sharing_swagger -c edu-sharing-swagger.config.json +# ToDo: document API-Client generation via "openapi-generator-cli" with all necessary settings class ESApiClient(ApiClient): COOKIE_REBUILD_THRESHOLD = 60 * 5 lastRequestTime = 0 - def deserialize(self, response, response_type): + def deserialize(self, response_text: str, response_type: str, content_type: Optional[str]): """Deserializes response into an object. - :param response: RESTResponse object to be deserialized. - :param response_type: class literal for - deserialized object, or string of class name. + :param response_text: response body text to be deserialized. + :param response_type: class literal for the deserialized object, or string of class name. + :param content_type: content type of the response :return: deserialized object.
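+        Note: this override skips the generated model deserialization and returns the decoded JSON (or the raw response text) as-is; see the edu-sharing workaround in the method body.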
""" # handle file downloading # save response body into a tmp file and return the instance if response_type == "file": - return self.__deserialize_file(response) + return self.__deserialize_file(response_text) # fetch data from response object try: - data = json.loads(response.data) + data = json.loads(response_text) except ValueError: - data = response.data + data = response_text # workaround for es: simply return to prevent error throwing # return self.__deserialize(data, response_type) return data @@ -139,7 +139,7 @@ def sync_node(self, spider, type, properties): groupBy = ["ccm:replicationsourceorigin"] try: response = EduSharing.bulkApi.sync( - body=properties, + request_body=properties, match=["ccm:replicationsource", "ccm:replicationsourceid"], type=type, group=spider.name, @@ -877,6 +877,7 @@ def find_item(self, id, spider): self.init_api_client() return None if e.status == 404: + # ToDo: handle "edu_sharing_client.exceptions.NotFoundException" try: error_dict: dict = json.loads(e.body) error_name: str = error_dict["error"] From 50254192ff7ad4530a5fdc90984fad9306095cc8 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:16:13 +0200 Subject: [PATCH 540/590] change: filepath of edu-sharing v9.x openAPI client --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 11e06088..88060761 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ COPY scrapy.cfg scrapy.cfg COPY setup.cfg setup.cfg COPY converter/ converter/ COPY csv/ csv/ -COPY edu_sharing_client/ edu_sharing_client/ +COPY edu_sharing_openapi/ edu_sharing_openapi/ COPY valuespace_converter/ valuespace_converter/ From 559940ba8dd7e2e6ffc128c75d586246c6ec6404 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 27 Aug 2024 19:18:07 +0200 Subject: [PATCH 541/590] add edu_sharing_client related dependencies and update lockfile --- poetry.lock | 1053 +++++++++++++++++++++++++--------------------- pyproject.toml | 6 + requirements.txt | 47 +-- 3 files changed, 612 insertions(+), 494 deletions(-) diff --git a/poetry.lock b/poetry.lock index ed84c748..797e8f3b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "anyio" version = "4.4.0" @@ -55,38 +66,34 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "automat" -version = "22.10.0" +version = "24.8.1" description = "Self-service finite-state machines for the programmer on the go." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"}, - {file = "Automat-22.10.0.tar.gz", hash = "sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e"}, + {file = "Automat-24.8.1-py3-none-any.whl", hash = "sha256:bf029a7bc3da1e2c24da2343e7598affaa9f10bf0ab63ff808566ce90551e02a"}, + {file = "automat-24.8.1.tar.gz", hash = "sha256:b34227cf63f6325b8ad2399ede780675083e439b20c323d376373d8ee6306d88"}, ] -[package.dependencies] -attrs = ">=19.2.0" -six = "*" - [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] @@ -184,63 +191,78 @@ files = [ [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.0" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, + {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, + {file = 
"cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, + {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, + {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, + {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, + {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, + {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, + {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, + {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, + {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, + {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, + {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, + {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, + {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, + {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [package.dependencies] @@ -383,59 +405,57 @@ files = [ [[package]] name = "courlan" -version = "1.2.0" +version = "1.3.0" description = "Clean, filter and sample URLs to optimize data collection – includes spam, content type and language filters." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "courlan-1.2.0-py3-none-any.whl", hash = "sha256:df9d3735b611e717c52a813a49d17a8b4d3a9d8b87bbace9065171fc5d084397"}, - {file = "courlan-1.2.0.tar.gz", hash = "sha256:0cbc9cac83970c651b937a7823a5d92cbebb6b601454ea0fb6cb4d0ee5d1845d"}, + {file = "courlan-1.3.0-py3-none-any.whl", hash = "sha256:bb30982108ef987731b127f1ecf5dfd5b7e46c825630e3c9313c80b4a454954c"}, + {file = "courlan-1.3.0.tar.gz", hash = "sha256:3868f388122f2b09d154802043fe92dfd62c3ea7a700eaae8abc05198cf8bc25"}, ] [package.dependencies] -babel = ">=2.11.0" -tld = {version = ">=0.13", markers = "python_version >= \"3.7\""} -urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} +babel = ">=2.15.0" +tld = ">=0.13" +urllib3 = ">=1.26,<3" + +[package.extras] +dev = ["black", "mypy", "pytest", "pytest-cov"] [[package]] name = "cryptography" -version = "42.0.8" +version = "43.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = 
"sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, + {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, + {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, + {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, + {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, + {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, + {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, + {file = 
"cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, + {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, + {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, + {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, + {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, + {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, + {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, ] [package.dependencies] @@ -448,7 +468,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -497,17 +517,17 @@ files = [ [[package]] name = "django" -version = "5.0.6" +version = "5.1" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
optional = false python-versions = ">=3.10" files = [ - {file = "Django-5.0.6-py3-none-any.whl", hash = "sha256:8363ac062bb4ef7c3f12d078f6fa5d154031d129a15170a1066412af49d30905"}, - {file = "Django-5.0.6.tar.gz", hash = "sha256:ff1b61005004e476e0aeea47c7f79b85864c70124030e95146315396f1e7951f"}, + {file = "Django-5.1-py3-none-any.whl", hash = "sha256:d3b811bf5371a26def053d7ee42a9df1267ef7622323fe70a601936725aa4557"}, + {file = "Django-5.1.tar.gz", hash = "sha256:848a5980e8efb76eea70872fb0e4bc5e371619c70fffbe48e3e1b50b2c09455d"}, ] [package.dependencies] -asgiref = ">=3.7.0,<4" +asgiref = ">=3.8.1,<4" sqlparse = ">=0.3.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -517,13 +537,13 @@ bcrypt = ["bcrypt"] [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -715,24 +735,25 @@ lxml = ["lxml"] [[package]] name = "htmldate" -version = "1.8.1" +version = "1.9.0" description = "Fast and robust extraction of original and updated publication dates from URLs and web pages." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "htmldate-1.8.1-py3-none-any.whl", hash = "sha256:b1209dedfa7bc9bb4d0b812a3f0983ea5d39f1bdfe21745659ad26af4f8b7f32"}, - {file = "htmldate-1.8.1.tar.gz", hash = "sha256:caf1686cf75c61dd1f061ede5d7a46e759b15d5f9987cd8e13c8c4237511263d"}, + {file = "htmldate-1.9.0-py3-none-any.whl", hash = "sha256:750dd97acb8cf6d5912082e65cc188acc8d1a737ca495e0ee7a33aa1d06484c2"}, + {file = "htmldate-1.9.0.tar.gz", hash = "sha256:90bc3c66cbb49be21888f54b9a20c0b6739497399a87789e64247fc4e04c292f"}, ] [package.dependencies] -charset-normalizer = {version = ">=3.3.2", markers = "python_version >= \"3.7\""} +charset-normalizer = ">=3.3.2" dateparser = ">=1.1.2" -lxml = {version = ">=4.9.4,<6", markers = "platform_system != \"Darwin\" or python_version > \"3.8\""} +lxml = {version = ">=5.2.2,<6", markers = "platform_system != \"Darwin\" or python_version > \"3.8\""} python-dateutil = ">=2.8.2" -urllib3 = {version = ">=1.26,<3", markers = "python_version >= \"3.7\""} +urllib3 = ">=1.26,<3" [package.extras] -all = ["backports-datetime-fromisoformat", "faust-cchardet (>=2.1.19)", "urllib3[brotli]"] +all = ["htmldate[dev]", "htmldate[speed]"] +dev = ["black", "mypy", "pytest", "pytest-cov", "types-dateparser", "types-lxml", "types-python-dateutil", "types-urllib3"] speed = ["backports-datetime-fromisoformat", "faust-cchardet (>=2.1.19)", "urllib3[brotli]"] [[package]] @@ -796,13 +817,13 @@ idna = ">=2.5" [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = 
"sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] @@ -822,18 +843,21 @@ six = "*" [[package]] name = "incremental" -version = "22.10.0" -description = "\"A small library that versions your Python projects.\"" +version = "24.7.2" +description = "A small library that versions your Python projects." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, - {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, + {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, + {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, ] +[package.dependencies] +setuptools = ">=61.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} + [package.extras] -mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] -scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] +scripts = ["click (>=6.0)"] [[package]] name = "iniconfig" @@ -961,153 +985,149 @@ test = ["pytest", "pytest-cov"] [[package]] name = "lxml" -version = "5.2.2" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.6" files = [ - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, - {file = 
"lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, - {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, - {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, - {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, - {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, - {file = 
"lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, - {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, - {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, - {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, - {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, - {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, - {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = 
"sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, - {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, - {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, - {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, - {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, - {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, - 
{file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, - {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, - {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, - 
{file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, - {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = 
"sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", 
hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = 
"lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + 
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.dependencies] @@ -1118,17 +1138,17 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "lxml-html-clean" -version = "0.1.1" +version = "0.2.0" description = "HTML cleaner from lxml project" optional = false python-versions = "*" files = [ - {file = "lxml_html_clean-0.1.1-py3-none-any.whl", hash = "sha256:58c04176593c9caf72ec92e033d2f38859e918b3eff0cc0f8051ad27dc2ab8ef"}, - {file = "lxml_html_clean-0.1.1.tar.gz", hash = "sha256:8a644ed01dbbe132fabddb9467f077f6dad12a1d4f3a6a553e280f3815fa46df"}, + {file = "lxml_html_clean-0.2.0-py3-none-any.whl", hash = "sha256:80bdc730b288b8e68f0bf86b99f4bbef129c5ec59b694c6681422be4c1eeb3c5"}, + {file = "lxml_html_clean-0.2.0.tar.gz", hash = "sha256:47c323f39d95d4cbf4956da62929c89a79313074467efaa4821013c97bf95628"}, ] [package.dependencies] @@ -1490,13 +1510,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" -version = "2.12.0" +version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, - {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -1510,6 +1530,129 @@ files = [ {file = "pycparser-2.22.tar.gz", hash = 
"sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +[[package]] +name = "pydantic" +version = "2.8.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.20.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", 
hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = 
"pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pydispatcher" version = "2.0.7" @@ -1554,17 +1697,17 @@ files = [ [[package]] name = "pyopenssl" -version = "24.1.0" +version = "24.2.1" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-24.1.0-py3-none-any.whl", hash = "sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad"}, - {file = "pyOpenSSL-24.1.0.tar.gz", hash = "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"}, + {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, + {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, ] [package.dependencies] -cryptography = ">=41.0.5,<43" +cryptography = ">=41.0.5,<44" [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] @@ -1572,13 +1715,13 @@ test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -1596,19 +1739,19 @@ files = [ [[package]] name = "pyrdfa3" -version = "3.6.2" +version = "3.6.4" description = "pyRdfa distiller/parser library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyRdfa3-3.6.2-py3-none-any.whl", hash = "sha256:290c2fa966ddd1b45ac94a727da144f5a233ed58c63c370e3d68e6d00b0dee5d"}, - {file = "pyRdfa3-3.6.2.tar.gz", hash = 
"sha256:73681dab957f60901696767388b956a5769c730bc451da6ffb2f0e36f18314c2"}, + {file = "pyRdfa3-3.6.4-py3-none-any.whl", hash = "sha256:ed11affa5567ab7afdbc939a58f9286a274447f3ab2999c260c56b5c6e87fb2f"}, + {file = "pyrdfa3-3.6.4.tar.gz", hash = "sha256:64712d1a4bf21829652b39715bada6e7c03bcf19cb49f962c190a38f46172243"}, ] [package.dependencies] html5lib = ">=1.1" -rdflib = ">=6.1.1" -requests = ">=2.25.1" +rdflib = ">=7.0.0" +requests = ">=2.32.3" [[package]] name = "pytest" @@ -1705,90 +1848,90 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "regex" -version = "2024.5.15" +version = "2024.7.24" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, - {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, - {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, - {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, - {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, - {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, - {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, - {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, - {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, - {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, - {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, - {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, - {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, - {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, - {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, - {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, - {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, - {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, - {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, - {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, - {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, - {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, - {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, - {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, - {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, - {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, - {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, + {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, + {file = 
"regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, + {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, + {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, + {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, + {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, + {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, + {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, + {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, + {file = 
"regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, + {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, + {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, + {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, + {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, + {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, + {file = "regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, + {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, + {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, + {file = 
"regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, + {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, + {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, + {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, + {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, + {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, + {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, + 
{file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, + {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, + {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, + {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, + {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, ] [[package]] @@ -1895,18 +2038,19 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "70.1.1" +version = "73.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"}, - {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"}, + {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, + {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff 
(>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] [[package]] name = "six" @@ -1932,24 +2076,24 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sqlparse" -version = "0.5.0" +version = "0.5.1" description = "A non-validating SQL parser." optional = false python-versions = ">=3.8" files = [ - {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"}, - {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"}, + {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, + {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, ] [package.extras] @@ -2025,13 +2169,13 @@ gui = ["Gooey (>=1.0.1)"] [[package]] name = "twisted" -version = "24.3.0" +version = "24.7.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = ">=3.8.0" files = [ - {file = "twisted-24.3.0-py3-none-any.whl", hash = "sha256:039f2e6a49ab5108abd94de187fa92377abe5985c7a72d68d0ad266ba19eae63"}, - {file = "twisted-24.3.0.tar.gz", hash = "sha256:6b38b6ece7296b5e122c9eb17da2eeab3d98a198f50ca9efd00fb03e5b4fd4ae"}, + {file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"}, + {file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"}, ] [package.dependencies] @@ -2039,53 +2183,24 @@ attrs = ">=21.3.0" automat = ">=0.8.0" constantly = ">=15.1" hyperlink = ">=17.1.1" -incremental = ">=22.10.0" -twisted-iocpsupport = {version = ">=1.0.2,<2", markers = "platform_system == \"Windows\""} +incremental = ">=24.7.0" typing-extensions = ">=4.2.0" zope-interface = ">=5" [package.extras] -all-non-platform = ["twisted[conch,http2,serial,test,tls]", "twisted[conch,http2,serial,test,tls]"] +all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] -dev = ["coverage (>=6b1,<7)", "pyflakes (>=2.2,<3.0)", 
"python-subunit (>=1.4,<2.0)", "twisted[dev-release]", "twistedchecker (>=0.7,<1.0)"] +dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] dev-release = ["pydoctor (>=23.9.0,<23.10.0)", "pydoctor (>=23.9.0,<23.10.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] -gtk-platform = ["pygobject", "pygobject", "twisted[all-non-platform]", "twisted[all-non-platform]"] +gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "twisted[all-non-platform]", "twisted[all-non-platform]"] -mypy = ["mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "twisted[all-non-platform,dev]", "types-pyopenssl", "types-setuptools"] -osx-platform = ["twisted[macos-platform]", "twisted[macos-platform]"] +macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] +osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 
(>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] test = ["cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["pywin32 (!=226)", "pywin32 (!=226)", "twisted[all-non-platform]", "twisted[all-non-platform]"] - -[[package]] -name = "twisted-iocpsupport" -version = "1.0.4" -description = "An extension for use in the twisted I/O Completion Ports reactor." -optional = false -python-versions = "*" -files = [ - {file = "twisted-iocpsupport-1.0.4.tar.gz", hash = "sha256:858096c0d15e33f15ac157f455d8f86f2f2cdd223963e58c0f682a3af8362d89"}, - {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win32.whl", hash = "sha256:afa2b630797f9ed2f27f3d9f55e3f72b4244911e45a8c82756f44babbf0b243e"}, - {file = "twisted_iocpsupport-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:0058c963c8957bcd3deda62122e89953c9de1e867a274facc9b15dde1a9f31e8"}, - {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win32.whl", hash = "sha256:196f7c7ccad4ba4d1783b1c4e1d1b22d93c04275cd780bf7498d16c77319ad6e"}, - {file = "twisted_iocpsupport-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:4e5f97bcbabdd79cbaa969b63439b89801ea560f11d42b0a387634275c633623"}, - {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win32.whl", hash = "sha256:6081bd7c2f4fcf9b383dcdb3b3385d75a26a7c9d2be25b6950c3d8ea652d2d2d"}, - {file = "twisted_iocpsupport-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:76f7e67cec1f1d097d1f4ed7de41be3d74546e1a4ede0c7d56e775c4dce5dfb0"}, - {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win32.whl", hash = "sha256:3d306fc4d88a6bcf61ce9d572c738b918578121bfd72891625fab314549024b5"}, - {file = "twisted_iocpsupport-1.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:391ac4d6002a80e15f35adc4ad6056f4fe1c17ceb0d1f98ba01b0f4f917adfd7"}, - {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win32.whl", hash = "sha256:0c1b5cf37f0b2d96cc3c9bc86fff16613b9f5d0ca565c96cf1f1fb8cfca4b81c"}, - {file = "twisted_iocpsupport-1.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:3c5dc11d72519e55f727320e3cee535feedfaee09c0f0765ed1ca7badff1ab3c"}, - {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win32.whl", hash = "sha256:cc86c2ef598c15d824a243c2541c29459881c67fc3c0adb6efe2242f8f0ec3af"}, - {file = "twisted_iocpsupport-1.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c27985e949b9b1a1fb4c20c71d315c10ea0f93fdf3ccdd4a8c158b5926edd8c8"}, - {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win32.whl", hash = "sha256:e311dfcb470696e3c077249615893cada598e62fa7c4e4ca090167bd2b7d331f"}, - {file = "twisted_iocpsupport-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:4574eef1f3bb81501fb02f911298af3c02fe8179c31a33b361dd49180c3e644d"}, - {file = "twisted_iocpsupport-1.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:872747a3b64e2909aee59c803ccd0bceb9b75bf27915520ebd32d69687040fa2"}, - {file = "twisted_iocpsupport-1.0.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:c2712b778bacf1db434e3e065adfed3db300754186a29aecac1efae9ef4bcaff"}, - {file = 
"twisted_iocpsupport-1.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7c66fa0aa4236b27b3c61cb488662d85dae746a6d1c7b0d91cf7aae118445adf"}, - {file = "twisted_iocpsupport-1.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:300437af17396a945a58dcfffd77863303a8b6d9e65c6e81f1d2eed55b50d444"}, -] +windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] [[package]] name = "typing-extensions" @@ -2206,47 +2321,45 @@ files = [ [[package]] name = "zope-interface" -version = "6.4.post2" +version = "7.0.2" description = "Interfaces for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zope.interface-6.4.post2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c"}, - {file = "zope.interface-6.4.post2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b"}, - {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9"}, - {file = "zope.interface-6.4.post2-cp310-cp310-win_amd64.whl", hash = "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7"}, - {file = "zope.interface-6.4.post2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede"}, - {file = "zope.interface-6.4.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827"}, - {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874"}, - {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e"}, - {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8"}, - {file = "zope.interface-6.4.post2-cp311-cp311-win_amd64.whl", hash = "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250"}, - {file = "zope.interface-6.4.post2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde"}, - {file = "zope.interface-6.4.post2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb"}, - {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854"}, - {file = "zope.interface-6.4.post2-cp312-cp312-win_amd64.whl", hash = "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e"}, - {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc"}, - {file = "zope.interface-6.4.post2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1"}, - {file = "zope.interface-6.4.post2-cp37-cp37m-win_amd64.whl", hash = "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82"}, - {file = "zope.interface-6.4.post2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b"}, - {file = "zope.interface-6.4.post2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1"}, - {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43"}, - {file = "zope.interface-6.4.post2-cp38-cp38-win_amd64.whl", hash = "sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5"}, - {file = "zope.interface-6.4.post2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2"}, - {file = "zope.interface-6.4.post2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4"}, - {file = 
"zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79"}, - {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671"}, - {file = "zope.interface-6.4.post2-cp39-cp39-win_amd64.whl", hash = "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15"}, - {file = "zope.interface-6.4.post2.tar.gz", hash = "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e"}, + {file = "zope.interface-7.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:093ab9a2c5105d826755c43a76770b69353dbe95ec27a0b5e88ab4f63d7744b8"}, + {file = "zope.interface-7.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3246cccb9e4ce34c9b32ad55a53098043af5e7185623bf5de8e6ec5d8e71415e"}, + {file = "zope.interface-7.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375258373bc3879a6c509281487063cf14add7129fc867eb1c287c0db46ca007"}, + {file = "zope.interface-7.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf2746931a6f83370fdc4005dbea4e39e3a3d0333da42897040698c1ff282e9c"}, + {file = "zope.interface-7.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deac72b653817a68b96079c1428ae84860c76f653af03668a02f97b74f8a465b"}, + {file = "zope.interface-7.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:9da2fb807a20cd4fe381e23e2f906f0a0f4acece6d9abac65d5fc0a1f8383ed8"}, + {file = "zope.interface-7.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f5e39373952e1d689476b6e43d779553b165ce332d0fde9c36d9b095f28d052"}, + {file = "zope.interface-7.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:13aacff95c59000ecd562d9717a87eca8211f6bc74bea6b8ca68e742d1f8f13d"}, + {file = "zope.interface-7.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67e0ff3f2e02d6131535956b22795be2ec5af6762f4fe682f67eb723fbc16273"}, + {file = "zope.interface-7.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c47a5068df03f0c9215d3525b166c9d8d4f6d03cbe4e60339818f8c393e3e3e"}, + {file = "zope.interface-7.0.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceaf7ee95735b0d6ac3f5bba0209d056e686999732dc32bd463a53d4488ccdb"}, + {file = "zope.interface-7.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:dd28ba1e2deb0c339881ee7755db649433347bdf3c4f3d885f029fcf10aacdf7"}, + {file = "zope.interface-7.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4b671f943d6487d6f1a6bbdce3faffae35e4f74f98ac9b865d2b7370cb6b0bd3"}, + {file = "zope.interface-7.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b76f6048c1a334e26e5d46fdb4f327d9e7e6b348ad607ee9fdce9c7325b5a635"}, + {file = "zope.interface-7.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40beb36330ef00d2cd50d212a0c74ecd57042b8c8b2b6ebd6247cc98f9808824"}, + {file = "zope.interface-7.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0821efcbdeaf48e12c66b0d19a1f9edec2ed22697ab8885d322c8f82fe5bc892"}, + {file = 
"zope.interface-7.0.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ab785a7af39c6968385a9d39b712d2263661fa3780bd38efec0cefdbb84036"}, + {file = "zope.interface-7.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e299f672cfad3392b097af885a552a51e60d3b44e8572f1401e87f863f8986b4"}, + {file = "zope.interface-7.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e5bf8de5a51aaeddd5e1d1c0ac0ca4f995a4f5a832abdc08bb8fbae25ac660"}, + {file = "zope.interface-7.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:185ef3a7a01fac1151622579a08995aab66590711c1a4f9b605f88129229dba1"}, + {file = "zope.interface-7.0.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91bb6b5e1a158b751e12458d5618c1af42eb3dc8472b87a613d543d9fb7660e0"}, + {file = "zope.interface-7.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f77d58cfc3af86d062b8cfa7194db74ca78a615d66bbd23b251bad1b1ecf9818"}, + {file = "zope.interface-7.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ab142cebe69e0f72bf892da040af97f61fd03c09a23ae2fc7de3ab576c5d4cd"}, + {file = "zope.interface-7.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba16bb2214f671e29b75f16d7b8b0bb1f75cdc8bce06979fdbf638edf6531586"}, + {file = "zope.interface-7.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28f29dd42819d99682e46a8d3cc2ee60461a77554d4320e0e8a37363f04208e0"}, + {file = "zope.interface-7.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:797510df26b82cf619a894dac4ff4036d11f6340bec0287c89cecb0b1b1c429e"}, + {file = "zope.interface-7.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:b9d865209cc9795d0f9f4f63b87a86e7a9e032d3cbbb10b1c13bf27343a4fc54"}, + {file = "zope.interface-7.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:08d86319fd7542984d4c0ef7865759dab58616154cb237a5a1ce758687255de0"}, + {file = "zope.interface-7.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:22c93492e5d2f09100a4a23cf709b20f0305cdbbad14f9af2f6e9311742bed8e"}, + {file = "zope.interface-7.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d20fa14f0f8682ad37a6552712e4493cfb35d66c5fb4f8052af3a50ae6cd4f77"}, + {file = "zope.interface-7.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:356a9c1c8cfece776f54806157057be759d812168395762f47f046b40901e974"}, + {file = "zope.interface-7.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738de1c72390a2caf543247013f617ed15d272e4c19731a998e81dd5a2379f1c"}, + {file = "zope.interface-7.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:8eab70e404c2416176b4630914cda275ca95678529e54e66ea45d1a0be422994"}, + {file = "zope.interface-7.0.2.tar.gz", hash = "sha256:f1146bb27a411d0d40cc0e88182a6b0e979d68ab526c8e5ae9e27c06506ed017"}, ] [package.dependencies] @@ -2260,4 +2373,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "9bca50ae86d8d0c367af2f591dc4d01c882e29076e2708ce03e8355e5bb49737" +content-hash = "1ea50a9b0e3f78464295e2b405a3de2a1d808b7db8f375239b508c447f455ecf" diff --git a/pyproject.toml b/pyproject.toml index a76b6fc2..f6b620e9 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -91,6 +91,12 @@ langcodes = {extras = ["data"], version = "^3.3.0"} httpx = "0.27" async-lru = "2.0.4" +[tool.poetry.group.edu_sharing_client.dependencies] +# these dependencies are used (and automatically generated) by the "openapi-generator-cli"-generated client +# see: /edu_sharing_openapi/pyproject.toml +pydantic = ">=2.8.2" +typing-extensions = ">=4.12.2" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/requirements.txt b/requirements.txt index d29056ba..f61c20f0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,24 +1,24 @@ anyio==4.4.0 ; python_version >= "3.10" and python_version < "4.0" asgiref==3.8.1 ; python_version >= "3.10" and python_version < "4.0" async-lru==2.0.4 ; python_version >= "3.10" and python_version < "4.0" -attrs==23.2.0 ; python_version >= "3.10" and python_version < "4.0" -automat==22.10.0 ; python_version >= "3.10" and python_version < "4.0" +attrs==24.2.0 ; python_version >= "3.10" and python_version < "4.0" +automat==24.8.1 ; python_version >= "3.10" and python_version < "4.0" babel==2.15.0 ; python_version >= "3.10" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" black==24.4.2 ; python_version >= "3.10" and python_version < "4.0" certifi==2024.6.2 ; python_version >= "3.10" and python_version < "4.0" -cffi==1.16.0 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" +cffi==1.17.0 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") constantly==23.10.4 ; python_version >= "3.10" and python_version < "4.0" -courlan==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -cryptography==42.0.8 ; python_version >= "3.10" and python_version < "4.0" +courlan==1.3.0 ; python_version >= "3.10" and python_version < "4.0" +cryptography==43.0.0 ; python_version >= "3.10" and python_version < "4.0" cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" dateparser==1.2.0 ; python_version >= "3.10" and python_version < "4.0" defusedxml==0.7.1 ; python_version >= "3.10" and python_version < "4.0" -django==5.0.6 ; python_version >= "3.10" and python_version < "4.0" -exceptiongroup==1.2.1 ; python_version >= "3.10" and python_version < "3.11" +django==5.1 ; python_version >= "3.10" and python_version < "4.0" +exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" extruct==0.17.0 ; python_version >= "3.10" and python_version < "4.0" filelock==3.15.4 ; python_version >= "3.10" and python_version < "4.0" flake8==7.1.0 ; python_version >= "3.10" and python_version < "4.0" @@ -27,13 +27,13 @@ h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" html-text==0.6.2 ; python_version >= "3.10" and python_version < "4.0" html2text==2024.2.26 ; python_version >= "3.10" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" -htmldate==1.8.1 ; python_version >= "3.10" and python_version < "4.0" +htmldate==1.9.0 ; python_version >= "3.10" and python_version < "4.0" httpcore==1.0.5 ; python_version >= "3.10" and python_version < "4.0" httpx==0.27.0 ; python_version >= "3.10" and 
python_version < "4.0" hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" -idna==3.7 ; python_version >= "3.10" and python_version < "4.0" +idna==3.8 ; python_version >= "3.10" and python_version < "4.0" image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" -incremental==22.10.0 ; python_version >= "3.10" and python_version < "4.0" +incremental==24.7.2 ; python_version >= "3.10" and python_version < "4.0" iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" isodate==0.6.1 ; python_version >= "3.10" and python_version < "4.0" itemadapter==0.9.0 ; python_version >= "3.10" and python_version < "4.0" @@ -43,9 +43,9 @@ jstyleson==0.0.2 ; python_version >= "3.10" and python_version < "4.0" justext==3.0.1 ; python_version >= "3.10" and python_version < "4.0" langcodes[data]==3.4.0 ; python_version >= "3.10" and python_version < "4.0" language-data==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -lxml-html-clean==0.1.1 ; python_version >= "3.10" and python_version < "4.0" -lxml==5.2.2 ; python_version >= "3.10" and python_version < "4.0" -lxml[html-clean]==5.2.2 ; python_version >= "3.10" and python_version < "4.0" +lxml-html-clean==0.2.0 ; python_version >= "3.10" and python_version < "4.0" +lxml==5.3.0 ; python_version >= "3.10" and python_version < "4.0" +lxml[html-clean]==5.3.0 ; python_version >= "3.10" and python_version < "4.0" marisa-trie==1.2.0 ; python_version >= "3.10" and python_version < "4.0" mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" mf2py==2.0.1 ; python_version >= "3.10" and python_version < "4.0" @@ -60,38 +60,37 @@ pluggy==1.5.0 ; python_version >= "3.10" and python_version < "4.0" protego==0.3.1 ; python_version >= "3.10" and python_version < "4.0" pyasn1-modules==0.4.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1==0.6.0 ; python_version >= "3.10" and python_version < "4.0" -pycodestyle==2.12.0 ; python_version >= "3.10" and python_version < "4.0" +pycodestyle==2.12.1 ; python_version >= "3.10" and python_version < "4.0" pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" pyee==11.1.0 ; python_version >= "3.10" and python_version < "4.0" pyflakes==3.2.0 ; python_version >= "3.10" and python_version < "4.0" -pyopenssl==24.1.0 ; python_version >= "3.10" and python_version < "4.0" -pyparsing==3.1.2 ; python_version >= "3.10" and python_version < "4.0" +pyopenssl==24.2.1 ; python_version >= "3.10" and python_version < "4.0" +pyparsing==3.1.4 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" -pyrdfa3==3.6.2 ; python_version >= "3.10" and python_version < "4.0" +pyrdfa3==3.6.4 ; python_version >= "3.10" and python_version < "4.0" pytest==8.2.2 ; python_version >= "3.10" and python_version < "4.0" python-dateutil==2.9.0.post0 ; python_version >= "3.10" and python_version < "4.0" python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" pytz==2024.1 ; python_version >= "3.10" and python_version < "4.0" queuelib==1.7.0 ; python_version >= "3.10" and python_version < "4.0" rdflib==7.0.0 ; python_version >= "3.10" and python_version < "4.0" -regex==2024.5.15 ; python_version >= "3.10" and python_version < "4.0" +regex==2024.7.24 ; python_version >= "3.10" and 
python_version < "4.0" requests-file==2.1.0 ; python_version >= "3.10" and python_version < "4.0" requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" scrapy==2.11.2 ; python_version >= "3.10" and python_version < "4.0" service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==70.1.1 ; python_version >= "3.10" and python_version < "4.0" +setuptools==73.0.1 ; python_version >= "3.10" and python_version < "4.0" six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" -soupsieve==2.5 ; python_version >= "3.10" and python_version < "4.0" -sqlparse==0.5.0 ; python_version >= "3.10" and python_version < "4.0" +soupsieve==2.6 ; python_version >= "3.10" and python_version < "4.0" +sqlparse==0.5.1 ; python_version >= "3.10" and python_version < "4.0" tld==0.13 ; python_version >= "3.10" and python_version < "4" tldextract==5.1.2 ; python_version >= "3.10" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" trafilatura==1.11.0 ; python_version >= "3.10" and python_version < "4.0" -twisted-iocpsupport==1.0.4 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" -twisted==24.3.0 ; python_version >= "3.10" and python_version < "4.0" +twisted==24.7.0 ; python_version >= "3.10" and python_version < "4.0" typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" tzdata==2024.1 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") tzlocal==5.2 ; python_version >= "3.10" and python_version < "4.0" @@ -101,4 +100,4 @@ w3lib==2.2.1 ; python_version >= "3.10" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" wheel==0.43.0 ; python_version >= "3.10" and python_version < "4.0" xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" -zope-interface==6.4.post2 ; python_version >= "3.10" and python_version < "4.0" +zope-interface==7.0.2 ; python_version >= "3.10" and python_version < "4.0" From cf81695a7b75d41d0ecc6c427ede571d68097111 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 28 Aug 2024 09:59:44 +0200 Subject: [PATCH 542/590] DMED-119 - add `test_spider` and revert some of the changes to be more aligned with `master` branch --- converter/env.py | 1 - converter/es_connector.py | 35 +- converter/items.py | 42 +-- converter/pipelines.py | 13 +- converter/settings.py | 6 +- converter/spiders/base_classes/lom_base.py | 10 - converter/spiders/fwu_spider.py | 241 -------------- .../spiders/mediothek_pixiothek_spider.py | 310 ++---------------- converter/spiders/merlin_spider.py | 214 ++++-------- converter/spiders/oeh_spider.py | 26 +- converter/spiders/test.py | 217 ++++++++++++ schulcloud/oeh_importer.py | 10 - 12 files changed, 311 insertions(+), 814 deletions(-) delete mode 100644 converter/spiders/fwu_spider.py create mode 100644 converter/spiders/test.py diff --git a/converter/env.py b/converter/env.py index 8fe05cab..0c8450e7 100644 --- a/converter/env.py +++ b/converter/env.py @@ -3,7 +3,6 @@ from dotenv import load_dotenv from typing import NoReturn - load_dotenv() diff --git a/converter/es_connector.py b/converter/es_connector.py index 76023b2e..3ca0700b 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -17,7 +17,6 @@ from converter import env 
from converter.constants import Constants -from converter.spiders.utils.spider_name_converter import get_spider_friendly_name from edu_sharing_client import ABOUTApi from edu_sharing_client.api.bulk_v1_api import BULKV1Api from edu_sharing_client.api.iam_v1_api import IAMV1Api @@ -102,7 +101,7 @@ class CreateGroupType(Enum): MediaCenter = 2 cookie: str = None - resetVersion: bool = True + resetVersion: bool = False version: any apiClient: ESApiClient aboutApi: ABOUTApi @@ -145,7 +144,6 @@ def sync_node(self, spider, type, properties): group=spider.name, group_by=groupBy, reset_version=EduSharing.resetVersion, - _request_timeout=30 # prevents crawlers from hanging ) except ApiException as e: # ToDo: @@ -369,11 +367,8 @@ def map_license(self, spaces, license): def transform_item(self, uuid, spider, item): spaces = { "ccm:replicationsource": spider.name, - "ccm:replicationsource_DISPLAYNAME": get_spider_friendly_name(spider.name), "ccm:replicationsourceid": item["sourceId"], "ccm:replicationsourcehash": item["hash"], - "ccm:replicationsourcedisplayname": get_spider_friendly_name(spider.name), - "ccm:objecttype": item["type"], "ccm:replicationsourceuuid": uuid, "cm:name": item["lom"]["general"]["title"], "ccm:wwwurl": item["lom"]["technical"]["location"][0] if "location" in item["lom"]["technical"] else None, @@ -383,12 +378,6 @@ def transform_item(self, uuid, spider, item): if "aggregationLevel" in item["lom"]["general"] else None, "cclom:title": item["lom"]["general"]["title"], - "cclom:aggregationlevel": str(item["lom"]["general"]["aggregationLevel"]), - - # TODO: HPI LEGACY REMOVE (3 lines) - "ccm:replicationsource_DISPLAYNAME": get_spider_friendly_name(spider.name), - "ccm:hpi_lom_general_aggregationlevel": str(item["lom"]["general"]["aggregationLevel"]), - "ccm:hpi_searchable": str(item["searchable"]), } if "identifier" in item["lom"]["general"]: spaces["cclom:general_identifier"] = item["lom"]["general"]["identifier"] @@ -397,10 +386,7 @@ def transform_item(self, uuid, spider, item): if "status" in item: spaces["ccm:editorial_state"] = item["status"] if "origin" in item: - spaces["ccm:replicationsourceorigin"] = item["origin"] # TODO currently not mapped in edu-sharing - spaces["ccm:replicationsourceorigindisplayname"] = get_spider_friendly_name(item["origin"]) - # TODO: HPI LEGACY REMOVE (1 line) - spaces["ccm:replicationsourceorigin_DISPLAYNAME"] = get_spider_friendly_name(item["origin"]) + spaces["ccm:replicationsourceorigin"] = item["origin"] # TODO currently not mapped in edu-sharing if hasattr(spider, "edu_sharing_source_template_whitelist"): # check if there were whitelisted metadata properties in the edu-sharing source template @@ -567,23 +553,6 @@ def transform_item(self, uuid, spider, item): if not type(spaces[key]) is list: spaces[key] = [spaces[key]] - # Relation information, according to the LOM-DE.doc#7 specifications: http://sodis.de/lom-de/LOM-DE.doc - if "relation" in item["lom"]: - spaces["ccm:hpi_lom_relation"] = item["lom"]["relation"] - # Since Edu-Sharing has no further information about the schema of this attribute it is better to treat it - # as a list of strings and not as a JSON. - for i, element in enumerate(spaces["ccm:hpi_lom_relation"]): - # JSON expects double quotes. - element_str = str(element).replace("\'", "\"") - # JSON to Python dictionary - element_dict = json.loads(element_str) - - # We expect and prefer single quotes in the result. - relation_value = json.dumps(element_dict, sort_keys=True).replace("\"", "\'") - # Remove redundant white spaces. 
- relation_value = ' '.join(relation_value.split()) - spaces["ccm:hpi_lom_relation"][i] = relation_value - return spaces def create_groups_if_not_exists(self, groups, type: CreateGroupType): diff --git a/converter/items.py b/converter/items.py index c54d19c9..ea0d08eb 100644 --- a/converter/items.py +++ b/converter/items.py @@ -193,26 +193,6 @@ class LomClassificationItem(Item): taxonPath = Field(output_processor=JoinMultivalues()) # ToDo: LOM classification 'taxonPath' has no equivalent property in edu-sharing, might be obsolete -class LomRelationResourceItem(Item): - identifier = Field(output_processor=JoinMultivalues()) - catalog = Field() - entry = Field() - description = Field() - -class LomRelationItem(Item): - """ - Following the LOM-DE.doc#7 (Relation) specifications: http://sodis.de/lom-de/LOM-DE.doc . - """ - kind = Field() - resource = Field(serializer=LomRelationResourceItem) - -class LomAnnotationItem(Item): - """ - Following the LOM-DE.doc#8 (Annotation) specifications: http://sodis.de/lom-de/LOM-DE.doc . - """ - entity = Field() - date = Field() - description = Field() class LomBaseItem(Item): """ @@ -226,7 +206,6 @@ class LomBaseItem(Item): lifecycle = Field(serializer=LomLifecycleItem, output_processor=JoinMultivalues()) # rights = Field(serializer=LomRightsItem) technical = Field(serializer=LomTechnicalItem) - relation = Field(serializer=LomRelationItem, output_processor=JoinMultivalues()) class ResponseItem(Item): @@ -339,6 +318,7 @@ class PermissionItem(Item): public = Field() """Determines if this item should be 'public' (= accessible by anyone)""" + class BaseItem(Item): """ BaseItem provides the basic data structure for any crawled item. @@ -475,27 +455,7 @@ class LomClassificationItemLoader(ItemLoader): default_item_class = LomClassificationItem default_output_processor = TakeFirst() -class LomRelationResourceItemLoader(ItemLoader): - default_item_class = LomRelationResourceItem - default_output_processor = TakeFirst() - -class LomRelationItemLoader(ItemLoader): - default_item_class = LomRelationItem - default_output_processor = TakeFirst() - class PermissionItemLoader(ItemLoader): default_item_class = PermissionItem default_output_processor = TakeFirst() - -class LomRelationResourceItemLoader(ItemLoader): - default_item_class = LomRelationResourceItem - default_output_processor = TakeFirst() - -class LomRelationItemLoader(ItemLoader): - default_item_class = LomRelationItem - default_output_processor = TakeFirst() - -class LomAnnotationItemLoader(ItemLoader): - default_item_class = LomAnnotationItem - default_output_processor = TakeFirst() \ No newline at end of file diff --git a/converter/pipelines.py b/converter/pipelines.py index 9a53644c..30f0b98c 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -319,20 +319,12 @@ def process_item(self, raw_item, spider): mapped = [] for entry in json[key]: _id = {} - valuespace: list[dict] = self.valuespaces.data[key] + valuespace = self.valuespaces.data[key] found = False for v in valuespace: labels = list(v["prefLabel"].values()) if "altLabel" in v: - # the Skohub update on 2024-04-19 generates altLabels as a list[str] per language ("de", "en) - # (for details, see: https://github.com/openeduhub/oeh-metadata-vocabs/pull/65) - alt_labels: list[list[str]] = list(v["altLabel"].values()) - if alt_labels and isinstance(alt_labels, list): - for alt_label in alt_labels: - if alt_label and isinstance(alt_label, list): - labels.extend(alt_label) - if alt_label and isinstance(alt_label, str): - 
labels.append(alt_label) + labels = labels + list(v["altLabel"].values()) labels = list(map(lambda x: x.casefold(), labels)) if v["id"].endswith(entry) or entry.casefold() in labels: _id = v["id"] @@ -556,7 +548,6 @@ async def process_item(self, raw_item, spider): log.warning(f"Could not read thumbnail at {url}: {str(e)} (falling back to screenshot)") raise e if "thumbnail" in item: - logging.warn("(falling back to " + ("defaultThumbnail" if "defaultThumbnail" in item else "screenshot") + ")") del item["thumbnail"] return await self.process_item(raw_item, spider) else: diff --git a/converter/settings.py b/converter/settings.py index 985d99db..a839a3ea 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -79,10 +79,6 @@ # See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay # See also autothrottle settings and docs DOWNLOAD_DELAY = 0 - -# Configure a delay between the parsing executions. (default: 0) -PARSE_DELAY = 0 - # The download delay setting will honor only one of: # CONCURRENT_REQUESTS_PER_DOMAIN = 16 # CONCURRENT_REQUESTS_PER_IP = 16 @@ -191,7 +187,7 @@ # "response", "sourceId", # Too much clutter - "thumbnail", + # "thumbnail", "type", "uuid", "valuespaces", diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 7dbb7cbe..e9e036bb 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -94,12 +94,6 @@ async def parse(self, response): if self.getId(response) is not None and self.getHash(response) is not None: if not self.hasChanged(response): return None - - # Avoid stressing the servers across calls of this method. - settings = get_project_settings() - if "PARSE_DELAY" in settings and float(settings.get('PARSE_DELAY')) > 0: - time.sleep(float(settings.get("PARSE_DELAY"))) - main = self.getBase(response) main.add_value("lom", self.getLOM(response).load_item()) main.add_value("valuespaces", self.getValuespaces(response).load_item()) @@ -159,7 +153,6 @@ def getLOM(self, response) -> LomBaseItemloader: lom.add_value("technical", self.getLOMTechnical(response).load_item()) lom.add_value("educational", self.getLOMEducational(response).load_item()) lom.add_value("classification", self.getLOMClassification(response).load_item()) - lom.add_value("relation", self.getLOMRelation(response).load_item()) return lom def getBase(self, response=None) -> BaseItemLoader: @@ -190,9 +183,6 @@ def getLicense(self, response=None) -> LicenseItemLoader: def getLOMClassification(self, response=None) -> LomClassificationItemLoader: return LomClassificationItemLoader(response=response) - def getLOMRelation(self, response=None) -> LomRelationItemLoader: - return LomRelationItemLoader(response=response) - def getPermissions(self, response=None) -> PermissionItemLoader: permissions = PermissionItemLoader(response=response) # default all materials to public, needs to be changed depending on the spider! 
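With the custom PARSE_DELAY setting and the corresponding time.sleep() call in LomBase.parse() removed above, pacing between requests is left to Scrapy's built-in throttling, which the settings.py context above already points to. A minimal sketch of how that could look in settings.py follows; the setting names are standard Scrapy options, but the concrete values are illustrative and not part of this patch:

# Scrapy's own throttling can stand in for the removed custom PARSE_DELAY sleep.
# The values below are examples only.
DOWNLOAD_DELAY = 0.5              # fixed pause (seconds) between consecutive requests
RANDOMIZE_DOWNLOAD_DELAY = True   # jitter the pause to be gentler on the source servers
AUTOTHROTTLE_ENABLED = True       # adapt the delay to observed server response times
AUTOTHROTTLE_START_DELAY = 0.5    # delay used before latency feedback is available
AUTOTHROTTLE_MAX_DELAY = 10.0     # upper bound for the adaptive delay

Because these settings act at the downloader level, they pace every request of a crawl, which is roughly what the removed per-parse sleep approximated.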
diff --git a/converter/spiders/fwu_spider.py b/converter/spiders/fwu_spider.py deleted file mode 100644 index 297fb8ba..00000000 --- a/converter/spiders/fwu_spider.py +++ /dev/null @@ -1,241 +0,0 @@ -import json -import datetime -import sys -import time -import boto3 -from bs4 import BeautifulSoup - -import requests -import scrapy.http -from scrapy.spiders import CrawlSpider -from scrapy.spidermiddlewares.httperror import HttpError - -import converter.env as env -from converter.constants import Constants -from converter.items import * -from converter.spiders.base_classes.lom_base import LomBase - - -class FWUSpider(CrawlSpider, LomBase): - """ - This crawler fetches data from the S3, where the FWU contents are stored. - - For better understanding, please refer to LOM documentation(http://sodis.de/lom-de/LOM-DE.doc) and openduhub / - oeh-search-etl on Github - - Author: Team CaptnEdu - """ - # ToDo: Comment out name - # name = 'fwu_spider' - friendlyName = 'FWU' - version = '0.1' - files_index = [5501191, 5501193, 5501202, 5501207, 5501211, 5501213, 5501219, 5501222, 5501224, 5501225, 5501234, - 5501235, 5501238, 5501239, 5501245, 5501248, 5501252, 5501259, 5501267, 5501454, 5501458, 5501460, - 5501472, 5501478, 5501588, 5501595, 5501597, 5501630, 5501638, 5501649, 5501655, 5501656, 5501657, - 5501665, 5501685, 5511001, 5511002, 5511003, 5511004, 5511005, 5511006, 5511018, 5511019, 5511024, - 5511044, 5511045, 5511050, 5511057, 5511089, 5511093, 5511094, 5511095, 5511098, 5511099, 5511100, - 5511102, 5511106, 5511123, 5511128, 5511138, 5511184, 5511356, 5521211, 5521227, 5521287, 5521289, - 5521310, 5521344, 5521345, 5521348, 5521354, 5521366, 5521370, 5521405, 5521408, 5521411, 5521413, - 5521415, 5521418, 5521427] - - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) - self.s3_url = env.get('S3_ENDPOINT_URL') - self.s3_access_key = env.get('S3_ACCESS_KEY') - self.s3_secret_key = env.get('S3_SECRET_KEY') - self.s3_bucket = env.get('S3_BUCKET_NAME') - self.download_delay = float(env.get('SODIX_DOWNLOAD_DELAY', default='0.5')) - - def start_requests(self): - yield self.make_request() - - def make_request(self): - s3 = boto3.resource( - 's3', - endpoint_url=self.s3_url, - aws_access_key_id=self.s3_access_key, - aws_secret_access_key=self.s3_secret_key, - ) - - fwu_json = {} - - for index in self.files_index: - key = str(index) + '/index.html' - s3_object = s3.Object(bucket_name=self.s3_bucket, key=key) - html_string = s3_object.get()['Body'].read() - title = self.get_data(html_string, 'pname') - description = self.get_data(html_string, 'ptext') - thumbnail_path = self.get_data(html_string, 'player_outer') - - thumbnail_bytes = s3.Object(bucket_name=self.s3_bucket, key=str(index) + '/' + thumbnail_path) - thumbnail = thumbnail_bytes.get()['Body'].read() - - fwu_object = {'title': title, 'description': description, 'thumbnail': thumbnail.hex()} - fwu_json[f'fwu-object-{str(index)}'] = fwu_object - - fwu_json_all = json.dumps(fwu_json) - - # Possible Mock Response - #ToDo: It seems, that in scrapy it is forbidden to send NO URL!!!! 
- - return fwu_json_all - - def parse(self, response): - json_response = json.loads(response) - print(f'JSON RESPONSE: {json_response}') - # sys.exit("Just stop for the JSON output.") - metadata = json_response['data']['findAllMetadata'] - - # split response metadata into one response per metadata object - for meta_obj in metadata: - response_copy = response.copy() - response_copy.meta['item'] = meta_obj - response_copy._set_body(json.dumps(meta_obj)) - - # In order to transfer data to CSV/JSON - yield LomBase.parse(self, response_copy) - - def getBase(self, response): - # self.item_pos += 1 - # self.log_progress() - metadata = response.meta['item'] - - base = LomBase.getBase(self, response) - base.add_value('thumbnail', metadata['thumbnail']) - # ToDo: Do we need it here? Same on line 184. - base.add_value('origin', 'FWU Institut für Film und Bild in Wissenschaft und Unterricht' - ' gemeinnützige GmbH') - - return base - - def getId(self, response): - metadata = response.meta['item'] - return metadata['id'] - - def getHash(self, response): - return str(self.version) + str(datetime.datetime.now()) - - def mapResponse(self, response): - r = ResponseItemLoader(response=response) - r.add_value('status', response.status) - r.add_value('headers', response.headers) - - return r - - def getLOMEducational(self, response=None): - educational = LomBase.getLOMEducational(self, response) - # ToDo: Is this needed furthermore - educational.add_value('language', 'german') - - return educational - - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - metadata = response.meta['item'] - keywords = ['FWU', metadata['title']] - - general.add_value('aggregationLevel', '1') - general.add_value('title', metadata['title']) - general.add_value('language', 'german') - general.add_value('description', metadata['description']) - general.add_value('keyword', keywords) - - return general - - def getLicense(self, response=None): - license = LomBase.getLicense(self, response) - - # ToDo: Clarify the official license - copyright - license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) - license.replace_value('description', Constants.LICENSE_NONPUBLIC) - - return license - - def getLOMLifecycle(self, response=None) -> LomLifecycleItemloader: - lifecycle = LomBase.getLOMLifecycle(self, response) - - #ToDo: Ask PO for publisher - lifecycle.add_value('role', 'publisher') - lifecycle.add_value('organization', 'FWU Institut für Film und Bild in Wissenschaft und Unterricht' - ' gemeinnützige GmbH') - - return lifecycle - - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - #ToDo: Check the right format. Sodix make 'application/pdf' - technical.add_value('format', 'application/pdf') - # ToDo: Add location. S3 URL? Looks like the wwwurl. - technical.add_value('location', '') - # ToDo: Does it make sense to specify the size? - # technical.add_value('size', metadata['media']['size']) - - return technical - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - # ToDo: Is this the right learningResourceType? Clarify with PO. 
- valuespaces.add_value('learningResourceType', 'http://w3id.org/openeduhub/vocabs/learningResourceType/web_page') - - return valuespaces - - def getLOMAnnotation(self, response=None) -> LomAnnotationItemLoader: - annotation = LomBase.getLOMAnnotation(self, response) - - annotation.add_value('entity', 'crawler') - annotation.add_value('description', 'searchable==1') - - return annotation - - def getLOMRelation(self, response=None) -> LomRelationItemLoader: - relation = LomBase.getLOMRelation(self, response) - - return relation - - def getPermissions(self, response): - permissions = LomBase.getPermissions(self, response) - # ToDo: Managed by the permission script. Add it to the script. - permissions.add_value('autoCreateGroups', True) - permissions.add_value('groups', ['public']) - - return permissions - - def get_data(self, body: str, class_name: str): - if not class_name == "pname" and not class_name == "ptext" and not class_name == "player_outer": - raise RuntimeError( - f'False value "{class_name}" for class_name in get_data(). Options: pname, ptext, player_outer') - - s = BeautifulSoup(body, 'html.parser') - - html_snippet = s.find_all("div", class_=class_name) - html_snippet = str(html_snippet) - - if class_name == "player_outer": - index_start = html_snippet.index("(", 0) + 1 - index_end = html_snippet.index(")", 2) - else: - index_start = html_snippet.index(">", 0) + 1 - index_end = html_snippet.index("<", 2) - - result = html_snippet[index_start:index_end] - result = result.strip() - - if class_name != "player_outer": - self.validate_result(class_name, result) - - return result - - def validate_result(self, class_name: str, result: str): - data_definition = "" - - if class_name == "pname": - data_definition = "Title" - elif class_name == "ptext": - data_definition = "Description" - - if result is None or result == "" or result == " ": - raise RuntimeError(f'{data_definition} not found in class "{class_name}"') - - -class UnexpectedResponseError(Exception): - pass diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index b1ab98f6..79316b46 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -7,15 +7,11 @@ from .base_classes import LomBase -from converter.spiders.base_classes import LomBase - - class MediothekPixiothekSpider(CrawlSpider, LomBase): """ - This crawler fetches data from the Mediothek/Pixiothek. The API request sends all results in one page. The outcome - is an JSON array which will be parsed to their elements. + This crawler fetches data from the Mediothek/Pixiothek. The API request sends all results in one page. The outcome is an JSON array which will be parsed to their elements. - Author: Ioannis Koumarelas, ioannis.koumarelas@gmail.com , Schul-Cloud, Content team. + Author: Timur Yure, timur.yure@capgemini.com , Capgemini for Schul-Cloud, Content team. """ name = "mediothek_pixiothek_spider" @@ -48,6 +44,9 @@ async def parse(self, response: scrapy.http.TextResponse, **kwargs): copy_response.meta["item"] = element yield await LomBase.parse(self, response=copy_response) + # def _if_exists_add(self, edu_dict: dict, element_dict: dict, edu_attr: str, element_attr: str): + # if element_attr in element_dict: + # edu_dict[edu_attr] = element_dict[element_attr] def getId(self, response) -> str: # Element response as a Python dict. @@ -81,8 +80,6 @@ def getBase(self, response): # portal." 
base.add_value("thumbnail", element_dict["previewImageUrl"]) - base.add_value("searchable", element_dict.get("searchable", "0")) - return base def getLOMGeneral(self, response): @@ -91,10 +88,9 @@ def getLOMGeneral(self, response): # Element response as a Python dict. element_dict = response.meta["item"] - general.add_value("title", element_dict["title"]) - - general.add_value("aggregationLevel", element_dict["aggregation_level"]) - + # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? + # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel + general.add_value("title", element_dict["einzeltitel"]) # self._if_exists_add(general, element_dict, "description", "kurzinhalt") if "kurzinhalt" in element_dict: general.add_value("description", element_dict["kurzinhalt"]) @@ -139,273 +135,25 @@ def getLOMTechnical(self, response): @staticmethod def is_public(element_dict) -> bool: """ - Licensing information is controlled via the 'oeffentlich' flag. When it is '1' it is available to the public, - otherwise only to Thuringia. Therefore, when the latter happens we set the public to private, and set the groups - and mediacenters accordingly. - """ - permissions = LomBase.getPermissions(self, response) - - def getPermissions(self, response): - """ - Licensing information is controlled via the 'oeffentlich' flag. When it is '1' it is available to the public, - otherwise only to Thuringia. Therefore, when the latter happens we set the public to private, and set the groups - and mediacenters accordingly. - """ - permissions = LomBase.getPermissions(self, response) - - # Self-explained. Only 1 media center in this case. - permissions.add_value("autoCreateGroups", True) - # permissions.add_value("autoCreateMediacenters", True) - - element_dict = response.meta["item"] - permissions.replace_value('public', False) - if "oeffentlich" in element_dict and element_dict["oeffentlich"] == "0": # private - permissions.add_value('groups', ['Thuringia-private']) - # permissions.add_value('mediacenters', [self.name]) # only 1 mediacenter. - else: - permissions.add_value('groups', ['Thuringia-public']) - - return permissions - - def getLOMRelation(self, response=None) -> LomRelationItemLoader: - """ - Helps implement collections using relations as described in the LOM-DE.doc#7 (Relation) specifications: - http://sodis.de/lom-de/LOM-DE.doc . - """ - relation = LomBase.getLOMRelation(self, response) - - # Element response as a Python dict. - element_dict = response.meta["item"] - - relation.add_value("kind", element_dict["relation"][0]["kind"]) - - resource = LomRelationResourceItem() - resource["identifier"] = element_dict["relation"][0]["resource"]["identifier"] - relation.add_value("resource", resource) - - return relation - - def prepare_collections(self, prepared_elements): - """ - Prepares Mediothek and Pixiothek collections according to their strategies. 
- """ - mediothek_elements = [] - pixiothek_elements = [] - for element_dict in prepared_elements: - if element_dict["pixiothek"] == "1": - pixiothek_elements.append(element_dict) - else: - mediothek_elements.append(element_dict) - - pixiothek_elements_grouped, mediothek_elements = \ - self.group_pixiothek_elements(pixiothek_elements, mediothek_elements) - - mediothek_elements_grouped = self.group_mediothek_elements(mediothek_elements) - - collection_elements = [] - collection_elements.extend(pixiothek_elements_grouped) - collection_elements.extend(mediothek_elements_grouped) - - return collection_elements - - def group_by_elements(self, elements, group_by): - """ - This method groups the corresponding elements based on the provided group_by parameter. This changes the logic - so that every element in the end maps to an educational element in the https://www.schulportal-thueringen.de. - """ - groups = {} - for idx, element in enumerate(elements): - if group_by not in element: - logging.debug("Element " + str(element["id"]) + " does not contain information about " + group_by) - continue - group_by_value = element[group_by] - if group_by_value not in groups: - groups[group_by_value] = [] - groups[group_by_value].append(element) - - # For consistency sort all values per key. - for key in groups.keys(): - groups[key] = sorted(groups[key], key=lambda x: int(x["id"])) - - return groups - - def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements): - """ - Collection elements in Pixiothek have a "parent" (representative) Mediothek element that describes the whole - collection. Our task in this method is for every Pixiothek group to find its Mediothek element and add the - connections between it and the Pixiothek elements. These Mediothek elements will not be considered as children - of Mediothek collections. - - If we cannot find such a "parent" element among the Mediothek elements, then we select one of them as the - collection parent (representative element) and set some of its attributes accordingly. - """ - - default_download_url = "https://www.schulportal-thueringen.de/html/images/" \ - "themes/tsp2/startseite/banner_phone_startseite.jpg?id=" - - mediothek_default_download_url = "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" - - pixiothek_elements_grouped_by = self.group_by_elements(pixiothek_elements, "serientitel") - - # Group Mediothek elements by einzeltitel. We are going to use this dictionary in the following loop to find - # Pixiothek items that have this value in their serientitel. - mediothek_elements_grouped_by_einzeltitel = self.group_by_elements(mediothek_elements, "einzeltitel") - - single_element_collection_serientitel = "Mediensammlungen zur freien Verwendung im Bildungsbereich" - - collection_elements = [] - - edusharing = EduSharing() - - # Keeping track of "parent" (representative) elements to remove them from the Mediothek elements. - parent_mediothek_elements = set() - - # Generate new "representative" (parent) element. - for group_by_key in sorted(pixiothek_elements_grouped_by.keys()): - group = pixiothek_elements_grouped_by[group_by_key] - serientitel = None - if "serientitel" in group[0]: - serientitel = group[0]["serientitel"] - - # If a single Mediothek element exists with the same einzeltitel as this group's serientitel, then we shall use it - # as the parent element of this collection. 
- if serientitel in mediothek_elements_grouped_by_einzeltitel and \ - len(mediothek_elements_grouped_by_einzeltitel[serientitel]) == 1 and \ - mediothek_elements_grouped_by_einzeltitel[serientitel][0]["id"] not in parent_mediothek_elements: # Is not used as a parent of another collection. - - parent_element = copy.deepcopy(mediothek_elements_grouped_by_einzeltitel[serientitel][0]) - parent_mediothek_elements.add(parent_element["id"]) - parent_element["title"] = parent_element["einzeltitel"] - parent_element["downloadUrl"] = mediothek_default_download_url + str(parent_element["mediumId"]) - - # If the found Mediothek element has a serientitel equal to a predefined value, which indicates that - # this is a collection item (which should normally be a parent and not a single element), we treat - # specially and set the title equal to the einzeltitel, which already describes the collection. - if parent_element["serientitel"] == single_element_collection_serientitel: - group.append(copy.deepcopy(mediothek_elements_grouped_by_einzeltitel[serientitel][0])) - - # Else, we shall use any random element of this group as the parent element. - else: - parent_element = copy.deepcopy(group[0]) - - # We need to assign a new ID, different from the previous ones. For this purpose, we decide to modify - # the ID of the existing element and add some suffix to note that this is an artificial element. - # Clearly, such a big number for an ID will have no collisions with existing real elements. - artificial_element_suffix = "000000" - parent_element["id"] = parent_element["id"] + artificial_element_suffix - - # Assign a fake URL that we can still recognize if we ever want to allow the access of the collection - # content. - parent_element["downloadUrl"] = default_download_url + parent_element["id"] - parent_element["title"] = parent_element["serientitel"] - - parent_element["searchable"] = 1 - parent_element["aggregation_level"] = 2 - parent_element["uuid"] = edusharing.buildUUID(parent_element["downloadUrl"]) - - for element in group: - element["searchable"] = 0 - element["aggregation_level"] = 1 - element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) - - element["title"] = element["dateiBezeichnung"] - - # Add connections from parent to children elements. - parent_element, group = self.relate_parent_with_children_elements(parent_element, group) - - collection_elements.append(parent_element) - collection_elements.extend(group) - - # Remove Mediothek elements which were used as parents. We go in reverse mode as only then the indices keep - # making sense as we keep deleting elements. The other way around, every time you delete an element the - # consequent indices are not valid anymore. - for i in reversed(range(len(mediothek_elements))): - if mediothek_elements[i]["id"] in parent_mediothek_elements: - del (mediothek_elements[i]) - - return collection_elements, mediothek_elements - - def group_mediothek_elements(self, mediothek_elements): - """ - Collection elements in Mediothek have no special element to represent them (a parent element). Therefore, we - select one of them as the collection representative (parent element) and set some of its attributes accordingly. + Temporary solution to check whether the content is public and only save it if this holds. 
""" - mediothek_default_download_url = "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" - - mediothek_elements_grouped_by = self.group_by_elements(mediothek_elements, "mediumNummer") - - # Specifies a special case when a - single_element_collection_serientitel = "Mediensammlungen zur freien Verwendung im Bildungsbereich" - - collection_elements = [] - - edusharing = EduSharing() # Used to generate UUIDs. - - # Generate new "parent" (representative) element. - for group_by_key in sorted(mediothek_elements_grouped_by.keys()): - group = mediothek_elements_grouped_by[group_by_key] - parent_element = copy.deepcopy(group[0]) - - # We need to assign a new ID, different from the previous ones. For this purpose, we decide to modify - # the ID of the existing element and add some suffix to note that this is an artificial element. - # Clearly, such a big number for an ID will have no collisions with existing real elements. - artificial_element_suffix = "000000" - parent_element["id"] = parent_element["id"] + artificial_element_suffix - - parent_element["downloadUrl"] = mediothek_default_download_url + str(parent_element["mediumId"]) - - parent_element["title"] = parent_element["einzeltitel"] - - parent_element["searchable"] = 1 - parent_element["aggregation_level"] = 2 - parent_element["uuid"] = edusharing.buildUUID(parent_element["downloadUrl"]) - - for element in group: - element["searchable"] = 0 - element["aggregation_level"] = 1 - element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) - - element["title"] = element["dateiBezeichnung"] - - # Add connections from parent to children elements. - parent_element, group = self.relate_parent_with_children_elements(parent_element, group) - - collection_elements.append(parent_element) - collection_elements.extend(group) - - return collection_elements - - def relate_parent_with_children_elements(self, parent_element, children_elements): - # Add connections from "parent" to "children" elements. - parent_element["relation"] = [ - { - "kind": "haspart", - "resource": { - "identifier": [ - # Use the ccm:replicationsourceuuid to refer to the children elements. - element["uuid"] for element in children_elements - ] - } - } - ] - - # Add connections from "children" elements to "parent". - for element in children_elements: - element["relation"] = [ - { - "kind": "ispartof", - "resource": { - # Use the ccm:replicationsourceuuid to refer to the parent element. - "identifier": [parent_element["uuid"]] - } - } - ] - return parent_element, children_elements - - def prepare_element(self, element_dict): - # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? - # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel - # Please keep in mind that we override this value for parent elements of collections. - element_dict["title"] = element_dict["einzeltitel"] - - return element_dict \ No newline at end of file + return element_dict["oeffentlich"] == "1" + + # TODO: This code snippet will be enabled in the next PR for licensed content, after clarifications are made. + # + # def getPermissions(self, response): + # """ + # Licensing information is controlled via the 'oeffentlich' flag. When it is '1' it is available to the public, + # otherwise only to Thuringia. Therefore, when the latter happens we set the public to private, and set the groups + # and mediacenters accordingly. 
+ # """ + # permissions = LomBase.getPermissions(self, response) + # + # element_dict = response.meta["item"] + # + # if element_dict["oeffentlich"] == "0": # private + # permissions.replace_value('public', False) + # permissions.add_value('groups', ['Thuringia']) + # permissions.add_value('mediacenters', 'mediothek') # only 1 mediacenter. + # + # return permissions diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 56e03caf..8c3dd16b 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -1,11 +1,11 @@ +from datetime import datetime + import xmltodict as xmltodict from lxml import etree from scrapy.spiders import CrawlSpider -import scrapy as scrapy - -from converter.constants import Constants from converter.items import * -from converter.spiders.base_classes import LomBase +from .base_classes import LomBase +import scrapy class MerlinSpider(CrawlSpider, LomBase): @@ -13,13 +13,13 @@ class MerlinSpider(CrawlSpider, LomBase): This crawler fetches data from the Merlin content source, which provides us paginated XML data. For every element in the returned XML array we call LomBase.parse(), which in return calls methods, such as getId(), getBase() etc. - Author: Ioannis Koumarelas, Schul-Cloud, Content team. + Author: Ioannis Koumarelas, ioannis.koumarelas@hpi.de, Schul-Cloud, Content team. """ name = "merlin_spider" url = "https://merlin.nibis.de/index.php" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "Merlin" # name as shown in the search ui - version = "0.2" # the version of your crawler, used to identify if a reimport is necessary + version = "0.1" # the version of your crawler, used to identify if a reimport is necessary apiUrl = "https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*" # * regular expression, to represent all possible values. limit = 100 @@ -49,9 +49,6 @@ async def parse(self, response: scrapy.http.Response): root = etree.XML(response.body) tree = etree.ElementTree(root) - # Get the total number of possible elements - elements_total = int(tree.xpath('/root/sum')[0].text) - # If results are returned. elements = tree.xpath("/root/items/*") if len(elements) > 0: @@ -60,31 +57,25 @@ async def parse(self, response: scrapy.http.Response): element_xml_str = etree.tostring( element, pretty_print=True, encoding="unicode" ) - try: - element_dict = xmltodict.parse(element_xml_str) - element_dict = element_dict["data"] + element_dict = xmltodict.parse(element_xml_str) - # Preparing the values here helps for all following logic across the methods. - self.prepare_element(element_dict) + # Temporary solution for public-only content. + # TODO: remove this when licensed content are enabled! + if not self.is_public(element_dict["data"]): + continue - # If there is no available county (Kreis) code, then we do not want to deal with this element. - if not("county_ids" in element_dict - and element_dict["county_ids"] is not None - and len(element_dict["county_ids"]) > 0): - continue + # TODO: It's probably a pointless attribute. + # del element_dict["data"]["score"] - # TODO: It's probably a pointless attribute. - # del element_dict["data"]["score"] + # Passing the dictionary for easier access to attributes. + copyResponse.meta["item"] = element_dict["data"] - # Passing the dictionary for easier access to attributes. 
- copyResponse.meta["item"] = element_dict + # In case JSON string representation is preferred: + # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) + copyResponse._set_body(element_xml_str) - # In case JSON string representation is preferred: - # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) - copyResponse._set_body(element_xml_str) - - except Exception: - pass + if self.hasChanged(copyResponse): + yield self.handleEntry(copyResponse) # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. await LomBase.parse(self, copyResponse) @@ -92,8 +83,8 @@ async def parse(self, response: scrapy.http.Response): # TODO: To not stress the Rest APIs. # time.sleep(0.1) - # If we are below the total available numbers continue fetching more pages. - if current_expected_count < elements_total: + # If the number of returned results is equal to the imposed limit, it means that there are more to be returned. + if len(elements) == self.limit: self.page += 1 url = self.apiUrl.replace("%start", str(self.page * self.limit)).replace( "%anzahl", str(self.limit) @@ -117,13 +108,13 @@ def getHash(self, response): return ( hash(self.version) + hash(self.getId(response)) - # + self._date_to_integer(datetime.date(datetime.now())) + + self._date_to_integer(datetime.date(datetime.now())) ) - # def _date_to_integer(self, dt_time): - # """ Converting the date to an integer, so it is useful in the getHash method - # Using prime numbers for less collisions. """ - # return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day + def _date_to_integer(self, dt_time): + """ Converting the date to an integer, so it is useful in the getHash method + Using prime numbers for less collisions. """ + return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day def mapResponse(self, response): r = ResponseItemLoader(response=response) @@ -137,23 +128,7 @@ async def handleEntry(self, response): def getBase(self, response): base = LomBase.getBase(self, response) - - # Element response as a Python dict. - element_dict = dict(response.meta["item"]) - - base.add_value("thumbnail", element_dict.get("thumbnail", "")) # get or default - - # As a backup, if no other thumbnail URL is available. - element_dict["hardcodedDefaultLogoUrl"] = "/logos/bs_logos/merlin.png" - - # By the order of preference. As soon as one of these default thumbnails is available you keep that. - for default_thumbnail in ["srcLogoUrl", "logo", "hardcodedDefaultLogoUrl"]: - if default_thumbnail in element_dict: - base.add_value("defaultThumbnail", "https://merlin.nibis.de" + element_dict[default_thumbnail]) - break - - # Adding a default searchable value to constitute this element (node) as a valid-to-be-returned object. - base.add_value("searchable", "1") + base.add_value("thumbnail", response.xpath("/data/thumbnail/text()").get()) return base @@ -164,29 +139,12 @@ def getLOMGeneral(self, response): "description", response.xpath("/data/beschreibung/text()").get() ) - # Adding a default aggregationLevel, which can be used during filtering queries. - general.add_value("aggregationLevel", "1") - return general def getUri(self, response): location = response.xpath("/data/media_url/text()").get() return "http://merlin.nibis.de" + location - def getLicense(self, response): - license = LomBase.getLicense(self, response) - - # Element response as a Python dict. 
- element_dict = response.meta["item"] - - # If there is only one element and is the County code 3100, then it is public content. - if len(element_dict["county_ids"]) == 1 and str(element_dict["county_ids"][0]) == "county-3100": - license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # public - else: - license.replace_value('internal', Constants.LICENSE_NONPUBLIC) # private - - return license - def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) @@ -196,17 +154,6 @@ def getLOMTechnical(self, response): return technical - def getLOMAnnotation(self, response=None) -> LomAnnotationItemLoader: - annotation = LomBase.getLOMAnnotation(self, response) - - # Element response as a Python dict. - element_dict = response.meta["item"] - - annotation.add_value("entity", element_dict["annotation"]["entity"]) - annotation.add_value("description", element_dict["annotation"]["description"]) - - return annotation - def getValuespaces(self, response): valuespaces = LomBase.getValuespaces(self, response) @@ -247,81 +194,36 @@ def getValuespaces(self, response): valuespaces.add_value("learningResourceType", resource_types) return valuespaces - def getPermissions(self, response): + def is_public(self, element_dict) -> bool: """ - In case license information, in the form of counties (Kreis codes), is available. This changes the permissions from - public to private and sets accordingly the groups and mediacenters. For more information regarding the available - Merlin county (kreis) codes please consult 'http://merlin.nibis.de/index.php?action=kreise' + Temporary solution to check whether the content is public and only save it if this holds. """ + return not ( + element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0 + ) - permissions = LomBase.getPermissions(self, response) - - element_dict = response.meta["item"] - - permissions.replace_value("public", False) - permissions.add_value("autoCreateGroups", True) - - groups = [] - - county_ids = element_dict["county_ids"] - public_county = "county-3100" - - # If there is only one element and is the County code 3100, then it is public content. - if len(county_ids) == 1 and str(county_ids[0]) == public_county: - # Add to state-wide public group. - # groups.append("state-LowerSaxony-public") - groups.append("LowerSaxony-public") - - # Add 1 group per County-code, which in this case is just "100" (3100). - groups.extend(county_ids) - else: - # Add to state-wide private/licensed group. - # groups.append("state-LowerSaxony-licensed") - groups.append("LowerSaxony-private") - - # If County code 100 (country-wide) is included in the list, remove it. - if public_county in county_ids: - county_ids.remove(public_county) - - # Add 1 group per county. - groups.extend(county_ids) - - permissions.add_value("groups", groups) - - return permissions - - def prepare_element(self, element_dict): - # Step 1. Prepare county (Kreis) codes. - if "kreis_id" in element_dict and element_dict["kreis_id"] is not None: - county_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... - if not isinstance(county_ids, list): # one element - county_ids = [county_ids] - county_ids = sorted(county_ids, key=lambda x: int(x)) - - # Add prefix "3" to conform with nationally-assigned IDs: - # https://de.wikipedia.org/wiki/Liste_der_Landkreise_in_Deutschland - county_ids = ["3" + id for id in county_ids] - county_ids = ["county-" + x for x in county_ids] - element_dict["county_ids"] = county_ids - - # Step 2. 
Fix thumbnail URL. - thumbnail_prepared = element_dict["thumbnail"] - - # Step 2. Case a: Remove the 3 dots "...". - thumbnail_prepared = thumbnail_prepared.replace("...", "") - - # Step 2. Case b: Replace "%2F" with '/' - # TODO: check why not ALL occurrences are replaced. - thumbnail_prepared = thumbnail_prepared.replace("%2F", "/") - - # Step 2. Case c: Replace the dot after the parent identifier with a '/'. - if element_dict["parent_identifier"] is not None: - parent_identifier = element_dict["parent_identifier"] - subpath_position = thumbnail_prepared.find(parent_identifier) + len(parent_identifier) - if thumbnail_prepared[subpath_position] == ".": - thumbnail_prepared = thumbnail_prepared[:subpath_position] + "/" + thumbnail_prepared[subpath_position + 1:] - - element_dict["thumbnail"] = thumbnail_prepared - - return element_dict - + # TODO: This code snippet will be enabled in the next PR for licensed content, after clarifications are made. + # + # def getPermissions(self, response): + # """ + # In case license information, in the form of Kreis codes, is available. This changes the permissions from + # public to private and sets accordingly the groups and mediacenters. For more information regarding the available + # Merlin kreis codes please consult 'http://merlin.nibis.de/index.php?action=kreise' + # """ + # + # permissions = LomBase.getPermissions(self, response) + # + # element_dict = response.meta["item"] + # + # if element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: # private + # kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... + # if not isinstance(kreis_ids, list): # one element + # kreis_ids = [kreis_ids] + # kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) + # kreis_ids = ["merlin_" + id for id in kreis_ids] # add prefix + # + # permissions.replace_value('public', False) + # permissions.add_value('groups', ['Lower Saxony']) + # permissions.add_value('mediacenters', kreis_ids) + # + # return permissions diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index d7443efa..627bfaa2 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -2,7 +2,7 @@ import converter.env as env from .base_classes import EduSharingBase -from ..items import LomLifecycleItemloader, LomAnnotationItemLoader +from ..items import LomLifecycleItemloader class OEHSpider(EduSharingBase): @@ -37,21 +37,6 @@ def getLOMTechnical(self, response): technical.replace_value("location", response.meta["item"]["properties"]["ccm:wwwurl"][0]) return technical - def getLOMGeneral(self, response): - general = EduSharingBase.getLOMGeneral(self, response) - - # Adding a default aggregationLevel, which can be used during filtering queries. - general.replace_value("aggregationLevel", "1") - return general - - def getLOMAnnotation(self, response=None) -> LomAnnotationItemLoader: - annotation = EduSharingBase.getLOMAnnotation(self, response) - - # Adding a default searchable value to constitute this element (node) as a valid-to-be-returned object. 
- annotation.add_value("entity", "crawler") - annotation.add_value("description", "searchable==1") - - return annotation def getLOMLifecycle(self, response): has_publisher = False @@ -108,12 +93,3 @@ def shouldImport(self, response=None): ) return False return True - - def getPermissions(self, response): - permissions = EduSharingBase.getPermissions(self, response) - - permissions.replace_value("public", False) - permissions.add_value("autoCreateGroups", True) - permissions.add_value("groups", ["public"]) - - return permissions diff --git a/converter/spiders/test.py b/converter/spiders/test.py new file mode 100644 index 00000000..49e27042 --- /dev/null +++ b/converter/spiders/test.py @@ -0,0 +1,217 @@ +import datetime +import json +import logging +import requests +import scrapy +import sys +import vobject + +from converter.constants import Constants +from converter.es_connector import EduSharingConstants +from converter.items import LomAgeRangeItemLoader +from converter.spiders.base_classes.lom_base import LomBase + +class TestSpider(LomBase, scrapy.Spider): + name = "test_spider" + allowed_domains = ["redaktion.openeduhub.net"] + + API_URL = 'https://redaktion.openeduhub.net/edu-sharing/' + MDS_ID = 'mds_oeh' + + total = -1 + offset = 0 + count = 100 + + def __init__(self, **kwargs): + LomBase.__init__(self, **kwargs) + + self.log = logging.getLogger('OehImporter') + self.log.setLevel(logging.DEBUG) + self.log.addHandler(logging.FileHandler('oeh2_output.txt')) + + self.fake_request = scrapy.http.Request(self.API_URL) + self.fake_response = scrapy.http.Response(self.API_URL, request=self.fake_request) + + def start_requests(self): + url = f'https://redaktion.openeduhub.net/edu-sharing/rest/search/v1/queries/-home-/{self.MDS_ID}/ngsearch?contentType=FILES&maxItems={self.count}&skipCount={self.offset}&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-' + headers = { + 'Accept': 'application/json', + 'Content-Type': 'application/json' + } + body = { + 'criteria': [] + } + yield scrapy.Request(url=url, body=json.dumps(body), headers=headers, method='POST', callback=self.parse) + + async def parse(self, response): + data = json.loads(response.body) + + if self.total == -1: + self.total = data['pagination']['total'] + + nodes = data['nodes'] + for j in range(len(nodes)): + node = nodes[j] + self.log.debug(f'{datetime.datetime.now()} {self.offset+j} / {self.total} :: {node["ccm:replicationsource"] if "ccm:replicationsource" in node else ""} :: {node["name"]}') + ending = node['name'].rsplit('.', 1)[-1] + if ending in ('mp4', 'h5p'): + self.log.info('skipped') + continue + + response_copy = self.fake_response.replace(url=node['content']['url']) + self.fake_response.meta['item'] = node + + item = await LomBase.parse(self, response_copy) + yield item + + self.offset += len(nodes) + if self.offset < self.total: + url = f'https://redaktion.openeduhub.net/edu-sharing/rest/search/v1/queries/-home-/{self.MDS_ID}/ngsearch?contentType=FILES&maxItems={self.count}&skipCount={self.offset}&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-' + headers = { + 'Accept': 'application/json', + 'Content-Type': 'application/json' + } + body = { + 'criteria': [] + } + yield scrapy.Request(url=url, body=json.dumps(body), headers=headers, method='POST', callback=self.parse) + + def getProperty(self, name, response): + return ( + response.meta["item"]["properties"][name] + if name in response.meta["item"]["properties"] + else None + ) + + def getBase(self, response): + base = 
LomBase.getBase(self, response) + base.replace_value("thumbnail", response.meta["item"]["preview"]["url"]) + base.replace_value( + "origin", self.getProperty("ccm:replicationsource", response) + ) + if self.getProperty("ccm:replicationsource", response): + # imported objects usually have the content as binary text + # TODO: Sometimes, edu-sharing redirects if no local content is found, and this should be html-parsed + if response.meta["item"]["downloadUrl"]: + try: + r = requests.get(response.meta["item"]["downloadUrl"]) + if r.status_code == 200: + base.replace_value("fulltext", r.text) + except: + logging.warning( + "error fetching data from " + str(response.meta["item"]["downloadUrl"]), + sys.exc_info()[0], + ) + # TODO + #else: + # # try to transform using alfresco + # r = requests.get( + # self.apiUrl + # + "/node/v1/nodes/" + # + response.meta["item"]["ref"]["repo"] + # + "/" + # + response.meta["item"]["ref"]["id"] + # + "/textContent", + # headers={"Accept": "application/json"}, + # ).json() + # if "text" in r: + # base.replace_value("fulltext", r["text"]) + + return base + + # fulltext is handled in base, response is not necessary + async def mapResponse(self, response, fetchData=True): + return await LomBase.mapResponse(self, response, False) + + def getId(self, response=None) -> str: + return response.meta["item"]["ref"]["id"] + + def getHash(self, response=None) -> str: + return self.version + response.meta["item"]["properties"]["cm:modified"][0] + + def getLOMGeneral(self, response): + general = LomBase.getLOMGeneral(self, response) + general.replace_value("title", response.meta["item"]["title"]) + general.add_value( + "keyword", self.getProperty("cclom:general_keyword", response) + ) + general.add_value( + "description", self.getProperty("cclom:general_description", response) + ) + general.replace_value("aggregationLevel", "1") + return general + + def getLOMEducational(self, response): + educational = LomBase.getLOMEducational(self, response) + tar_from = self.getProperty("ccm:educationaltypicalagerange_from", response) + tar_to = self.getProperty("ccm:educationaltypicalagerange_to", response) + if tar_from and tar_to: + range = LomAgeRangeItemLoader() + range.add_value("fromRange", tar_from) + range.add_value("toRange", tar_to) + educational.add_value("typicalAgeRange", range.load_item()) + return educational + + def getLOMLifecycle(self, response): + lifecycle = LomBase.getLOMLifecycle(self, response) + for role in EduSharingConstants.LIFECYCLE_ROLES_MAPPING.keys(): + entry = self.getProperty("ccm:lifecyclecontributer_" + role, response) + if entry and entry[0]: + # TODO: we currently only support one author per role + vcard = vobject.readOne(entry[0]) + if hasattr(vcard, "n"): + given = vcard.n.value.given + family = vcard.n.value.family + lifecycle.add_value("role", role) + lifecycle.add_value("firstName", given) + lifecycle.add_value("lastName", family) + return lifecycle + + def getLOMTechnical(self, response): + technical = LomBase.getLOMTechnical(self, response) + technical.replace_value("format", "text/html") + technical.replace_value("duration", self.getProperty("cclom:duration", response)) + if 'ccm:wwwurl' in response.meta['item']['properties']: + technical.replace_value("location", response.meta["item"]["properties"]["ccm:wwwurl"][0]) + else: + technical.replace_value("location", response.url) + return technical + + def getLicense(self, response): + license = LomBase.getLicense(self, response) + license.add_value("url", response.meta["item"]["license"]["url"]) + 
license.add_value( + "internal", self.getProperty("ccm:commonlicense_key", response) + ) + license.add_value("author", self.getProperty("ccm:author_freetext", response)) + return license + + def getValuespaces(self, response): + valuespaces = LomBase.getValuespaces(self, response) + valuespaces.add_value("discipline", self.getProperty("ccm:taxonid", response)) + valuespaces.add_value( + "intendedEndUserRole", + self.getProperty("ccm:educationalintendedenduserrole", response), + ) + valuespaces.add_value( + "educationalContext", self.getProperty("ccm:educationalcontext", response) + ) + valuespaces.add_value( + "learningResourceType", + self.getProperty("ccm:educationallearningresourcetype", response), + ) + valuespaces.add_value( + "sourceContentType", self.getProperty("ccm:sourceContentType", response) + ) + valuespaces.add_value( + "toolCategory", self.getProperty("ccm:toolCategory", response) + ) + return valuespaces + + def getPermissions(self, response): + permissions = LomBase.getPermissions(self, response) + permissions.replace_value("public", False) + return permissions + + def shouldImport(self, response=None): + return "ccm:collection_io_reference" not in response.meta["item"]["aspects"] \ No newline at end of file diff --git a/schulcloud/oeh_importer.py b/schulcloud/oeh_importer.py index 8a87f531..3db9aebf 100644 --- a/schulcloud/oeh_importer.py +++ b/schulcloud/oeh_importer.py @@ -15,7 +15,6 @@ from edu_sharing_client.rest import ApiException -from converter.items import LomAnnotationItemLoader from converter.spiders.base_classes.lom_base import LomBase, LomAgeRangeItemLoader from converter.es_connector import EduSharingConstants from converter.pipelines import EduSharingCheckPipeline, FilterSparsePipeline, LOMFillupPipeline, NormLicensePipeline,\ @@ -246,15 +245,6 @@ def getLOMTechnical(self, response): technical.replace_value("location", response.url) return technical - def getLOMAnnotation(self, response=None) -> LomAnnotationItemLoader: - annotation = LomBase.getLOMAnnotation(self, response) - - # Adding a default searchable value to constitute this element (node) as a valid-to-be-returned object. 
- annotation.add_value("entity", "crawler") - annotation.add_value("description", "searchable==1") - - return annotation - def getLicense(self, response): license = LomBase.getLicense(self, response) license.add_value("url", response.meta["item"]["license"]["url"]) From d258b1763128281869944245ee4c52d0f76ad6b3 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 28 Aug 2024 10:34:51 +0200 Subject: [PATCH 543/590] Fix SkoHub "altLabel" processing in pipelines.py --- converter/pipelines.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 30f0b98c..c2e151b0 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -319,12 +319,20 @@ def process_item(self, raw_item, spider): mapped = [] for entry in json[key]: _id = {} - valuespace = self.valuespaces.data[key] + valuespace: list[dict] = self.valuespaces.data[key] found = False for v in valuespace: labels = list(v["prefLabel"].values()) if "altLabel" in v: - labels = labels + list(v["altLabel"].values()) + # the Skohub update on 2024-04-19 generates altLabels as a list[str] per language ("de", "en) + # (for details, see: https://github.com/openeduhub/oeh-metadata-vocabs/pull/65) + alt_labels: list[list[str]] = list(v["altLabel"].values()) + if alt_labels and isinstance(alt_labels, list): + for alt_label in alt_labels: + if alt_label and isinstance(alt_label, list): + labels.extend(alt_label) + if alt_label and isinstance(alt_label, str): + labels.append(alt_label) labels = list(map(lambda x: x.casefold(), labels)) if v["id"].endswith(entry) or entry.casefold() in labels: _id = v["id"] From ab24bbf8b047e4f7de3b6b5f75b53d2d44067ed2 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 28 Aug 2024 12:12:49 +0200 Subject: [PATCH 544/590] fix: pydantic ValidationErrors for several properties - change: removed fallbacks to "None" for empty fields - restructured the program flow to make the "transform_item()"-method easier to read - fix: fixed imports for ApiException Errors which typically occur during "find_item()"-calls when an item does not exist in the edu-sharing repository yet background information: - the new API Client uses pydantic to validate properties and throws ValidationErrors if type-requirements are not met - the previous implementation used "None" as a fallback, which is no longer possible for fields that are expecting a strict type --- converter/es_connector.py | 60 ++++++++++++++++++++------------------- 1 file changed, 31 insertions(+), 29 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index b02412fe..177dd8bc 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -18,6 +18,7 @@ from converter import env from converter.constants import Constants +from edu_sharing_openapi.edu_sharing_client import ApiException from edu_sharing_openapi.edu_sharing_client.api.about_api import ABOUTApi from edu_sharing_openapi.edu_sharing_client.api.bulkv1_api import BULKV1Api from edu_sharing_openapi.edu_sharing_client.api.iamv1_api import IAMV1Api @@ -25,7 +26,6 @@ from edu_sharing_openapi.edu_sharing_client.api.nodev1_api import NODEV1Api from edu_sharing_openapi.edu_sharing_client.api_client import ApiClient from edu_sharing_openapi.edu_sharing_client.configuration import Configuration -from edu_sharing_openapi.edu_sharing_client.exceptions import ApiException log = logging.getLogger(__name__) @@ -366,22 +366,29 @@ def map_license(self, spaces, license): 
spaces["ccm:license_to"] = [license["expirationDate"].isoformat()] def transform_item(self, uuid, spider, item): + # ToDo: additional type-checks or pipelines might be necessary + # attention: pydantic validates individual properties and type-checks them! + # - defaulting to None throws ValidationErrors + # - if a property has the value None, either delete the property or don't store it! spaces = { "ccm:replicationsource": spider.name, "ccm:replicationsourceid": item["sourceId"], "ccm:replicationsourcehash": item["hash"], "ccm:replicationsourceuuid": uuid, "cm:name": item["lom"]["general"]["title"], - "ccm:wwwurl": item["lom"]["technical"]["location"][0] if "location" in item["lom"]["technical"] else None, - "cclom:location": item["lom"]["technical"]["location"] if "location" in item["lom"]["technical"] else None, - "cclom:format": item["lom"]["technical"]["format"] if "format" in item["lom"]["technical"] else None, - "cclom:aggregationlevel": item["lom"]["general"]["aggregationLevel"] - if "aggregationLevel" in item["lom"]["general"] - else None, "cclom:title": item["lom"]["general"]["title"], } - if "identifier" in item["lom"]["general"]: - spaces["cclom:general_identifier"] = item["lom"]["general"]["identifier"] + if "general" in item["lom"]: + if "aggregationLevel" in item["lom"]["general"]: + spaces["cclom:aggregationlevel"] = item["lom"]["general"]["aggregationLevel"] + if "description" in item["lom"]["general"]: + spaces["cclom:general_description"] = item["lom"]["general"]["description"] + if "identifier" in item["lom"]["general"]: + spaces["cclom:general_identifier"] = item["lom"]["general"]["identifier"] + if "keyword" in item["lom"]["general"]: + spaces["cclom:general_keyword"] = item["lom"]["general"]["keyword"] + if "language" in item["lom"]["general"]: + spaces["cclom:general_language"] = item["lom"]["general"]["language"] if "notes" in item: spaces["ccm:notes"] = item["notes"] if "status" in item: @@ -414,19 +421,7 @@ def transform_item(self, uuid, spider, item): spaces[key] = item["custom"][key] self.map_license(spaces, item["license"]) - if "description" in item["lom"]["general"]: - spaces["cclom:general_description"] = item["lom"]["general"]["description"] - if "identifier" in item["lom"]["general"]: - spaces["cclom:general_identifier"] = item["lom"]["general"]["identifier"] - - if "language" in item["lom"]["general"]: - spaces["cclom:general_language"] = item["lom"]["general"]["language"] - - if "keyword" in item["lom"]["general"]: - spaces["cclom:general_keyword"] = (item["lom"]["general"]["keyword"],) - else: - spaces["cclom:general_keyword"] = None if "technical" in item["lom"]: if "duration" in item["lom"]["technical"]: duration = item["lom"]["technical"]["duration"] @@ -440,6 +435,13 @@ def transform_item(self, uuid, spider, item): ) pass spaces["cclom:duration"] = duration + if "format" in item["lom"]["technical"]: + spaces["cclom:format"] = item["lom"]["technical"]["format"] + if "location" in item["lom"]["technical"]: + # save the first URL as the main URL: + spaces["ccm:wwwurl"] = item["lom"]["technical"]["location"][0] + # copy the rest of the URLs to "cclom:location": + spaces["cclom:location"] = item["lom"]["technical"]["location"] if "lifecycle" in item["lom"]: for person in item["lom"]["lifecycle"]: @@ -671,6 +673,7 @@ def set_node_permissions(self, uuid, item): if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is False: log.debug("Skipping permissions, EDU_SHARING_PERMISSION_CONTROL is set to false") return + # ToDo: fix pydantic 
ValidationError for permissions ("unexpected keyword argument") if "permissions" in item: permissions = { "inherited": True, # let inherited = true to add additional permissions via edu-sharing @@ -856,12 +859,6 @@ def find_item(self, id, spider): } try: response = EduSharing.bulkApi.find(properties) - properties = response["node"]["properties"] - if "ccm:replicationsourcehash" in properties and "ccm:replicationsourceuuid" in properties: - return [ - properties["ccm:replicationsourceuuid"][0], - properties["ccm:replicationsourcehash"][0], - ] except ApiException as e: # ToDo: # - find a way to handle statuscode 503 ("Service Temporarily Unavailable") gracefully? @@ -877,7 +874,6 @@ def find_item(self, id, spider): self.init_api_client() return None if e.status == 404: - # ToDo: handle "edu_sharing_client.exceptions.NotFoundException" try: error_dict: dict = json.loads(e.body) error_name: str = error_dict["error"] @@ -907,7 +903,13 @@ def find_item(self, id, spider): return None else: raise e - return None + + properties = response["node"]["properties"] + if "ccm:replicationsourcehash" in properties and "ccm:replicationsourceuuid" in properties: + return [ + properties["ccm:replicationsourceuuid"][0], + properties["ccm:replicationsourcehash"][0], + ] def find_source(self, spider): return True From 6765e5bfedaa51fa2314645fa362ed0717559ccb Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 28 Aug 2024 15:17:06 +0200 Subject: [PATCH 545/590] fix: pydantic ValidationError while setting permissions ("unexpected keyword argument") - the old edu-sharing API Client used the "body"-parameter in "set_permissions" for storing/setting the permissions (type: dict) - according to the new API Spec, the "acl"-parameter holds these settings from now on --- converter/es_connector.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 177dd8bc..6bc69788 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -191,7 +191,7 @@ def set_permissions(self, uuid, permissions) -> bool: EduSharing.nodeApi.set_permission( repository=EduSharingConstants.HOME, node=uuid, - body=permissions, + acl=permissions, send_mail=False, send_copy=False, ) @@ -673,7 +673,6 @@ def set_node_permissions(self, uuid, item): if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is False: log.debug("Skipping permissions, EDU_SHARING_PERMISSION_CONTROL is set to false") return - # ToDo: fix pydantic ValidationError for permissions ("unexpected keyword argument") if "permissions" in item: permissions = { "inherited": True, # let inherited = true to add additional permissions via edu-sharing From 2fcb460883a9e5d885835d7b2f0c9d7840638ab1 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 28 Aug 2024 15:46:52 +0200 Subject: [PATCH 546/590] chore: update dependencies Changelogs: - black 24.4.2 to 24.8.0 - see: https://github.com/psf/black/releases/tag/24.8.0 - certifi 2024.6.2 to 2024.7.4 - https://github.com/certifi/python-certifi/releases/tag/2024.07.04 - flake8 v7.1.0 to 7.1.1 - see: https://flake8.pycqa.org/en/latest/release-notes/index.html - httpx v0.27 to v0.27.2 - see: https://github.com/encode/httpx/blob/master/CHANGELOG.md#0272-27th-august-2024 - trafilatura v1.11 to v1.12.1 - https://github.com/adbar/trafilatura/releases/tag/v1.12.1 - pytest v8.2.2 to v8.3.2 - see: https://docs.pytest.org/en/stable/changelog.html#pytest-8-3-2-2024-07-24 - wheel v0.43 to 
v0.44 - see: https://wheel.readthedocs.io/en/stable/news.html#release-notes --- poetry.lock | 171 ++++++++++++++++++++++++----------------------- pyproject.toml | 14 ++-- requirements.txt | 18 ++--- 3 files changed, 104 insertions(+), 99 deletions(-) diff --git a/poetry.lock b/poetry.lock index 797e8f3b..952b3fdd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -134,33 +134,33 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = 
"sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -180,13 +180,13 @@ uvloop = ["uvloop 
(>=0.15.2)"] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -591,13 +591,13 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.1.0" +version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, - {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] @@ -779,13 +779,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -800,6 +800,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "hyperlink" @@ -1755,13 +1756,13 @@ requests = ">=2.32.3" [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [package.dependencies] @@ -1769,7 +1770,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.5,<2.0" +pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] @@ -2038,19 +2039,23 @@ tests = 
["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "73.0.1" +version = "74.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, - {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, + {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, + {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -2145,19 +2150,19 @@ files = [ [[package]] name = "trafilatura" -version = "1.11.0" +version = "1.12.1" description = "Python package and command-line tool designed to gather text on the Web, includes all necessary discovery and text processing components to perform web crawling, downloads, scraping, and extraction of main texts, metadata and comments." 
optional = false python-versions = ">=3.6" files = [ - {file = "trafilatura-1.11.0-py3-none-any.whl", hash = "sha256:20f016be873a2cf3e02b9798f9537d09808559fcc667d42e1c019560ca45dce7"}, - {file = "trafilatura-1.11.0.tar.gz", hash = "sha256:9334ca101c40b2904af5afcee790f0374fabca3ac388811720be65cc768787a2"}, + {file = "trafilatura-1.12.1-py3-none-any.whl", hash = "sha256:1906f2fd8b93b6869cc2325fabb38bc22ca134b980658de72f54c3f0226b557a"}, + {file = "trafilatura-1.12.1.tar.gz", hash = "sha256:89891db646dd84d98fb34a2faed7ba84c22e5490007b587d0599036d35164760"}, ] [package.dependencies] certifi = "*" charset-normalizer = {version = ">=3.2.0", markers = "python_version >= \"3.7\""} -courlan = ">=1.1.0" +courlan = ">=1.2.0" htmldate = ">=1.8.1" justext = ">=3.0.1" lxml = {version = ">=5.2.2", markers = "platform_system != \"Darwin\" or python_version > \"3.8\""} @@ -2296,13 +2301,13 @@ files = [ [[package]] name = "wheel" -version = "0.43.0" +version = "0.44.0" description = "A built-package format for Python" optional = false python-versions = ">=3.8" files = [ - {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, - {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, + {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, + {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, ] [package.extras] @@ -2321,45 +2326,45 @@ files = [ [[package]] name = "zope-interface" -version = "7.0.2" +version = "7.0.3" description = "Interfaces for Python" optional = false python-versions = ">=3.8" files = [ - {file = "zope.interface-7.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:093ab9a2c5105d826755c43a76770b69353dbe95ec27a0b5e88ab4f63d7744b8"}, - {file = "zope.interface-7.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3246cccb9e4ce34c9b32ad55a53098043af5e7185623bf5de8e6ec5d8e71415e"}, - {file = "zope.interface-7.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375258373bc3879a6c509281487063cf14add7129fc867eb1c287c0db46ca007"}, - {file = "zope.interface-7.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf2746931a6f83370fdc4005dbea4e39e3a3d0333da42897040698c1ff282e9c"}, - {file = "zope.interface-7.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deac72b653817a68b96079c1428ae84860c76f653af03668a02f97b74f8a465b"}, - {file = "zope.interface-7.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:9da2fb807a20cd4fe381e23e2f906f0a0f4acece6d9abac65d5fc0a1f8383ed8"}, - {file = "zope.interface-7.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f5e39373952e1d689476b6e43d779553b165ce332d0fde9c36d9b095f28d052"}, - {file = "zope.interface-7.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:13aacff95c59000ecd562d9717a87eca8211f6bc74bea6b8ca68e742d1f8f13d"}, - {file = "zope.interface-7.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67e0ff3f2e02d6131535956b22795be2ec5af6762f4fe682f67eb723fbc16273"}, - {file = "zope.interface-7.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c47a5068df03f0c9215d3525b166c9d8d4f6d03cbe4e60339818f8c393e3e3e"}, - {file = 
"zope.interface-7.0.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceaf7ee95735b0d6ac3f5bba0209d056e686999732dc32bd463a53d4488ccdb"}, - {file = "zope.interface-7.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:dd28ba1e2deb0c339881ee7755db649433347bdf3c4f3d885f029fcf10aacdf7"}, - {file = "zope.interface-7.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4b671f943d6487d6f1a6bbdce3faffae35e4f74f98ac9b865d2b7370cb6b0bd3"}, - {file = "zope.interface-7.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b76f6048c1a334e26e5d46fdb4f327d9e7e6b348ad607ee9fdce9c7325b5a635"}, - {file = "zope.interface-7.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40beb36330ef00d2cd50d212a0c74ecd57042b8c8b2b6ebd6247cc98f9808824"}, - {file = "zope.interface-7.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0821efcbdeaf48e12c66b0d19a1f9edec2ed22697ab8885d322c8f82fe5bc892"}, - {file = "zope.interface-7.0.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ab785a7af39c6968385a9d39b712d2263661fa3780bd38efec0cefdbb84036"}, - {file = "zope.interface-7.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e299f672cfad3392b097af885a552a51e60d3b44e8572f1401e87f863f8986b4"}, - {file = "zope.interface-7.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e5bf8de5a51aaeddd5e1d1c0ac0ca4f995a4f5a832abdc08bb8fbae25ac660"}, - {file = "zope.interface-7.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:185ef3a7a01fac1151622579a08995aab66590711c1a4f9b605f88129229dba1"}, - {file = "zope.interface-7.0.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91bb6b5e1a158b751e12458d5618c1af42eb3dc8472b87a613d543d9fb7660e0"}, - {file = "zope.interface-7.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f77d58cfc3af86d062b8cfa7194db74ca78a615d66bbd23b251bad1b1ecf9818"}, - {file = "zope.interface-7.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ab142cebe69e0f72bf892da040af97f61fd03c09a23ae2fc7de3ab576c5d4cd"}, - {file = "zope.interface-7.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba16bb2214f671e29b75f16d7b8b0bb1f75cdc8bce06979fdbf638edf6531586"}, - {file = "zope.interface-7.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28f29dd42819d99682e46a8d3cc2ee60461a77554d4320e0e8a37363f04208e0"}, - {file = "zope.interface-7.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:797510df26b82cf619a894dac4ff4036d11f6340bec0287c89cecb0b1b1c429e"}, - {file = "zope.interface-7.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:b9d865209cc9795d0f9f4f63b87a86e7a9e032d3cbbb10b1c13bf27343a4fc54"}, - {file = "zope.interface-7.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:08d86319fd7542984d4c0ef7865759dab58616154cb237a5a1ce758687255de0"}, - {file = "zope.interface-7.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:22c93492e5d2f09100a4a23cf709b20f0305cdbbad14f9af2f6e9311742bed8e"}, - {file = "zope.interface-7.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d20fa14f0f8682ad37a6552712e4493cfb35d66c5fb4f8052af3a50ae6cd4f77"}, - {file = 
"zope.interface-7.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:356a9c1c8cfece776f54806157057be759d812168395762f47f046b40901e974"}, - {file = "zope.interface-7.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738de1c72390a2caf543247013f617ed15d272e4c19731a998e81dd5a2379f1c"}, - {file = "zope.interface-7.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:8eab70e404c2416176b4630914cda275ca95678529e54e66ea45d1a0be422994"}, - {file = "zope.interface-7.0.2.tar.gz", hash = "sha256:f1146bb27a411d0d40cc0e88182a6b0e979d68ab526c8e5ae9e27c06506ed017"}, + {file = "zope.interface-7.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b9369671a20b8d039b8e5a1a33abd12e089e319a3383b4cc0bf5c67bd05fe7b"}, + {file = "zope.interface-7.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db6237e8fa91ea4f34d7e2d16d74741187e9105a63bbb5686c61fea04cdbacca"}, + {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53d678bb1c3b784edbfb0adeebfeea6bf479f54da082854406a8f295d36f8386"}, + {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3aa8fcbb0d3c2be1bfd013a0f0acd636f6ed570c287743ae2bbd467ee967154d"}, + {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6195c3c03fef9f87c0dbee0b3b6451df6e056322463cf35bca9a088e564a3c58"}, + {file = "zope.interface-7.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:11fa1382c3efb34abf16becff8cb214b0b2e3144057c90611621f2d186b7e1b7"}, + {file = "zope.interface-7.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af94e429f9d57b36e71ef4e6865182090648aada0cb2d397ae2b3f7fc478493a"}, + {file = "zope.interface-7.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dd647fcd765030638577fe6984284e0ebba1a1008244c8a38824be096e37fe3"}, + {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bee1b722077d08721005e8da493ef3adf0b7908e0cd85cc7dc836ac117d6f32"}, + {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2545d6d7aac425d528cd9bf0d9e55fcd47ab7fd15f41a64b1c4bf4c6b24946dc"}, + {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d04b11ea47c9c369d66340dbe51e9031df2a0de97d68f442305ed7625ad6493"}, + {file = "zope.interface-7.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:064ade95cb54c840647205987c7b557f75d2b2f7d1a84bfab4cf81822ef6e7d1"}, + {file = "zope.interface-7.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3fcdc76d0cde1c09c37b7c6b0f8beba2d857d8417b055d4f47df9c34ec518bdd"}, + {file = "zope.interface-7.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d4b91821305c8d8f6e6207639abcbdaf186db682e521af7855d0bea3047c8ca"}, + {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35062d93bc49bd9b191331c897a96155ffdad10744ab812485b6bad5b588d7e4"}, + {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c96b3e6b0d4f6ddfec4e947130ec30bd2c7b19db6aa633777e46c8eecf1d6afd"}, + {file = 
"zope.interface-7.0.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0c151a6c204f3830237c59ee4770cc346868a7a1af6925e5e38650141a7f05"}, + {file = "zope.interface-7.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:3de1d553ce72868b77a7e9d598c9bff6d3816ad2b4cc81c04f9d8914603814f3"}, + {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab985c566a99cc5f73bc2741d93f1ed24a2cc9da3890144d37b9582965aff996"}, + {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d976fa7b5faf5396eb18ce6c132c98e05504b52b60784e3401f4ef0b2e66709b"}, + {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a207c6b2c58def5011768140861a73f5240f4f39800625072ba84e76c9da0b"}, + {file = "zope.interface-7.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:382d31d1e68877061daaa6499468e9eb38eb7625d4369b1615ac08d3860fe896"}, + {file = "zope.interface-7.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c4316a30e216f51acbd9fb318aa5af2e362b716596d82cbb92f9101c8f8d2e7"}, + {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e6e58078ad2799130c14a1d34ec89044ada0e1495329d72ee0407b9ae5100d"}, + {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799ef7a444aebbad5a145c3b34bff012b54453cddbde3332d47ca07225792ea4"}, + {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3b7ce6d46fb0e60897d62d1ff370790ce50a57d40a651db91a3dde74f73b738"}, + {file = "zope.interface-7.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:f418c88f09c3ba159b95a9d1cfcdbe58f208443abb1f3109f4b9b12fd60b187c"}, + {file = "zope.interface-7.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:84f8794bd59ca7d09d8fce43ae1b571be22f52748169d01a13d3ece8394d8b5b"}, + {file = "zope.interface-7.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d92920416f31786bc1b2f34cc4fc4263a35a407425319572cbf96b51e835cd3"}, + {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e5913ec718010dc0e7c215d79a9683b4990e7026828eedfda5268e74e73e11"}, + {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eeeb92cb7d95c45e726e3c1afe7707919370addae7ed14f614e22217a536958"}, + {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd32f30f40bfd8511b17666895831a51b532e93fc106bfa97f366589d3e4e0e"}, + {file = "zope.interface-7.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5112c530fa8aa2108a3196b9c2f078f5738c1c37cfc716970edc0df0414acda8"}, + {file = "zope.interface-7.0.3.tar.gz", hash = "sha256:cd2690d4b08ec9eaf47a85914fe513062b20da78d10d6d789a792c0b20307fb1"}, ] [package.dependencies] @@ -2373,4 +2378,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "1ea50a9b0e3f78464295e2b405a3de2a1d808b7db8f375239b508c447f455ecf" +content-hash = "e595007c4660eb0b86f8fd6da0aa517f526e7168b8da697ba0ae391b5f97645b" diff --git a/pyproject.toml b/pyproject.toml index f6b620e9..a29792e0 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -60,12 +60,12 @@ packages = [{include = "converter"}] [tool.poetry.dependencies] python = "^3.10" -wheel = "0.43.0" -black = "24.4.2" -certifi="2024.6.2" +wheel = "0.44.0" +black = "24.8.0" +certifi="2024.7.4" dateparser="1.2" extruct="0.17.0" -flake8 = "7.1.0" +flake8 = "7.1.1" html2text="2024.2.26" jmespath="1.0.1" image = "1.5.33" @@ -74,7 +74,7 @@ itemloaders="1.3.1" isodate="0.6.1" Pillow="10.3.0" playwright="1.44.0" -pytest="8.2.2" +pytest="8.3.2" python-dateutil="2.9.0.post0" python-dotenv="1.0.1" requests="2.32.3" @@ -85,10 +85,10 @@ urllib3="2.2.2" vobject="0.9.7" w3lib="2.2.1" xmltodict="0.13.0" -trafilatura = "1.11" +trafilatura = "1.12.1" babel = "2.15.0" langcodes = {extras = ["data"], version = "^3.3.0"} -httpx = "0.27" +httpx = "0.27.2" async-lru = "2.0.4" [tool.poetry.group.edu_sharing_client.dependencies] diff --git a/requirements.txt b/requirements.txt index f61c20f0..9f033614 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,8 +5,8 @@ attrs==24.2.0 ; python_version >= "3.10" and python_version < "4.0" automat==24.8.1 ; python_version >= "3.10" and python_version < "4.0" babel==2.15.0 ; python_version >= "3.10" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" -black==24.4.2 ; python_version >= "3.10" and python_version < "4.0" -certifi==2024.6.2 ; python_version >= "3.10" and python_version < "4.0" +black==24.8.0 ; python_version >= "3.10" and python_version < "4.0" +certifi==2024.7.4 ; python_version >= "3.10" and python_version < "4.0" cffi==1.17.0 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" @@ -21,7 +21,7 @@ django==5.1 ; python_version >= "3.10" and python_version < "4.0" exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" extruct==0.17.0 ; python_version >= "3.10" and python_version < "4.0" filelock==3.15.4 ; python_version >= "3.10" and python_version < "4.0" -flake8==7.1.0 ; python_version >= "3.10" and python_version < "4.0" +flake8==7.1.1 ; python_version >= "3.10" and python_version < "4.0" greenlet==3.0.3 ; python_version >= "3.10" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" html-text==0.6.2 ; python_version >= "3.10" and python_version < "4.0" @@ -29,7 +29,7 @@ html2text==2024.2.26 ; python_version >= "3.10" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" htmldate==1.9.0 ; python_version >= "3.10" and python_version < "4.0" httpcore==1.0.5 ; python_version >= "3.10" and python_version < "4.0" -httpx==0.27.0 ; python_version >= "3.10" and python_version < "4.0" +httpx==0.27.2 ; python_version >= "3.10" and python_version < "4.0" hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" idna==3.8 ; python_version >= "3.10" and python_version < "4.0" image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" @@ -69,7 +69,7 @@ pyopenssl==24.2.1 ; python_version >= "3.10" and python_version < "4.0" pyparsing==3.1.4 ; python_version >= "3.10" and python_version < "4.0" pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" pyrdfa3==3.6.4 ; python_version >= "3.10" and python_version < "4.0" -pytest==8.2.2 ; python_version >= "3.10" and python_version < "4.0" 
+pytest==8.3.2 ; python_version >= "3.10" and python_version < "4.0" python-dateutil==2.9.0.post0 ; python_version >= "3.10" and python_version < "4.0" python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" pytz==2024.1 ; python_version >= "3.10" and python_version < "4.0" @@ -81,7 +81,7 @@ requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" scrapy==2.11.2 ; python_version >= "3.10" and python_version < "4.0" service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==73.0.1 ; python_version >= "3.10" and python_version < "4.0" +setuptools==74.0.0 ; python_version >= "3.10" and python_version < "4.0" six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" soupsieve==2.6 ; python_version >= "3.10" and python_version < "4.0" @@ -89,7 +89,7 @@ sqlparse==0.5.1 ; python_version >= "3.10" and python_version < "4.0" tld==0.13 ; python_version >= "3.10" and python_version < "4" tldextract==5.1.2 ; python_version >= "3.10" and python_version < "4.0" tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" -trafilatura==1.11.0 ; python_version >= "3.10" and python_version < "4.0" +trafilatura==1.12.1 ; python_version >= "3.10" and python_version < "4.0" twisted==24.7.0 ; python_version >= "3.10" and python_version < "4.0" typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" tzdata==2024.1 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") @@ -98,6 +98,6 @@ urllib3==2.2.2 ; python_version >= "3.10" and python_version < "4.0" vobject==0.9.7 ; python_version >= "3.10" and python_version < "4.0" w3lib==2.2.1 ; python_version >= "3.10" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" -wheel==0.43.0 ; python_version >= "3.10" and python_version < "4.0" +wheel==0.44.0 ; python_version >= "3.10" and python_version < "4.0" xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" -zope-interface==7.0.2 ; python_version >= "3.10" and python_version < "4.0" +zope-interface==7.0.3 ; python_version >= "3.10" and python_version < "4.0" From d785419220ed4467e693b4e7a8319d8179dacbf9 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 29 Aug 2024 11:13:49 +0200 Subject: [PATCH 547/590] DMED-119 - Remove unused import statement in `TestSpider` --- converter/spiders/test.py | 1 - 1 file changed, 1 deletion(-) diff --git a/converter/spiders/test.py b/converter/spiders/test.py index 49e27042..4fe3679c 100644 --- a/converter/spiders/test.py +++ b/converter/spiders/test.py @@ -6,7 +6,6 @@ import sys import vobject -from converter.constants import Constants from converter.es_connector import EduSharingConstants from converter.items import LomAgeRangeItemLoader from converter.spiders.base_classes.lom_base import LomBase From 377fc23b089bf76c4ecab75bfbaec9f49de1b457 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 29 Aug 2024 13:35:37 +0200 Subject: [PATCH 548/590] fix: edu-sharing API client init - the "stable demo"-instance of edu-sharing v9.x doesn't provide a filled "services"-dict like in previous edu-sharing versions, which caused the crawler to exit with an error - implemented a (rudimentary) check to look for the version information in edu-sharing v8 and v9 repositories --- 
converter/es_connector.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 6bc69788..21fde7fc 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -813,16 +813,22 @@ def init_api_client(self): EduSharing.mediacenterApi = MEDIACENTERV1Api(EduSharing.apiClient) EduSharing.nodeApi = NODEV1Api(EduSharing.apiClient) about = EduSharing.aboutApi.about() - EduSharing.version = list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0][ - "version" - ] - version_str = str(EduSharing.version["major"]) + "." + str(EduSharing.version["minor"]) + if "services" in about and about["services"]: + # edu-sharing API v6.x to v8.1 behavior: looking for the BULK v1 API "version"-dict + EduSharing.version = \ + list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0]["version"] + elif "version" in about and about["version"]: + # edu-sharing API v9.x behavior: + # we expect a "version"-dict to exist within the "about"-dict that might look like this: + # {'major': 1, 'minor': 1, 'renderservice': '9.0', 'repository': '9.0'} + EduSharing.version = about["version"] + version_str: str = f"{EduSharing.version["major"]}.{EduSharing.version["minor"]}" if ( EduSharing.version["major"] != 1 or EduSharing.version["minor"] < 0 or EduSharing.version["minor"] > 1 ): - raise Exception(f"Given repository api version is unsupported: " + version_str) + raise Exception(f"Given repository API version is unsupported: " + version_str) else: log.info("Detected edu-sharing bulk api with version " + version_str) if env.get_bool("EDU_SHARING_PERMISSION_CONTROL", False, True) is True: From 591455c6e13611edaeec536848e6752c0b8e7b70 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 29 Aug 2024 13:53:49 +0200 Subject: [PATCH 549/590] fix: ValidationErrors (keywords / typicalAgeRange) - typecast the "keyword"-set to list[str] before adding it to the ItemLoader - typecast typicalAgeRange values (from int) to str before adding them to the itemloader --- converter/spiders/science_in_school_spider.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/converter/spiders/science_in_school_spider.py b/converter/spiders/science_in_school_spider.py index f79158da..cc7e9e20 100644 --- a/converter/spiders/science_in_school_spider.py +++ b/converter/spiders/science_in_school_spider.py @@ -25,7 +25,7 @@ class ScienceInSchoolSpider(scrapy.Spider, LomBase): name = "science_in_school_spider" friendlyName = "Science in School" start_urls = ["https://www.scienceinschool.org/issue/"] - version = "0.0.5" # last update: 2023-08-02 + version = "0.0.5" # last update: 2024-08-29 custom_settings = {"AUTOTHROTTLE_ENABLED": True, "AUTOTHROTTLE_DEBUG": True} allowed_domains = ["scienceinschool.org"] ALL_ARTICLE_URLS = set() @@ -254,7 +254,8 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade if title: general.add_value("title", title) if keywords: - general.add_value("keyword", keywords) + keyword_list: list[str] = list(keywords) + general.add_value("keyword", keyword_list) if description: general.add_value("description", description) if language: @@ -321,8 +322,8 @@ async def parse(self, response: scrapy.http.Response, **kwargs) -> BaseItemLoade age_range_total.add(from_range) age_range_total.add(to_range) if age_range_total: - lom_age_range_loader.add_value("fromRange", min(age_range_total)) - 
lom_age_range_loader.add_value("toRange", max(age_range_total)) + lom_age_range_loader.add_value("fromRange", str(min(age_range_total))) + lom_age_range_loader.add_value("toRange", str(max(age_range_total))) educational.add_value("typicalAgeRange", lom_age_range_loader.load_item()) lom.add_value("educational", educational.load_item()) From c991bf778e419b3953fe1b2723a7ab6a93f659b0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 30 Aug 2024 13:48:51 +0200 Subject: [PATCH 550/590] docs: URL to documentation of openapi-generator-cli commands - removed the old "swagger"-generated command that required a specific (outdated) toolkit version and replaced it with a more detailed wiki article - (for historic context's sake, the old command is also available in the linked wiki article) --- converter/es_connector.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 21fde7fc..a75d0336 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -49,8 +49,9 @@ class EduSharingConstants: "unknown": "ccm:lifecyclecontributer_unknown", # (= contributor in an unknown capacity ("Mitarbeiter")) } -# creating the swagger client: java -jar swagger-codegen-cli-3.0.20.jar generate -l python -i http://localhost:8080/edu-sharing/rest/swagger.json -o edu_sharing_swagger -c edu-sharing-swagger.config.json -# ToDo: document API-Client generation via "openapi-generator-cli" with all necessary settings +# The edu-sharing API client was generated via "openapi-generator-cli" +# for more information on how to generate the client, please take a look in the oeh-search-etl GitHub Wiki: +# https://github.com/openeduhub/oeh-search-etl/wiki/How-To-update-edu-sharing-OpenAPI-Client class ESApiClient(ApiClient): COOKIE_REBUILD_THRESHOLD = 60 * 5 lastRequestTime = 0 From a4388ead0665dc27d7d5dfb4de79a00f0b24cf0a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Fri, 30 Aug 2024 17:34:36 +0200 Subject: [PATCH 551/590] fix: flake8 E999 SyntaxError (f-string) --- converter/es_connector.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index a75d0336..2bd91334 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -817,13 +817,13 @@ def init_api_client(self): if "services" in about and about["services"]: # edu-sharing API v6.x to v8.1 behavior: looking for the BULK v1 API "version"-dict EduSharing.version = \ - list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0]["version"] + list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0]["version"] elif "version" in about and about["version"]: # edu-sharing API v9.x behavior: # we expect a "version"-dict to exist within the "about"-dict that might look like this: # {'major': 1, 'minor': 1, 'renderservice': '9.0', 'repository': '9.0'} EduSharing.version = about["version"] - version_str: str = f"{EduSharing.version["major"]}.{EduSharing.version["minor"]}" + version_str: str = f"{EduSharing.version['major']}.{EduSharing.version['minor']}" if ( EduSharing.version["major"] != 1 or EduSharing.version["minor"] < 0 From 5b99a66ac28083c3e080244cc3404cfc5922132d Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Tue, 3 Sep 2024 08:27:51 +0200 Subject: [PATCH 552/590] DMED-119 - revert `run.py` to its original state --- run.py | 19 ++++++++----------- 1 file changed, 8 
insertions(+), 11 deletions(-) diff --git a/run.py b/run.py index 0d51ff92..83e157fe 100644 --- a/run.py +++ b/run.py @@ -1,5 +1,4 @@ -import asyncio import sys import datetime as dt import time @@ -13,8 +12,6 @@ from schulcloud.permission_updater import PermissionUpdater from schulcloud.oeh_importer import OehImporter -import nest_asyncio -nest_asyncio.apply() needed_env_vars = [ 'CRAWLER', @@ -129,9 +126,9 @@ def __init__(self, name: str, function: Callable, schedule: list[str]): if not self.schedule_rules: raise ValueError('No schedule') - async def run_schedule(self): + def run_schedule(self): if not self.schedule_rules: - await self.run() + self.run() return while True: now = dt.datetime.now() @@ -149,14 +146,14 @@ async def run_schedule(self): time.sleep(min(time_remaining.total_seconds(), check_interval_seconds)) continue - await self.run() + self.run() break - async def run(self): - await self.function() + def run(self): + self.function() -async def main(): +def main(): env = Environment(env_vars=needed_env_vars) schedule = env['SCHEDULE'].split(';') crawler = env['CRAWLER'].lower() @@ -181,9 +178,9 @@ async def main(): print(f'Unexpected execution target "{crawler}"', file=sys.stderr) return 1 - await job.run_schedule() + job.run_schedule() return 0 if __name__ == '__main__': - asyncio.run(main()) + sys.exit(main()) From b2c52a79de08be95e249773f4af02fd15e6feb11 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Tue, 3 Sep 2024 08:30:40 +0200 Subject: [PATCH 553/590] DMED-119 - remove `oeh_importer` and `test_spider` --- charts/edusharing-crawler/README.md | 4 +- converter/spiders/test.py | 216 -------------------- run.py | 4 - schulcloud/oeh_importer.py | 292 ---------------------------- 4 files changed, 2 insertions(+), 514 deletions(-) delete mode 100644 converter/spiders/test.py delete mode 100644 schulcloud/oeh_importer.py diff --git a/charts/edusharing-crawler/README.md b/charts/edusharing-crawler/README.md index 1fae975d..4dde0c24 100644 --- a/charts/edusharing-crawler/README.md +++ b/charts/edusharing-crawler/README.md @@ -8,7 +8,7 @@ ## List of available Crawlers/Scripts * mediothek_pixiothek_spider * merlin_spider -* oeh_importer +* oeh_spider * permission_updater * sodix_spider @@ -35,7 +35,7 @@ The following environment variables are read: | EDU_SHARING_PASSWORD | ALL | Edusharing Password to authenticate | `my_password` | | DRY_RUN | ALL (optinal) | Define whether not to upload to Edu-Sharing instance (default is `False`) | `False` | | LOG_LEVEL | ALL (optional) | Set the Log Level (default is `INFO`) | `INFO` | -| SPLASH_URL | mediothek_pixiothek_spider, merlin_spider, oeh_importer, sodix_spider | Provide Url for Crawler to connect to | `http://splash.my-namespace.svc.cluster.local:8050` | +| SPLASH_URL | mediothek_pixiothek_spider, merlin_spider, oeh_spider, sodix_spider | Provide Url for Crawler to connect to | `http://splash.my-namespace.svc.cluster.local:8050` | | SODIX_USER | sodix_spider | Sodix User to authenticate | `my_sodix_user` | | SODIX_PASSWORD | sodix_spider | Sodix Password to authenticate | `my_sodix_password` | | S3_ACCESS_KEY | h5p_upload, fwu_upload | Access Key with access to the Bucket | `my_s3_access_key` | diff --git a/converter/spiders/test.py b/converter/spiders/test.py deleted file mode 100644 index 4fe3679c..00000000 --- a/converter/spiders/test.py +++ /dev/null @@ -1,216 +0,0 @@ -import datetime -import json -import logging -import requests -import scrapy -import sys -import vobject - -from converter.es_connector import 
EduSharingConstants -from converter.items import LomAgeRangeItemLoader -from converter.spiders.base_classes.lom_base import LomBase - -class TestSpider(LomBase, scrapy.Spider): - name = "test_spider" - allowed_domains = ["redaktion.openeduhub.net"] - - API_URL = 'https://redaktion.openeduhub.net/edu-sharing/' - MDS_ID = 'mds_oeh' - - total = -1 - offset = 0 - count = 100 - - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) - - self.log = logging.getLogger('OehImporter') - self.log.setLevel(logging.DEBUG) - self.log.addHandler(logging.FileHandler('oeh2_output.txt')) - - self.fake_request = scrapy.http.Request(self.API_URL) - self.fake_response = scrapy.http.Response(self.API_URL, request=self.fake_request) - - def start_requests(self): - url = f'https://redaktion.openeduhub.net/edu-sharing/rest/search/v1/queries/-home-/{self.MDS_ID}/ngsearch?contentType=FILES&maxItems={self.count}&skipCount={self.offset}&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-' - headers = { - 'Accept': 'application/json', - 'Content-Type': 'application/json' - } - body = { - 'criteria': [] - } - yield scrapy.Request(url=url, body=json.dumps(body), headers=headers, method='POST', callback=self.parse) - - async def parse(self, response): - data = json.loads(response.body) - - if self.total == -1: - self.total = data['pagination']['total'] - - nodes = data['nodes'] - for j in range(len(nodes)): - node = nodes[j] - self.log.debug(f'{datetime.datetime.now()} {self.offset+j} / {self.total} :: {node["ccm:replicationsource"] if "ccm:replicationsource" in node else ""} :: {node["name"]}') - ending = node['name'].rsplit('.', 1)[-1] - if ending in ('mp4', 'h5p'): - self.log.info('skipped') - continue - - response_copy = self.fake_response.replace(url=node['content']['url']) - self.fake_response.meta['item'] = node - - item = await LomBase.parse(self, response_copy) - yield item - - self.offset += len(nodes) - if self.offset < self.total: - url = f'https://redaktion.openeduhub.net/edu-sharing/rest/search/v1/queries/-home-/{self.MDS_ID}/ngsearch?contentType=FILES&maxItems={self.count}&skipCount={self.offset}&sortProperties=cm%3Acreated&sortAscending=true&propertyFilter=-all-' - headers = { - 'Accept': 'application/json', - 'Content-Type': 'application/json' - } - body = { - 'criteria': [] - } - yield scrapy.Request(url=url, body=json.dumps(body), headers=headers, method='POST', callback=self.parse) - - def getProperty(self, name, response): - return ( - response.meta["item"]["properties"][name] - if name in response.meta["item"]["properties"] - else None - ) - - def getBase(self, response): - base = LomBase.getBase(self, response) - base.replace_value("thumbnail", response.meta["item"]["preview"]["url"]) - base.replace_value( - "origin", self.getProperty("ccm:replicationsource", response) - ) - if self.getProperty("ccm:replicationsource", response): - # imported objects usually have the content as binary text - # TODO: Sometimes, edu-sharing redirects if no local content is found, and this should be html-parsed - if response.meta["item"]["downloadUrl"]: - try: - r = requests.get(response.meta["item"]["downloadUrl"]) - if r.status_code == 200: - base.replace_value("fulltext", r.text) - except: - logging.warning( - "error fetching data from " + str(response.meta["item"]["downloadUrl"]), - sys.exc_info()[0], - ) - # TODO - #else: - # # try to transform using alfresco - # r = requests.get( - # self.apiUrl - # + "/node/v1/nodes/" - # + response.meta["item"]["ref"]["repo"] - # + "/" - # + 
response.meta["item"]["ref"]["id"] - # + "/textContent", - # headers={"Accept": "application/json"}, - # ).json() - # if "text" in r: - # base.replace_value("fulltext", r["text"]) - - return base - - # fulltext is handled in base, response is not necessary - async def mapResponse(self, response, fetchData=True): - return await LomBase.mapResponse(self, response, False) - - def getId(self, response=None) -> str: - return response.meta["item"]["ref"]["id"] - - def getHash(self, response=None) -> str: - return self.version + response.meta["item"]["properties"]["cm:modified"][0] - - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.replace_value("title", response.meta["item"]["title"]) - general.add_value( - "keyword", self.getProperty("cclom:general_keyword", response) - ) - general.add_value( - "description", self.getProperty("cclom:general_description", response) - ) - general.replace_value("aggregationLevel", "1") - return general - - def getLOMEducational(self, response): - educational = LomBase.getLOMEducational(self, response) - tar_from = self.getProperty("ccm:educationaltypicalagerange_from", response) - tar_to = self.getProperty("ccm:educationaltypicalagerange_to", response) - if tar_from and tar_to: - range = LomAgeRangeItemLoader() - range.add_value("fromRange", tar_from) - range.add_value("toRange", tar_to) - educational.add_value("typicalAgeRange", range.load_item()) - return educational - - def getLOMLifecycle(self, response): - lifecycle = LomBase.getLOMLifecycle(self, response) - for role in EduSharingConstants.LIFECYCLE_ROLES_MAPPING.keys(): - entry = self.getProperty("ccm:lifecyclecontributer_" + role, response) - if entry and entry[0]: - # TODO: we currently only support one author per role - vcard = vobject.readOne(entry[0]) - if hasattr(vcard, "n"): - given = vcard.n.value.given - family = vcard.n.value.family - lifecycle.add_value("role", role) - lifecycle.add_value("firstName", given) - lifecycle.add_value("lastName", family) - return lifecycle - - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.replace_value("format", "text/html") - technical.replace_value("duration", self.getProperty("cclom:duration", response)) - if 'ccm:wwwurl' in response.meta['item']['properties']: - technical.replace_value("location", response.meta["item"]["properties"]["ccm:wwwurl"][0]) - else: - technical.replace_value("location", response.url) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value("url", response.meta["item"]["license"]["url"]) - license.add_value( - "internal", self.getProperty("ccm:commonlicense_key", response) - ) - license.add_value("author", self.getProperty("ccm:author_freetext", response)) - return license - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - valuespaces.add_value("discipline", self.getProperty("ccm:taxonid", response)) - valuespaces.add_value( - "intendedEndUserRole", - self.getProperty("ccm:educationalintendedenduserrole", response), - ) - valuespaces.add_value( - "educationalContext", self.getProperty("ccm:educationalcontext", response) - ) - valuespaces.add_value( - "learningResourceType", - self.getProperty("ccm:educationallearningresourcetype", response), - ) - valuespaces.add_value( - "sourceContentType", self.getProperty("ccm:sourceContentType", response) - ) - valuespaces.add_value( - "toolCategory", self.getProperty("ccm:toolCategory", 
response) - ) - return valuespaces - - def getPermissions(self, response): - permissions = LomBase.getPermissions(self, response) - permissions.replace_value("public", False) - return permissions - - def shouldImport(self, response=None): - return "ccm:collection_io_reference" not in response.meta["item"]["aspects"] \ No newline at end of file diff --git a/run.py b/run.py index 83e157fe..ee3fdde2 100644 --- a/run.py +++ b/run.py @@ -10,8 +10,6 @@ from schulcloud.h5p.upload import Uploader as H5PUploader from schulcloud.fwu.upload_fwu import Uploader as FWU_Uploader from schulcloud.permission_updater import PermissionUpdater -from schulcloud.oeh_importer import OehImporter - needed_env_vars = [ 'CRAWLER', @@ -166,8 +164,6 @@ def main(): job = Job('FWU Uploader', FWU_Uploader().upload, schedule) elif crawler == 'permission_updater': job = Job('Permission Updater', PermissionUpdater().run, schedule) - elif crawler == 'oeh_importer': - job = Job('OEH Importer', OehImporter().run, schedule) elif crawler.endswith('spider'): job = Job( f'Crawler {crawler}', diff --git a/schulcloud/oeh_importer.py b/schulcloud/oeh_importer.py deleted file mode 100644 index 3db9aebf..00000000 --- a/schulcloud/oeh_importer.py +++ /dev/null @@ -1,292 +0,0 @@ -import asyncio -import datetime -import sys -import logging -import time -import traceback - -import requests -import scrapy as scrapy -from scrapy.crawler import Crawler -from scrapy.exceptions import DropItem -from scrapy.spiders import Spider -from scrapy.utils.project import get_project_settings -import vobject - -from edu_sharing_client.rest import ApiException - -from converter.spiders.base_classes.lom_base import LomBase, LomAgeRangeItemLoader -from converter.es_connector import EduSharingConstants -from converter.pipelines import EduSharingCheckPipeline, FilterSparsePipeline, LOMFillupPipeline, NormLicensePipeline,\ - ConvertTimePipeline, ProcessValuespacePipeline, ProcessThumbnailPipeline, EduSharingStorePipeline, BasicPipeline - -from schulcloud.edusharing import EdusharingAPI, RequestTimeoutException - -import nest_asyncio -nest_asyncio.apply() - -class OehImporter(LomBase, Spider): - name = "oeh_importer" - friendlyName = "Open Edu Hub" - API_URL = 'https://redaktion.openeduhub.net/edu-sharing/' - MDS_ID = 'mds_oeh' - - def __init__(self, **kwargs): - LomBase.__init__(self, **kwargs) - - self.log = logging.getLogger('OehImporter') - self.log.setLevel(logging.DEBUG) - self.log.addHandler(logging.FileHandler('oeh2_output.txt')) - - self.pipeline: list[BasicPipeline] = [ - EduSharingCheckPipeline(), - FilterSparsePipeline(), - LOMFillupPipeline(), - NormLicensePipeline(), - ConvertTimePipeline(), - ProcessValuespacePipeline(), - ProcessThumbnailPipeline(), - EduSharingStorePipeline() - ] - - self.api = EdusharingAPI(self.API_URL) - - self.total = -1 - - self.fake_request = scrapy.http.Request(self.API_URL) - self.fake_response = scrapy.http.Response(self.API_URL, request=self.fake_request) - - self.crawler = Crawler(OehImporter) - self.crawler._apply_settings() - self.crawler.engine = self.crawler._create_engine() - start_requests = iter(self.start_requests()) - self.crawler.engine.open_spider(self, start_requests) - self.crawler.engine.start() - - asyncio.run(self.run()) - - async def run(self): - i = 0 - while True: - nodes = self.request(i, 100) - for j in range(len(nodes)): - node = nodes[j] - self.log.debug(f'{datetime.datetime.now()} {i+j} / {self.total} :: {node["ccm:replicationsource"] if "ccm:replicationsource" in node else ""} :: 
{node["name"]}') - ending = node['name'].rsplit('.', 1)[-1] - if ending in ('mp4', 'h5p'): - self.log.info('skipped') - continue - await self.process_node(node) - i += len(nodes) - if i >= self.total: - break - - def request(self, offset: int, count: int): - search_url = f'/search/v1/queries/-home-/{self.MDS_ID}/ngsearch' - params = { - 'contentType': 'FILES', - 'maxItems': str(count), - 'skipCount': str(offset), - 'sortProperties': 'cm%3Acreated', # 'cm:created' - 'sortAscending': 'true', - 'propertyFilter': '-all-' - } - body = { - 'criteria': [] - } - - try: - response = self.api.make_request('POST', search_url, params=params, json_data=body, timeout=30) - except RequestTimeoutException: - print('~~~ <-', params) - raise RuntimeError('Timeout') - - print(response.status_code, '<-', params, '->', response.headers['Content-Type']) - - if response.status_code == 200 and response.headers['Content-Type'] == 'application/json': - data = response.json() - if self.total == -1: - self.total = data['pagination']['total'] - return data['nodes'] - else: - print(response.text) - raise RuntimeError(f'Unexpected response: {response.status_code} {response.text}') - - async def process_node(self, node: dict): - response_copy = self.fake_response.replace(url=node['content']['url']) - self.fake_response.meta['item'] = node - while True: - try: - if self.hasChanged(response_copy): - item = await LomBase.parse(self, response_copy) - await self.send_to_pipeline(item) - except ApiException as exc: - # sometimes edusharing will return 401 "admin rights required" for all bulk.find requests - if exc.status in (401, 503, 504): - time.sleep(10) - print('retry') - continue - self.log.error(traceback.format_exc()) - except DropItem as exc: - self.log.warning(f'Item dropped: {exc.args[0]}') - except KeyboardInterrupt: - self.log.info('KeyboardInterrupt') - exit(1) - except: - self.log.error(traceback.format_exc()) - break - - async def send_to_pipeline(self, item: scrapy.Item): - for pipeline in self.pipeline: - # spider has to be an object with a "name" attribute - if asyncio.iscoroutinefunction(pipeline.process_item): - item = await pipeline.process_item(item, self) - else: - item = pipeline.process_item(item, self) - - def getProperty(self, name, response): - return ( - response.meta["item"]["properties"][name] - if name in response.meta["item"]["properties"] - else None - ) - - def getBase(self, response): - base = LomBase.getBase(self, response) - base.replace_value("thumbnail", response.meta["item"]["preview"]["url"]) - base.replace_value( - "origin", self.getProperty("ccm:replicationsource", response) - ) - if self.getProperty("ccm:replicationsource", response): - # imported objects usually have the content as binary text - # TODO: Sometimes, edu-sharing redirects if no local content is found, and this should be html-parsed - if response.meta["item"]["downloadUrl"]: - try: - r = requests.get(response.meta["item"]["downloadUrl"]) - if r.status_code == 200: - base.replace_value("fulltext", r.text) - except: - logging.warning( - "error fetching data from " + str(response.meta["item"]["downloadUrl"]), - sys.exc_info()[0], - ) - # TODO - #else: - # # try to transform using alfresco - # r = requests.get( - # self.apiUrl - # + "/node/v1/nodes/" - # + response.meta["item"]["ref"]["repo"] - # + "/" - # + response.meta["item"]["ref"]["id"] - # + "/textContent", - # headers={"Accept": "application/json"}, - # ).json() - # if "text" in r: - # base.replace_value("fulltext", r["text"]) - - return base - - # fulltext is 
handled in base, response is not necessary - async def mapResponse(self, response, fetchData=True): - return await LomBase.mapResponse(self, response, False) - - def getId(self, response=None) -> str: - return response.meta["item"]["ref"]["id"] - - def getHash(self, response=None) -> str: - return self.version + response.meta["item"]["properties"]["cm:modified"][0] - - def getLOMGeneral(self, response): - general = LomBase.getLOMGeneral(self, response) - general.replace_value("title", response.meta["item"]["title"]) - general.add_value( - "keyword", self.getProperty("cclom:general_keyword", response) - ) - general.add_value( - "description", self.getProperty("cclom:general_description", response) - ) - general.replace_value("aggregationLevel", "1") - return general - - def getLOMEducational(self, response): - educational = LomBase.getLOMEducational(self, response) - tar_from = self.getProperty("ccm:educationaltypicalagerange_from", response) - tar_to = self.getProperty("ccm:educationaltypicalagerange_to", response) - if tar_from and tar_to: - range = LomAgeRangeItemLoader() - range.add_value("fromRange", tar_from) - range.add_value("toRange", tar_to) - educational.add_value("typicalAgeRange", range.load_item()) - return educational - - def getLOMLifecycle(self, response): - lifecycle = LomBase.getLOMLifecycle(self, response) - for role in EduSharingConstants.LIFECYCLE_ROLES_MAPPING.keys(): - entry = self.getProperty("ccm:lifecyclecontributer_" + role, response) - if entry and entry[0]: - # TODO: we currently only support one author per role - vcard = vobject.readOne(entry[0]) - if hasattr(vcard, "n"): - given = vcard.n.value.given - family = vcard.n.value.family - lifecycle.add_value("role", role) - lifecycle.add_value("firstName", given) - lifecycle.add_value("lastName", family) - return lifecycle - - def getLOMTechnical(self, response): - technical = LomBase.getLOMTechnical(self, response) - technical.replace_value("format", "text/html") - technical.replace_value("duration", self.getProperty("cclom:duration", response)) - if 'ccm:wwwurl' in response.meta['item']['properties']: - technical.replace_value("location", response.meta["item"]["properties"]["ccm:wwwurl"][0]) - else: - technical.replace_value("location", response.url) - return technical - - def getLicense(self, response): - license = LomBase.getLicense(self, response) - license.add_value("url", response.meta["item"]["license"]["url"]) - license.add_value( - "internal", self.getProperty("ccm:commonlicense_key", response) - ) - license.add_value("author", self.getProperty("ccm:author_freetext", response)) - return license - - def getValuespaces(self, response): - valuespaces = LomBase.getValuespaces(self, response) - valuespaces.add_value("discipline", self.getProperty("ccm:taxonid", response)) - valuespaces.add_value( - "intendedEndUserRole", - self.getProperty("ccm:educationalintendedenduserrole", response), - ) - valuespaces.add_value( - "educationalContext", self.getProperty("ccm:educationalcontext", response) - ) - valuespaces.add_value( - "learningResourceType", - self.getProperty("ccm:educationallearningresourcetype", response), - ) - valuespaces.add_value( - "sourceContentType", self.getProperty("ccm:sourceContentType", response) - ) - valuespaces.add_value( - "toolCategory", self.getProperty("ccm:toolCategory", response) - ) - return valuespaces - - def getPermissions(self, response): - permissions = LomBase.getPermissions(self, response) - permissions.replace_value("public", False) - return permissions - - def 
shouldImport(self, response=None): - return "ccm:collection_io_reference" not in response.meta["item"]["aspects"] - - -async def main(): - await OehImporter().run() - -if __name__ == '__main__': - asyncio.run(main()) From 1c2b200dec9f2d65bdc68616be2efb438b0bde71 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Tue, 3 Sep 2024 08:38:34 +0200 Subject: [PATCH 554/590] DMED-119 - revert and minor changes in `Readme.md` --- Readme.md | 65 ++++++++++++++++++++++++------------------------------- 1 file changed, 28 insertions(+), 37 deletions(-) diff --git a/Readme.md b/Readme.md index 03bc040a..e641c3c8 100644 --- a/Readme.md +++ b/Readme.md @@ -1,20 +1,33 @@ # Open Edu Hub Search ETL -## Step 1: Project Setup - Python (manual approach) +This repository is forked from openeduhub. Only a few spiders are directly in use +with oeh_spider being the main one. Others are mediothek_pixiothek, merlin, sodix. +Notable differences to the original repository are schulcloud/ and run.py. -- make sure you have python3 installed () - - (Python 3.10 or newer is required) -- go to project root -- Run the following commands: +The terms "spider" and "crawler" may be used interchangeable. -``` -sudo apt install python3-dev python3-pip python3-venv libpq-dev -y +## Requirements +Before doing anything in this repository, make sure you meet the following requirements: + +- docker and docker-compose +- Python 3.11 +- a python virtual environment +- an .env file containing all the necessary credentials and settings +- splash service for crawlers + +Debian-based systems: +```bash +sudo apt install python3.11 python3-dev python3-pip python3-venv libpq-dev python3 -m venv .venv +source .venv/bin/activate +pip3 install -r requirements.txt +cp .env.example .env +# adjust .env according to your use case ``` For windows, go to python.org to download and install the proper python version. After that: ```commandline -python3.9 -m venv .venv +python3 -m venv .venv .venv\Scripts\activate.bat pip3 install -r requirements.txt copy .env.example .env @@ -29,16 +42,6 @@ Splash creates screenshots from web pages when thumbnails are not available. There is another service for use at crawl time called "pyppeteer" which is currently not in use by our crawlers. -## Step 1 (alternative): Project Setup - Python (automated, via `poetry`) - -- Step 1: Make sure that you have [Poetry](https://python-poetry.org) v1.5.0+ installed -- Step 2: Open your terminal in the project root directory: - - Step 2.1: (this is an optional, strictly personal preference) If you want to have your `.venv` to be created in the project root directory: - - `poetry config virtualenvs.in-project true` -- Step 3: Install dependencies (according to `pyproject.toml`) by running: `poetry install` - -## Step 2: Project Setup - required Docker Containers -If you have Docker installed, use `docker-compose up` to start up the multi-container for `Splash` and `Playwright`-integration. ## Run a crawler (Activate your virtual environment as in requirements above, if not already done.) @@ -48,26 +51,14 @@ scrapy crawl oeh_spider If a crawler has [Scrapy Spider Contracts](https://docs.scrapy.org/en/latest/topics/contracts.html#spiders-contracts) implemented, you can test those by running `scrapy check ` -# Running crawlers - -- A crawler can be run with `scrapy crawl `. - - (It assumes that you have an edu-sharing v6.0+ instance in your `.env` settings configured which can accept the data.) 
-- If a crawler has [Scrapy Spider Contracts](https://docs.scrapy.org/en/latest/topics/contracts.html#spiders-contracts) implemented, you can test those by running `scrapy check ` - - -## Running crawlers via Docker - +Or using the docker image: ```bash -git clone https://github.com/openeduhub/oeh-search-etl -cd oeh-search-etl -cp converter/.env.example .env -# modify .env with your edu sharing instance -export CRAWLER=your_crawler_id_spider # i.e. wirlernenonline_spider -docker compose build scrapy -docker compose up +docker build --tag oeh-search-etl . +./docker_run.sh oeh_spider ``` +From the docker image respectively run.py, there are also other options one can execute like H5P upload or sodix permission script. -# Building a Crawler +## Writing a spider/crawler - We use scrapy, a framework for crawling metadata from the web - To create a new spider, inside `converter/spiders/`, copy `sample_spider.py` to `_spider.py` @@ -79,5 +70,5 @@ docker compose up - To learn more about the LOM standard we're using, you'll find useful information at https://en.wikipedia.org/wiki/Learning_object_metadata - For more information, have a look into Confluence ("Using OpenEduHub (OEH) spiders") -# Still have questions? Check out our GitHub-Wiki! -If you need help getting started or setting up your work environment, please don't hesitate to visit our GitHub Wiki at https://github.com/openeduhub/oeh-search-etl/wiki +## Still have questions? Check out our GitHub-Wiki! +If you need help getting started or setting up your work environment, please don't hesitate to visit our GitHub Wiki at https://github.com/openeduhub/oeh-search-etl/wiki \ No newline at end of file From 8a8f047176946e10f7c648210aa9b2664d2ec790 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:53:56 +0200 Subject: [PATCH 555/590] fix: missing dependencies (pydantic) in requirements.txt - chore: update 'certifi' dependency to v2024.08.30 - update pyproject.toml version Attention: - 'poetry export' does not include the "edu_sharing_client" dependency group by default - to export all dependencies to requirements.txt, you need use the "--with edu_sharing_client" parameter with the poetry "export"-command (see: https://python-poetry.org/docs/cli/#export) --- poetry.lock | 20 ++++++++++---------- pyproject.toml | 4 ++-- requirements.txt | 9 ++++++--- 3 files changed, 18 insertions(+), 15 deletions(-) diff --git a/poetry.lock b/poetry.lock index 952b3fdd..4b98392f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -180,13 +180,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -1143,13 +1143,13 @@ source = ["Cython (>=3.0.11)"] [[package]] name = "lxml-html-clean" -version = "0.2.0" +version = "0.2.2" description = "HTML cleaner from lxml project" optional = false python-versions = "*" files = [ - {file = "lxml_html_clean-0.2.0-py3-none-any.whl", hash = "sha256:80bdc730b288b8e68f0bf86b99f4bbef129c5ec59b694c6681422be4c1eeb3c5"}, - {file = "lxml_html_clean-0.2.0.tar.gz", hash = "sha256:47c323f39d95d4cbf4956da62929c89a79313074467efaa4821013c97bf95628"}, + {file = "lxml_html_clean-0.2.2-py3-none-any.whl", hash = "sha256:177ebe822b39d1b68df7c0c34ba005cb087b23d3791dae87efb3a2bb162ef398"}, + {file = "lxml_html_clean-0.2.2.tar.gz", hash = "sha256:cc34178e34673025c49c3d7f4bd48754e9e4b23875df2308f43c21733d8437fb"}, ] [package.dependencies] @@ -2039,13 +2039,13 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "74.0.0" +version = "74.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, + {file = "setuptools-74.1.0-py3-none-any.whl", hash = "sha256:cee604bd76cc092355a4e43ec17aee5369095974f41f088676724dc6bc2c9ef8"}, + {file = "setuptools-74.1.0.tar.gz", hash = "sha256:bea195a800f510ba3a2bc65645c88b7e016fe36709fefc58a880c4ae8a0138d7"}, ] [package.extras] @@ -2378,4 +2378,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "e595007c4660eb0b86f8fd6da0aa517f526e7168b8da697ba0ae391b5f97645b" +content-hash = "4e5368d8a12001350635f27f1992f386d7a0da79dd16187327c29d1a05a1c782" diff --git a/pyproject.toml b/pyproject.toml index a29792e0..2a8c1f6c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,7 +49,7 @@ exclude = ''' [tool.poetry] name = "oeh-search-etl" -version = "2023.08.01" +version = "2024.09.03" description = "Crawls educational sites for use in WirLernenOnline.de" authors = ["Torsten Simon "] maintainers = [ @@ -62,7 +62,7 @@ packages = [{include = "converter"}] python = "^3.10" wheel = "0.44.0" black = "24.8.0" -certifi="2024.7.4" +certifi="2024.8.30" dateparser="1.2" extruct="0.17.0" flake8 = "7.1.1" diff --git a/requirements.txt b/requirements.txt index 9f033614..c6218ac3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +annotated-types==0.7.0 ; python_version >= "3.10" and python_version < "4.0" anyio==4.4.0 ; python_version >= "3.10" and python_version < "4.0" asgiref==3.8.1 ; python_version >= "3.10" and python_version < "4.0" async-lru==2.0.4 ; python_version >= "3.10" and python_version < "4.0" @@ -6,7 +7,7 @@ automat==24.8.1 ; python_version >= "3.10" and python_version < "4.0" babel==2.15.0 ; python_version >= 
"3.10" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" black==24.8.0 ; python_version >= "3.10" and python_version < "4.0" -certifi==2024.7.4 ; python_version >= "3.10" and python_version < "4.0" +certifi==2024.8.30 ; python_version >= "3.10" and python_version < "4.0" cffi==1.17.0 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" @@ -43,7 +44,7 @@ jstyleson==0.0.2 ; python_version >= "3.10" and python_version < "4.0" justext==3.0.1 ; python_version >= "3.10" and python_version < "4.0" langcodes[data]==3.4.0 ; python_version >= "3.10" and python_version < "4.0" language-data==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -lxml-html-clean==0.2.0 ; python_version >= "3.10" and python_version < "4.0" +lxml-html-clean==0.2.2 ; python_version >= "3.10" and python_version < "4.0" lxml==5.3.0 ; python_version >= "3.10" and python_version < "4.0" lxml[html-clean]==5.3.0 ; python_version >= "3.10" and python_version < "4.0" marisa-trie==1.2.0 ; python_version >= "3.10" and python_version < "4.0" @@ -62,6 +63,8 @@ pyasn1-modules==0.4.0 ; python_version >= "3.10" and python_version < "4.0" pyasn1==0.6.0 ; python_version >= "3.10" and python_version < "4.0" pycodestyle==2.12.1 ; python_version >= "3.10" and python_version < "4.0" pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" +pydantic-core==2.20.1 ; python_version >= "3.10" and python_version < "4.0" +pydantic==2.8.2 ; python_version >= "3.10" and python_version < "4.0" pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" pyee==11.1.0 ; python_version >= "3.10" and python_version < "4.0" pyflakes==3.2.0 ; python_version >= "3.10" and python_version < "4.0" @@ -81,7 +84,7 @@ requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" scrapy==2.11.2 ; python_version >= "3.10" and python_version < "4.0" service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==74.0.0 ; python_version >= "3.10" and python_version < "4.0" +setuptools==74.1.0 ; python_version >= "3.10" and python_version < "4.0" six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" soupsieve==2.6 ; python_version >= "3.10" and python_version < "4.0" From d0238b2e834d5436c195abe1c00d064322ff03fc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:01:16 +0200 Subject: [PATCH 556/590] change: GitHub workflow from Python 3.10 to 3.12 --- .github/workflows/python.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python.yaml b/.github/workflows/python.yaml index fbf78619..10fe4ee8 100644 --- a/.github/workflows/python.yaml +++ b/.github/workflows/python.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.10"] + python-version: ["3.12"] steps: - uses: actions/checkout@v3 From 8b5485a5d4d995b02d71d89be529437d51a00f15 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:18:56 +0200 Subject: [PATCH 557/590] change/logging: 
edu-sharing API client init fallback when "services" list is empty - early edu-sharing v9.x versions didn't provide a list of services in the initial about["services"] API response, which was a confirmed bug and has been fixed - since the fallback is no longer necessary in "healthy" edu-sharing versions, a logging message will be shown (to make devs aware when the fallback is active) --- converter/es_connector.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 2bd91334..cac76298 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -815,13 +815,16 @@ def init_api_client(self): EduSharing.nodeApi = NODEV1Api(EduSharing.apiClient) about = EduSharing.aboutApi.about() if "services" in about and about["services"]: - # edu-sharing API v6.x to v8.1 behavior: looking for the BULK v1 API "version"-dict + # edu-sharing API v6.x to v9.1 behavior: look for the BULK v1 API "version"-dict EduSharing.version = \ list(filter(lambda x: x["name"] == "BULK", about["services"]))[0]["instances"][0]["version"] - elif "version" in about and about["version"]: - # edu-sharing API v9.x behavior: - # we expect a "version"-dict to exist within the "about"-dict that might look like this: + elif "services" in about and not about["services"] and "version" in about and about["version"]: + # edu-sharing API v9.x workaround: + # if about["services"] is an empty list (instead of the expected list[dict]), + # we're falling back to the about["version"]-dict that might look like this: # {'major': 1, 'minor': 1, 'renderservice': '9.0', 'repository': '9.0'} + log.info(f"Failed to retrieve BULK v1 API version from edu-sharing during APi client init: " + f"about['services'] was empty (expected: list[dict]). Using about['version'] fallback...") EduSharing.version = about["version"] version_str: str = f"{EduSharing.version['major']}.{EduSharing.version['minor']}" if ( From 1f74f23aa2dfc7778093c77e42b6917a9e1fd243 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:52:20 +0200 Subject: [PATCH 558/590] fix: ValidationError for int values in "CourseItem.course_duration" - according to the edu-sharing API client, the "cclom:typicallearningtime"-values should be string values, not integers (as previously assumed) - es_connector checks if the provided value is either of type Integer, String or None and only saves valid (numeric) values to "cclom:typicallearningtime", wrapped as a string --- converter/es_connector.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index cac76298..b2b251da 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -594,18 +594,19 @@ def transform_item(self, uuid, spider, item): if "course_description_short" in item["course"]: spaces["ccm:oeh_course_description_short"] = item["course"]["course_description_short"] if "course_duration" in item["course"]: - course_duration: int | str = item["course"]["course_duration"] - if course_duration and isinstance(course_duration, str) and course_duration.isnumeric(): - # convert strings to int values - course_duration = int(course_duration) - if course_duration and isinstance(course_duration, int): - # edu-sharing property 'cclom:typicallearningtime' expects values in ms! 
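# A standalone sketch of the conversion this hunk performs (the example value "90" is invented):
# crawlers may deliver 'course_duration' either as an int or as a numeric string, in seconds.
course_duration = "90"
if (course_duration and isinstance(course_duration, str) and course_duration.isnumeric()
        or course_duration and isinstance(course_duration, int)):
    course_duration_in_ms = int(course_duration) * 1000  # seconds -> milliseconds (90 -> 90000)
    course_duration = str(course_duration_in_ms)          # the edu-sharing API client expects "90000" as a string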
- course_duration_in_ms: int = int(course_duration * 1000) - item["course"]["course_duration"] = course_duration_in_ms + course_duration: int | str | None = item["course"]["course_duration"] + if (course_duration and isinstance(course_duration, str) and course_duration.isnumeric() + or course_duration and isinstance(course_duration, int)): + # if course_duration is of type int, we assume it's a value in seconds. + # the edu-sharing property 'cclom:typicallearningtime' expects values in ms: + course_duration_in_ms: int = int(course_duration) * 1000 + # the edu-sharing API expects a string value, otherwise we'd encounter pydantic ValidationErrors: + course_duration = str(course_duration_in_ms) + item["course"]["course_duration"] = course_duration spaces["cclom:typicallearningtime"] = item["course"]["course_duration"] else: log.warning(f"Could not transform 'course_duration' {course_duration} to ms. " - f"Expected int (seconds), but received type {type(course_duration)} instead.") + f"Expected seconds (type: int), but received type {type(course_duration)} instead.") if "course_learningoutcome" in item["course"]: course_learning_outcome: list[str] = item["course"]["course_learningoutcome"] if course_learning_outcome and isinstance(course_learning_outcome, list): From bb321bba96d4362d7200a1494c8388f303c2d5cf Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:03:35 +0200 Subject: [PATCH 559/590] feat: enable EduSharingTypeValidationPipeline - the new edu-sharing API client does rigorous Type-Checks before submitting items to the repository, which causes pydantic "ValidationError"s for some metadata properties which haven't been normalized previously - example 1: when a crawler collects a set[str] of keywords (to prevent duplicate entries), the pipeline will convert the set[str] to a list[str] before trying to submit it via the REST API - example 2: time- or age-related properties (e.g. "typicalLearningTime", "typicalAgeRange") might cause ValidationErrors when the crawler collects these values as Integers, but the edu-sharing API expects the value to be wrapped in a string - (this pipeline will be expanded over time as more edge-cases arise) --- converter/pipelines.py | 37 +++++++++++++++++++++++++++++++++++++ converter/settings.py | 1 + 2 files changed, 38 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 5607a6fd..2cf2cc0f 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -948,6 +948,43 @@ def process_item(self, raw_item, spider): # raise DropItem() return raw_item +class EduSharingTypeValidationPipeline(BasicPipeline): + """ + Rudimentary type-conversion before handling metadata properties off to the API client. + """ + # ToDo: if you notice pydantic "ValidationError"s during crawls, implement handling of those edge-cases here! 
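    # Sketch of the intended conversions, with hand-built example values (the real pipeline
    # operates on ItemAdapter-wrapped scrapy Items, so this is only an approximation):
    #   lom.general.keyword:                 {"physik", "optik"}       ->  ["physik", "optik"]  (set -> list)
    #   lom.educational.typicalLearningTime: 3600                      ->  "3600"               (int -> str)
    #   lom.educational.typicalAgeRange:     fromRange 10 / toRange 14 ->  "10" / "14"          (int -> str)
    #   course.course_duration:              5400                      ->  "5400"               (int -> str)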
+ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: + item_adapter = ItemAdapter(item) + if "course" in item_adapter: + course_item: dict = item_adapter["course"] + if "course_duration" in course_item: + course_duration: int = course_item["course_duration"] + if course_duration and isinstance(course_duration, int): + course_item["course_duration"] = str(course_duration) + if "lom" in item_adapter: + if "educational" in item_adapter["lom"]: + lom_educational: dict = item_adapter["lom"]["educational"] + if "typicalLearningTime" in lom_educational: + typical_learning_time: int | str | None = lom_educational["typicalLearningTime"] + if typical_learning_time and isinstance(typical_learning_time, int): + lom_educational["typicalLearningTime"] = str(typical_learning_time) + if "typicalAgeRange" in lom_educational: + if "fromRange" in lom_educational["typicalAgeRange"]: + from_range: int | str | None = lom_educational["typicalAgeRange"]["fromRange"] + if from_range and isinstance(from_range, int): + lom_educational["typicalAgeRange"]["fromRange"] = str(from_range) + if "toRange" in lom_educational["typicalAgeRange"]: + to_range: int | str | None = lom_educational["typicalAgeRange"]["toRange"] + if to_range and isinstance(to_range, int): + lom_educational["typicalAgeRange"]["toRange"] = str(to_range) + if "general" in item_adapter["lom"]: + lom_general: dict = item_adapter["lom"]["general"] + if "keyword" in lom_general: + keywords: list[str] | set[str] | None = lom_general["keyword"] + if keywords and isinstance(keywords, set): + lom_general["keyword"] = list(keywords) + return item + class JSONStorePipeline(BasicPipeline, PipelineWithPerSpiderMethods): def __init__(self): diff --git a/converter/settings.py b/converter/settings.py index 71b05baa..254b665b 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -133,6 +133,7 @@ "converter.pipelines.ProcessValuespacePipeline": 250, "converter.pipelines.CourseItemPipeline": 275, "converter.pipelines.ProcessThumbnailPipeline": 300, + "converter.pipelines.EduSharingTypeValidationPipeline": 325, ( "converter.pipelines.DummyPipeline" if storeMode == "None" From f5475e231a0711ff56271a02c4a686185d18dba0 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 3 Sep 2024 19:08:24 +0200 Subject: [PATCH 560/590] change: remove outdated swagger config json --- edu-sharing-swagger.config.json | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 edu-sharing-swagger.config.json diff --git a/edu-sharing-swagger.config.json b/edu-sharing-swagger.config.json deleted file mode 100644 index 545aed84..00000000 --- a/edu-sharing-swagger.config.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "packageName":"edu_sharing_client", - "projectName":"edu-sharing-client", - "packageVersion":"6.0-DEV" - } \ No newline at end of file From 3176cb9c33f9aa2d0f19e0311e75ada3b895a487 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Tue, 3 Sep 2024 22:20:37 +0200 Subject: [PATCH 561/590] build: DEPRECATE requirements.txt in favor of poetry builds - change: upgrade dockerfile to Python 3.12.5 - change: add (local) edu_sharing_client package to dockerfile before trying to install dependencies - this change was necessary to guarantee that fresh development environments can use the edu-sharing API client without "ModuleNotFoundError"s - chore: update dependencies - chore: update pyproject.toml to reflect Python 3.12 requirement GitHub Workflows: - build: skip .venv/ and 
edu_sharing_client directories during flake8 checks - build: use poetry to run flake8 and pytest - build: install dependencies with poetry in GitHub workflow --- .github/workflows/python.yaml | 19 +++--- Dockerfile | 9 +-- poetry.lock | 65 ++++++++------------- pyproject.toml | 10 ++-- requirements.txt | 106 ---------------------------------- 5 files changed, 45 insertions(+), 164 deletions(-) delete mode 100644 requirements.txt diff --git a/.github/workflows/python.yaml b/.github/workflows/python.yaml index 10fe4ee8..576951e2 100644 --- a/.github/workflows/python.yaml +++ b/.github/workflows/python.yaml @@ -33,17 +33,22 @@ jobs: restore-keys: | ${{ runner.os }}-pip- ${{ runner.os }}- - - name: Install dependencies + - name: Install Poetry via pip run: | python -m pip install --upgrade pip - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi - - name: Lint with flake8 + python -m pip install poetry + - name: Configure Poetry to use in-project .venv + run: | + python -m poetry config virtualenvs.in-project true + - name: Install Dependencies with Poetry + run: | + python -m poetry install + - name: Lint with flake8 (via Poetry) run: | # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics + poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --exclude=.venv/,edu_sharing_openapi/ # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --exclude=.venv/,edu_sharing_openapi/ - name: Test with pytest run: | - pytest + poetry run pytest diff --git a/Dockerfile b/Dockerfile index 88060761..e5ca18c1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,17 +1,18 @@ -FROM python:3.12.4-slim-bookworm +FROM python:3.12.5-slim-bookworm # ENV CRAWLER wirlernenonline_spider WORKDIR / COPY entrypoint.sh entrypoint.sh -COPY requirements.txt requirements.txt -RUN pip3 install -r requirements.txt +COPY edu_sharing_openapi/ edu_sharing_openapi/ +COPY pyproject.toml poetry.lock ./ +RUN pip3 install poetry +RUN poetry install COPY scrapy.cfg scrapy.cfg COPY setup.cfg setup.cfg COPY converter/ converter/ COPY csv/ csv/ -COPY edu_sharing_openapi/ edu_sharing_openapi/ COPY valuespace_converter/ valuespace_converter/ diff --git a/poetry.lock b/poetry.lock index 4b98392f..4a68ab20 100644 --- a/poetry.lock +++ b/poetry.lock @@ -23,10 +23,8 @@ files = [ ] [package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] @@ -44,9 +42,6 @@ files = [ {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, ] -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] @@ -61,9 +56,6 @@ files = [ {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = 
"python_version < \"3.11\""} - [[package]] name = "attrs" version = "24.2.0" @@ -169,8 +161,6 @@ mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -517,13 +507,13 @@ files = [ [[package]] name = "django" -version = "5.1" +version = "5.1.1" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.10" files = [ - {file = "Django-5.1-py3-none-any.whl", hash = "sha256:d3b811bf5371a26def053d7ee42a9df1267ef7622323fe70a601936725aa4557"}, - {file = "Django-5.1.tar.gz", hash = "sha256:848a5980e8efb76eea70872fb0e4bc5e371619c70fffbe48e3e1b50b2c09455d"}, + {file = "Django-5.1.1-py3-none-any.whl", hash = "sha256:71603f27dac22a6533fb38d83072eea9ddb4017fead6f67f2562a40402d61c3f"}, + {file = "Django-5.1.1.tar.gz", hash = "sha256:021ffb7fdab3d2d388bc8c7c2434eb9c1f6f4d09e6119010bbb1694dda286bc2"}, ] [package.dependencies] @@ -536,18 +526,23 @@ argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] [[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" +name = "edu-sharing-client" +version = "1.0.0" +description = "edu-sharing Repository REST API" optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] +python-versions = "^3.12" +files = [] +develop = false -[package.extras] -test = ["pytest (>=6)"] +[package.dependencies] +pydantic = ">=2" +python-dateutil = ">=2.8.2" +typing-extensions = ">=4.7.1" +urllib3 = ">= 1.25.3" + +[package.source] +type = "directory" +url = "edu_sharing_openapi" [[package]] name = "extruct" @@ -855,7 +850,6 @@ files = [ [package.dependencies] setuptools = ">=61.0" -tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] scripts = ["click (>=6.0)"] @@ -1767,11 +1761,9 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -2039,13 +2031,13 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "74.1.0" +version = "74.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.1.0-py3-none-any.whl", hash = "sha256:cee604bd76cc092355a4e43ec17aee5369095974f41f088676724dc6bc2c9ef8"}, - {file = "setuptools-74.1.0.tar.gz", hash = "sha256:bea195a800f510ba3a2bc65645c88b7e016fe36709fefc58a880c4ae8a0138d7"}, + {file = "setuptools-74.1.1-py3-none-any.whl", hash = "sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766"}, + {file = "setuptools-74.1.1.tar.gz", hash = "sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847"}, ] [package.extras] @@ 
-2137,17 +2129,6 @@ requests-file = ">=1.4" release = ["build", "twine"] testing = ["black", "mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "syrupy", "tox", "types-filelock", "types-requests"] -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "trafilatura" version = "1.12.1" @@ -2377,5 +2358,5 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" -python-versions = "^3.10" -content-hash = "4e5368d8a12001350635f27f1992f386d7a0da79dd16187327c29d1a05a1c782" +python-versions = "^3.12" +content-hash = "69894e003525a1415ae92c18d70267f6a87142e03036b6c20b1f5eb6d270b741" diff --git a/pyproject.toml b/pyproject.toml index 2a8c1f6c..2fe4a15e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ maintainers = [ "Andreas Schnäpp <981166+Criamos@users.noreply.github.com>" ] readme = "README.md" -python = "^3.11" +python = "^3.12" homepage = "https://github.com/openeduhub/oeh-search-etl" repository = "https://github.com/openeduhub/oeh-search-etl" documentation = "https://github.com/openeduhub/oeh-search-etl" @@ -17,7 +17,6 @@ keywords = ["metadata", "oer", "crawl", "wirlernenonline"] classifiers = [ "Framework :: Scrapy", "Development Status :: 4 - Beta", - "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Education :: Testing", "Topic :: Internet :: WWW/HTTP :: Indexing/Search", @@ -25,7 +24,7 @@ classifiers = [ [tool.black] line-length = 120 -target-version = ['py311'] +target-version = ['py312'] include = '\.pyi?$' exclude = ''' @@ -59,7 +58,7 @@ readme = "Readme.md" packages = [{include = "converter"}] [tool.poetry.dependencies] -python = "^3.10" +python = "^3.12" wheel = "0.44.0" black = "24.8.0" certifi="2024.8.30" @@ -81,7 +80,6 @@ requests="2.32.3" six="1.16.0" Scrapy="2.11.2" scrapy-splash="0.9.0" -urllib3="2.2.2" vobject="0.9.7" w3lib="2.2.1" xmltodict="0.13.0" @@ -90,12 +88,14 @@ babel = "2.15.0" langcodes = {extras = ["data"], version = "^3.3.0"} httpx = "0.27.2" async-lru = "2.0.4" +urllib3 = "^2.2.2" [tool.poetry.group.edu_sharing_client.dependencies] # these dependencies are used (and automatically generated) by the "openapi-generator-cli"-generated client # see: /edu_sharing_openapi/pyproject.toml pydantic = ">=2.8.2" typing-extensions = ">=4.12.2" +edu-sharing-client = {path = "edu_sharing_openapi"} [build-system] requires = ["poetry-core"] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index c6218ac3..00000000 --- a/requirements.txt +++ /dev/null @@ -1,106 +0,0 @@ -annotated-types==0.7.0 ; python_version >= "3.10" and python_version < "4.0" -anyio==4.4.0 ; python_version >= "3.10" and python_version < "4.0" -asgiref==3.8.1 ; python_version >= "3.10" and python_version < "4.0" -async-lru==2.0.4 ; python_version >= "3.10" and python_version < "4.0" -attrs==24.2.0 ; python_version >= "3.10" and python_version < "4.0" -automat==24.8.1 ; python_version >= "3.10" and python_version < "4.0" -babel==2.15.0 ; python_version >= "3.10" and python_version < "4.0" -beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" -black==24.8.0 ; python_version >= "3.10" and python_version < 
"4.0" -certifi==2024.8.30 ; python_version >= "3.10" and python_version < "4.0" -cffi==1.17.0 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" -charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" -click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" -colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and (sys_platform == "win32" or platform_system == "Windows") -constantly==23.10.4 ; python_version >= "3.10" and python_version < "4.0" -courlan==1.3.0 ; python_version >= "3.10" and python_version < "4.0" -cryptography==43.0.0 ; python_version >= "3.10" and python_version < "4.0" -cssselect==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -dateparser==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -defusedxml==0.7.1 ; python_version >= "3.10" and python_version < "4.0" -django==5.1 ; python_version >= "3.10" and python_version < "4.0" -exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" -extruct==0.17.0 ; python_version >= "3.10" and python_version < "4.0" -filelock==3.15.4 ; python_version >= "3.10" and python_version < "4.0" -flake8==7.1.1 ; python_version >= "3.10" and python_version < "4.0" -greenlet==3.0.3 ; python_version >= "3.10" and python_version < "4.0" -h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" -html-text==0.6.2 ; python_version >= "3.10" and python_version < "4.0" -html2text==2024.2.26 ; python_version >= "3.10" and python_version < "4.0" -html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" -htmldate==1.9.0 ; python_version >= "3.10" and python_version < "4.0" -httpcore==1.0.5 ; python_version >= "3.10" and python_version < "4.0" -httpx==0.27.2 ; python_version >= "3.10" and python_version < "4.0" -hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" -idna==3.8 ; python_version >= "3.10" and python_version < "4.0" -image==1.5.33 ; python_version >= "3.10" and python_version < "4.0" -incremental==24.7.2 ; python_version >= "3.10" and python_version < "4.0" -iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" -isodate==0.6.1 ; python_version >= "3.10" and python_version < "4.0" -itemadapter==0.9.0 ; python_version >= "3.10" and python_version < "4.0" -itemloaders==1.3.1 ; python_version >= "3.10" and python_version < "4.0" -jmespath==1.0.1 ; python_version >= "3.10" and python_version < "4.0" -jstyleson==0.0.2 ; python_version >= "3.10" and python_version < "4.0" -justext==3.0.1 ; python_version >= "3.10" and python_version < "4.0" -langcodes[data]==3.4.0 ; python_version >= "3.10" and python_version < "4.0" -language-data==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -lxml-html-clean==0.2.2 ; python_version >= "3.10" and python_version < "4.0" -lxml==5.3.0 ; python_version >= "3.10" and python_version < "4.0" -lxml[html-clean]==5.3.0 ; python_version >= "3.10" and python_version < "4.0" -marisa-trie==1.2.0 ; python_version >= "3.10" and python_version < "4.0" -mccabe==0.7.0 ; python_version >= "3.10" and python_version < "4.0" -mf2py==2.0.1 ; python_version >= "3.10" and python_version < "4.0" -mypy-extensions==1.0.0 ; python_version >= "3.10" and python_version < "4.0" -packaging==24.1 ; python_version >= "3.10" and python_version < "4.0" -parsel==1.9.1 ; python_version >= "3.10" and python_version < "4.0" -pathspec==0.12.1 ; python_version >= "3.10" and python_version < "4.0" -pillow==10.3.0 ; python_version >= "3.10" and 
python_version < "4.0" -platformdirs==4.2.2 ; python_version >= "3.10" and python_version < "4.0" -playwright==1.44.0 ; python_version >= "3.10" and python_version < "4.0" -pluggy==1.5.0 ; python_version >= "3.10" and python_version < "4.0" -protego==0.3.1 ; python_version >= "3.10" and python_version < "4.0" -pyasn1-modules==0.4.0 ; python_version >= "3.10" and python_version < "4.0" -pyasn1==0.6.0 ; python_version >= "3.10" and python_version < "4.0" -pycodestyle==2.12.1 ; python_version >= "3.10" and python_version < "4.0" -pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" -pydantic-core==2.20.1 ; python_version >= "3.10" and python_version < "4.0" -pydantic==2.8.2 ; python_version >= "3.10" and python_version < "4.0" -pydispatcher==2.0.7 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "CPython" -pyee==11.1.0 ; python_version >= "3.10" and python_version < "4.0" -pyflakes==3.2.0 ; python_version >= "3.10" and python_version < "4.0" -pyopenssl==24.2.1 ; python_version >= "3.10" and python_version < "4.0" -pyparsing==3.1.4 ; python_version >= "3.10" and python_version < "4.0" -pypydispatcher==2.1.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation == "PyPy" -pyrdfa3==3.6.4 ; python_version >= "3.10" and python_version < "4.0" -pytest==8.3.2 ; python_version >= "3.10" and python_version < "4.0" -python-dateutil==2.9.0.post0 ; python_version >= "3.10" and python_version < "4.0" -python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" -pytz==2024.1 ; python_version >= "3.10" and python_version < "4.0" -queuelib==1.7.0 ; python_version >= "3.10" and python_version < "4.0" -rdflib==7.0.0 ; python_version >= "3.10" and python_version < "4.0" -regex==2024.7.24 ; python_version >= "3.10" and python_version < "4.0" -requests-file==2.1.0 ; python_version >= "3.10" and python_version < "4.0" -requests==2.32.3 ; python_version >= "3.10" and python_version < "4.0" -scrapy-splash==0.9.0 ; python_version >= "3.10" and python_version < "4.0" -scrapy==2.11.2 ; python_version >= "3.10" and python_version < "4.0" -service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" -setuptools==74.1.0 ; python_version >= "3.10" and python_version < "4.0" -six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" -sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" -soupsieve==2.6 ; python_version >= "3.10" and python_version < "4.0" -sqlparse==0.5.1 ; python_version >= "3.10" and python_version < "4.0" -tld==0.13 ; python_version >= "3.10" and python_version < "4" -tldextract==5.1.2 ; python_version >= "3.10" and python_version < "4.0" -tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" -trafilatura==1.12.1 ; python_version >= "3.10" and python_version < "4.0" -twisted==24.7.0 ; python_version >= "3.10" and python_version < "4.0" -typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" -tzdata==2024.1 ; python_version >= "3.10" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") -tzlocal==5.2 ; python_version >= "3.10" and python_version < "4.0" -urllib3==2.2.2 ; python_version >= "3.10" and python_version < "4.0" -vobject==0.9.7 ; python_version >= "3.10" and python_version < "4.0" -w3lib==2.2.1 ; python_version >= "3.10" and python_version < "4.0" -webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" 
-wheel==0.44.0 ; python_version >= "3.10" and python_version < "4.0" -xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" -zope-interface==7.0.3 ; python_version >= "3.10" and python_version < "4.0" From 936380716db1895f4a604d64c2af62b537049813 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 4 Sep 2024 09:38:10 +0200 Subject: [PATCH 562/590] change: use poetry to run shellscript --- entrypoint.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/entrypoint.sh b/entrypoint.sh index 9be1348f..c0800d10 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -2,8 +2,8 @@ if [ -z "$ARGS" ] then - scrapy crawl "$CRAWLER" + poetry run scrapy crawl "$CRAWLER" else - scrapy crawl -a "$ARGS" "$CRAWLER" + poetry run scrapy crawl -a "$ARGS" "$CRAWLER" fi From 09c4d712d3b06b0cb2cb4344d6388ae168200d05 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 4 Sep 2024 11:32:49 +0200 Subject: [PATCH 563/590] docs: update README - remove mention of requirements.txt (since this installation method is deprecated and error-prone) --- Readme.md | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/Readme.md b/Readme.md index 0764a635..96768d7a 100644 --- a/Readme.md +++ b/Readme.md @@ -1,9 +1,9 @@ # Open Edu Hub Search ETL -## Step 1: Project Setup - Python (manual approach) +## Step 1: Project Setup - Python 3.12 (manual approach) - make sure you have python3 installed () - - (Python 3.10 or newer is required) + - (Python 3.12 or newer is required) - go to project root - Run the following commands: @@ -16,15 +16,19 @@ python3 -m venv .venv `.venv\Scripts\activate.bat` (on Windows) -`pip3 install -r requirements.txt` +`pip3 install poetry` + +`poetry install` ## Step 1 (alternative): Project Setup - Python (automated, via `poetry`) - Step 1: Make sure that you have [Poetry](https://python-poetry.org) v1.5.0+ installed -- Step 2: Open your terminal in the project root directory: - - Step 2.1: (this is an optional, strictly personal preference) If you want to have your `.venv` to be created in the project root directory: + - for detailed instructions, please consult the [Poetry Installation Guide](https://python-poetry.org/docs/#installation) +- Step 2: Open your terminal **in the project root directory**: + - Step 2.1: If you want to have your `.venv` to be created inside the project root directory: - `poetry config virtualenvs.in-project true` -- Step 3: Install dependencies (according to `pyproject.toml`) by running: `poetry install` + - *(this is an optional, strictly personal preference)* +- Step 3: **Install dependencies** (according to `pyproject.toml`) by running: `poetry install` ## Step 2: Project Setup - required Docker Containers If you have Docker installed, use `docker-compose up` to start up the multi-container for `Splash` and `Playwright`-integration. @@ -58,7 +62,7 @@ docker compose up - To create a new spider, create a file inside `converter/spiders/_spider.py` - We recommend inheriting the `LomBase` class in order to get out-of-the-box support for our metadata model - You may also Inherit a Base Class for crawling data, if your site provides LRMI metadata, the `LrmiBase` is a good start. 
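A bare-bones spider, sketched here only to make those steps concrete (the class name, URL and metadata handling below are invented, reduced to the minimum the `LomBase` workflow expects; real spiders implement considerably more of the LOM methods):

```python
import scrapy

from converter.spiders.base_classes.lom_base import LomBase


class ExampleSpider(scrapy.Spider, LomBase):
    name = "example_spider"           # invented id; crawler names end in "_spider"
    friendlyName = "Example Source"
    url = "https://example.org/"
    version = "0.1.0"                 # bump to force re-imports via getHash()

    def __init__(self, **kwargs):
        LomBase.__init__(self, **kwargs)

    def start_requests(self):
        yield scrapy.Request(self.url, callback=self.parse)

    async def parse(self, response):
        # LomBase.parse() assembles the item from the get*() methods below
        yield await LomBase.parse(self, response)

    def getId(self, response=None) -> str:
        return response.url

    def getHash(self, response=None) -> str:
        # simplistic: combines the spider version with the source URL
        return self.version + response.url

    def getLOMGeneral(self, response=None):
        general = LomBase.getLOMGeneral(self, response)
        general.add_value("title", response.xpath("//title/text()").get())
        return general
```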
If your system provides an OAI interface, you may use the `OAIBase` -- As a sample/template, please take a look at the `sample_spider.py` +- As a sample/template, please take a look at the `sample_spider.py` or `sample_spider_alternative.py` - To learn more about the LOM standard we're using, you'll find useful information at https://en.wikipedia.org/wiki/Learning_object_metadata # Still have questions? Check out our GitHub-Wiki! From b450ee88935fd2749e7e014ee44d83f2a753ae82 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Mon, 9 Sep 2024 10:59:38 +0200 Subject: [PATCH 564/590] fix merge issues --- .gitignore | 4 +- converter/es_connector.py | 3 - converter/items.py | 20 -- converter/pipelines.py | 1 - converter/settings.py | 4 - converter/spiders/base_classes/lom_base.py | 10 - .../spiders/mediothek_pixiothek_spider.py | 300 ++---------------- converter/spiders/merlin_spider.py | 194 ++++------- converter/spiders/oeh_spider.py | 9 - .../spiders/utils/spider_name_converter.py | 50 --- 10 files changed, 82 insertions(+), 513 deletions(-) delete mode 100644 converter/spiders/utils/spider_name_converter.py diff --git a/.gitignore b/.gitignore index 1c6676ec..4610703f 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,4 @@ __pycache__/ .venv/ .env -nohups/ -nohup.out -out +out \ No newline at end of file diff --git a/converter/es_connector.py b/converter/es_connector.py index ed5703ab..b2b251da 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -373,14 +373,11 @@ def transform_item(self, uuid, spider, item): # - if a property has the value None, either delete the property or don't store it! spaces = { "ccm:replicationsource": spider.name, - "ccm:replicationsource_DISPLAYNAME": get_spider_friendly_name(spider.name), "ccm:replicationsourceid": item["sourceId"], "ccm:replicationsourcehash": item["hash"], "ccm:replicationsourceuuid": uuid, "cm:name": item["lom"]["general"]["title"], "cclom:title": item["lom"]["general"]["title"], - "ccm:hpi_lom_general_aggregationlevel": str(item["lom"]["general"]["aggregationLevel"]), - "ccm:hpi_searchable": str(item["searchable"]), } if "general" in item["lom"]: if "aggregationLevel" in item["lom"]["general"]: diff --git a/converter/items.py b/converter/items.py index 8ddfd504..dbe92621 100644 --- a/converter/items.py +++ b/converter/items.py @@ -207,18 +207,6 @@ class LomClassificationItem(Item): taxonPath = Field(output_processor=JoinMultivalues()) # ToDo: LOM classification 'taxonPath' has no equivalent property in edu-sharing, might be obsolete -class LomRelationResourceItem(Item): - identifier = Field(output_processor=JoinMultivalues()) - catalog = Field() - entry = Field() - description = Field() - -class LomRelationItem(Item): - """ - Following the LOM-DE.doc#7 (Relation) specifications: http://sodis.de/lom-de/LOM-DE.doc . 
- """ - kind = Field() - resource = Field(serializer=LomRelationResourceItem) class LomBaseItem(Item): """ @@ -516,14 +504,6 @@ class LomClassificationItemLoader(ItemLoader): default_item_class = LomClassificationItem default_output_processor = TakeFirst() -class LomRelationResourceItemLoader(ItemLoader): - default_item_class = LomRelationResourceItem - default_output_processor = TakeFirst() - -class LomRelationItemLoader(ItemLoader): - default_item_class = LomRelationItem - default_output_processor = TakeFirst() - class PermissionItemLoader(ItemLoader): default_item_class = PermissionItem diff --git a/converter/pipelines.py b/converter/pipelines.py index 612a9994..2cf2cc0f 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -823,7 +823,6 @@ async def process_item(self, raw_item, spider): log.warning(f"Could not read thumbnail at {url}: {str(e)} (falling back to screenshot)") raise e if "thumbnail" in item: - logging.warn("(falling back to " + ("defaultThumbnail" if "defaultThumbnail" in item else "screenshot") + ")") del item["thumbnail"] return await self.process_item(raw_item, spider) else: diff --git a/converter/settings.py b/converter/settings.py index fd87c609..254b665b 100644 --- a/converter/settings.py +++ b/converter/settings.py @@ -79,10 +79,6 @@ # See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay # See also autothrottle settings and docs DOWNLOAD_DELAY = 0 - -# Configure a delay between the parsing executions. (default: 0) -PARSE_DELAY = 0 - # The download delay setting will honor only one of: # CONCURRENT_REQUESTS_PER_DOMAIN = 16 # CONCURRENT_REQUESTS_PER_IP = 16 diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index 7dbb7cbe..e9e036bb 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -94,12 +94,6 @@ async def parse(self, response): if self.getId(response) is not None and self.getHash(response) is not None: if not self.hasChanged(response): return None - - # Avoid stressing the servers across calls of this method. - settings = get_project_settings() - if "PARSE_DELAY" in settings and float(settings.get('PARSE_DELAY')) > 0: - time.sleep(float(settings.get("PARSE_DELAY"))) - main = self.getBase(response) main.add_value("lom", self.getLOM(response).load_item()) main.add_value("valuespaces", self.getValuespaces(response).load_item()) @@ -159,7 +153,6 @@ def getLOM(self, response) -> LomBaseItemloader: lom.add_value("technical", self.getLOMTechnical(response).load_item()) lom.add_value("educational", self.getLOMEducational(response).load_item()) lom.add_value("classification", self.getLOMClassification(response).load_item()) - lom.add_value("relation", self.getLOMRelation(response).load_item()) return lom def getBase(self, response=None) -> BaseItemLoader: @@ -190,9 +183,6 @@ def getLicense(self, response=None) -> LicenseItemLoader: def getLOMClassification(self, response=None) -> LomClassificationItemLoader: return LomClassificationItemLoader(response=response) - def getLOMRelation(self, response=None) -> LomRelationItemLoader: - return LomRelationItemLoader(response=response) - def getPermissions(self, response=None) -> PermissionItemLoader: permissions = PermissionItemLoader(response=response) # default all materials to public, needs to be changed depending on the spider! 
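
For reference, the `LomBase.getPermissions` default shown above marks every item as public, and a spider overrides the method when its source carries licensing restrictions. Below is a minimal sketch of such an override, modelled on the Merlin/Mediothek permission handling elsewhere in this patch series; it assumes the `PermissionItem` fields those crawlers used (`groups`, `mediacenters`, `autoCreateGroups`) are still defined in `converter/items.py`, and the spider name, group name and mediacenter id are placeholders rather than names from this repository.

```python
from converter.items import PermissionItemLoader
from converter.spiders.base_classes import LomBase


class ExamplePermissionSpider(LomBase):
    """Illustrative only: shows the override pattern, not a real crawler."""

    name = "example_spider"

    def getPermissions(self, response=None) -> PermissionItemLoader:
        # Start from the LomBase default, which sets 'public' to True.
        permissions = LomBase.getPermissions(self, response)

        # Restrict the item so it is no longer world-readable ...
        permissions.replace_value("public", False)
        # ... and allow edu-sharing to create the referenced groups on the fly
        # (assumed field, as used by the earlier Merlin crawler).
        permissions.add_value("autoCreateGroups", True)

        # Placeholder values; a real spider derives these from the source's licensing data.
        permissions.add_value("groups", ["ExampleState-private"])
        permissions.add_value("mediacenters", ["example_mediacenter"])

        return permissions
```

Using `replace_value` for `public`, rather than a second `add_value`, keeps the loader from carrying both `True` and `False` for the same field.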
diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index aa9047d7..79316b46 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -9,10 +9,9 @@ class MediothekPixiothekSpider(CrawlSpider, LomBase): """ - This crawler fetches data from the Mediothek/Pixiothek. The API request sends all results in one page. The outcome - is an JSON array which will be parsed to their elements. + This crawler fetches data from the Mediothek/Pixiothek. The API request sends all results in one page. The outcome is an JSON array which will be parsed to their elements. - Author: Ioannis Koumarelas, ioannis.koumarelas@gmail.com , Schul-Cloud, Content team. + Author: Timur Yure, timur.yure@capgemini.com , Capgemini for Schul-Cloud, Content team. """ name = "mediothek_pixiothek_spider" @@ -45,6 +44,9 @@ async def parse(self, response: scrapy.http.TextResponse, **kwargs): copy_response.meta["item"] = element yield await LomBase.parse(self, response=copy_response) + # def _if_exists_add(self, edu_dict: dict, element_dict: dict, edu_attr: str, element_attr: str): + # if element_attr in element_dict: + # edu_dict[edu_attr] = element_dict[element_attr] def getId(self, response) -> str: # Element response as a Python dict. @@ -78,8 +80,6 @@ def getBase(self, response): # portal." base.add_value("thumbnail", element_dict["previewImageUrl"]) - base.add_value("searchable", element_dict.get("searchable", "0")) - return base def getLOMGeneral(self, response): @@ -88,10 +88,9 @@ def getLOMGeneral(self, response): # Element response as a Python dict. element_dict = response.meta["item"] - general.add_value("title", element_dict["title"]) - - general.add_value("aggregationLevel", element_dict["aggregation_level"]) - + # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? + # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel + general.add_value("title", element_dict["einzeltitel"]) # self._if_exists_add(general, element_dict, "description", "kurzinhalt") if "kurzinhalt" in element_dict: general.add_value("description", element_dict["kurzinhalt"]) @@ -140,268 +139,21 @@ def is_public(element_dict) -> bool: """ return element_dict["oeffentlich"] == "1" - def getPermissions(self, response): - """ - Licensing information is controlled via the 'oeffentlich' flag. When it is '1' it is available to the public, - otherwise only to Thuringia. Therefore, when the latter happens we set the public to private, and set the groups - and mediacenters accordingly. - """ - permissions = LomBase.getPermissions(self, response) - - # Self-explained. Only 1 media center in this case. - permissions.add_value("autoCreateGroups", True) - # permissions.add_value("autoCreateMediacenters", True) - - element_dict = response.meta["item"] - permissions.replace_value('public', False) - if "oeffentlich" in element_dict and element_dict["oeffentlich"] == "0": # private - permissions.add_value('groups', ['Thuringia-private']) - # permissions.add_value('mediacenters', [self.name]) # only 1 mediacenter. - else: - permissions.add_value('groups', ['Thuringia-public']) - - return permissions - - - def getLOMRelation(self, response=None) -> LomRelationItemLoader: - """ - Helps implement collections using relations as described in the LOM-DE.doc#7 (Relation) specifications: - http://sodis.de/lom-de/LOM-DE.doc . 
- """ - relation = LomBase.getLOMRelation(self, response) - - # Element response as a Python dict. - element_dict = response.meta["item"] - - relation.add_value("kind", element_dict["relation"][0]["kind"]) - - resource = LomRelationResourceItem() - resource["identifier"] = element_dict["relation"][0]["resource"]["identifier"] - relation.add_value("resource", resource) - - return relation - - def prepare_collections(self, prepared_elements): - """ - Prepares Mediothek and Pixiothek collections according to their strategies. - """ - mediothek_elements = [] - pixiothek_elements = [] - for element_dict in prepared_elements: - if element_dict["pixiothek"] == "1": - pixiothek_elements.append(element_dict) - else: - mediothek_elements.append(element_dict) - - pixiothek_elements_grouped, mediothek_elements = \ - self.group_pixiothek_elements(pixiothek_elements, mediothek_elements) - - mediothek_elements_grouped = self.group_mediothek_elements(mediothek_elements) - - collection_elements = [] - collection_elements.extend(pixiothek_elements_grouped) - collection_elements.extend(mediothek_elements_grouped) - - return collection_elements - - def group_by_elements(self, elements, group_by): - """ - This method groups the corresponding elements based on the provided group_by parameter. This changes the logic - so that every element in the end maps to an educational element in the https://www.schulportal-thueringen.de. - """ - groups = {} - for idx, element in enumerate(elements): - if group_by not in element: - logging.debug("Element " + str(element["id"]) + " does not contain information about " + group_by) - continue - group_by_value = element[group_by] - if group_by_value not in groups: - groups[group_by_value] = [] - groups[group_by_value].append(element) - - # For consistency sort all values per key. - for key in groups.keys(): - groups[key] = sorted(groups[key], key=lambda x: int(x["id"])) - - return groups - - def group_pixiothek_elements(self, pixiothek_elements, mediothek_elements): - """ - Collection elements in Pixiothek have a "parent" (representative) Mediothek element that describes the whole - collection. Our task in this method is for every Pixiothek group to find its Mediothek element and add the - connections between it and the Pixiothek elements. These Mediothek elements will not be considered as children - of Mediothek collections. - - If we cannot find such a "parent" element among the Mediothek elements, then we select one of them as the - collection parent (representative element) and set some of its attributes accordingly. - """ - - default_download_url = "https://www.schulportal-thueringen.de/html/images/" \ - "themes/tsp2/startseite/banner_phone_startseite.jpg?id=" - - mediothek_default_download_url = "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" - - pixiothek_elements_grouped_by = self.group_by_elements(pixiothek_elements, "serientitel") - - # Group Mediothek elements by einzeltitel. We are going to use this dictionary in the following loop to find - # Pixiothek items that have this value in their serientitel. - mediothek_elements_grouped_by_einzeltitel = self.group_by_elements(mediothek_elements, "einzeltitel") - - single_element_collection_serientitel = "Mediensammlungen zur freien Verwendung im Bildungsbereich" - - collection_elements = [] - - edusharing = EduSharing() - - # Keeping track of "parent" (representative) elements to remove them from the Mediothek elements. 
- parent_mediothek_elements = set() - - # Generate new "representative" (parent) element. - for group_by_key in sorted(pixiothek_elements_grouped_by.keys()): - group = pixiothek_elements_grouped_by[group_by_key] - serientitel = None - if "serientitel" in group[0]: - serientitel = group[0]["serientitel"] - - # If a single Mediothek element exists with the same einzeltitel as this group's serientitel, then we shall use it - # as the parent element of this collection. - if serientitel in mediothek_elements_grouped_by_einzeltitel and \ - len(mediothek_elements_grouped_by_einzeltitel[serientitel]) == 1 and \ - mediothek_elements_grouped_by_einzeltitel[serientitel][0]["id"] not in parent_mediothek_elements: # Is not used as a parent of another collection. - - parent_element = copy.deepcopy(mediothek_elements_grouped_by_einzeltitel[serientitel][0]) - parent_mediothek_elements.add(parent_element["id"]) - parent_element["title"] = parent_element["einzeltitel"] - parent_element["downloadUrl"] = mediothek_default_download_url + str(parent_element["mediumId"]) - - # If the found Mediothek element has a serientitel equal to a predefined value, which indicates that - # this is a collection item (which should normally be a parent and not a single element), we treat - # specially and set the title equal to the einzeltitel, which already describes the collection. - if parent_element["serientitel"] == single_element_collection_serientitel: - group.append(copy.deepcopy(mediothek_elements_grouped_by_einzeltitel[serientitel][0])) - - # Else, we shall use any random element of this group as the parent element. - else: - parent_element = copy.deepcopy(group[0]) - - # We need to assign a new ID, different from the previous ones. For this purpose, we decide to modify - # the ID of the existing element and add some suffix to note that this is an artificial element. - # Clearly, such a big number for an ID will have no collisions with existing real elements. - artificial_element_suffix = "000000" - parent_element["id"] = parent_element["id"] + artificial_element_suffix - - # Assign a fake URL that we can still recognize if we ever want to allow the access of the collection - # content. - parent_element["downloadUrl"] = default_download_url + parent_element["id"] - parent_element["title"] = parent_element["serientitel"] - - parent_element["searchable"] = 1 - parent_element["aggregation_level"] = 2 - parent_element["uuid"] = edusharing.buildUUID(parent_element["downloadUrl"]) - - for element in group: - element["searchable"] = 0 - element["aggregation_level"] = 1 - element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) - - element["title"] = element["dateiBezeichnung"] - - # Add connections from parent to children elements. - parent_element, group = self.relate_parent_with_children_elements(parent_element, group) - - collection_elements.append(parent_element) - collection_elements.extend(group) - - # Remove Mediothek elements which were used as parents. We go in reverse mode as only then the indices keep - # making sense as we keep deleting elements. The other way around, every time you delete an element the - # consequent indices are not valid anymore. - for i in reversed(range(len(mediothek_elements))): - if mediothek_elements[i]["id"] in parent_mediothek_elements: - del (mediothek_elements[i]) - - return collection_elements, mediothek_elements - - def group_mediothek_elements(self, mediothek_elements): - """ - Collection elements in Mediothek have no special element to represent them (a parent element). 
Therefore, we - select one of them as the collection representative (parent element) and set some of its attributes accordingly. - """ - mediothek_default_download_url = "https://www.schulportal-thueringen.de/web/guest/media/detail?tspi=" - - mediothek_elements_grouped_by = self.group_by_elements(mediothek_elements, "mediumNummer") - - # Specifies a special case when a - single_element_collection_serientitel = "Mediensammlungen zur freien Verwendung im Bildungsbereich" - - collection_elements = [] - - edusharing = EduSharing() # Used to generate UUIDs. - - # Generate new "parent" (representative) element. - for group_by_key in sorted(mediothek_elements_grouped_by.keys()): - group = mediothek_elements_grouped_by[group_by_key] - parent_element = copy.deepcopy(group[0]) - - # We need to assign a new ID, different from the previous ones. For this purpose, we decide to modify - # the ID of the existing element and add some suffix to note that this is an artificial element. - # Clearly, such a big number for an ID will have no collisions with existing real elements. - artificial_element_suffix = "000000" - parent_element["id"] = parent_element["id"] + artificial_element_suffix - - parent_element["downloadUrl"] = mediothek_default_download_url + str(parent_element["mediumId"]) - - parent_element["title"] = parent_element["einzeltitel"] - - parent_element["searchable"] = 1 - parent_element["aggregation_level"] = 2 - parent_element["uuid"] = edusharing.buildUUID(parent_element["downloadUrl"]) - - for element in group: - element["searchable"] = 0 - element["aggregation_level"] = 1 - element["uuid"] = edusharing.buildUUID(element["downloadUrl"]) - - element["title"] = element["dateiBezeichnung"] - - # Add connections from parent to children elements. - parent_element, group = self.relate_parent_with_children_elements(parent_element, group) - - collection_elements.append(parent_element) - collection_elements.extend(group) - - return collection_elements - - def relate_parent_with_children_elements(self, parent_element, children_elements): - # Add connections from "parent" to "children" elements. - parent_element["relation"] = [ - { - "kind": "haspart", - "resource": { - "identifier": [ - # Use the ccm:replicationsourceuuid to refer to the children elements. - element["uuid"] for element in children_elements - ] - } - } - ] - - # Add connections from "children" elements to "parent". - for element in children_elements: - element["relation"] = [ - { - "kind": "ispartof", - "resource": { - # Use the ccm:replicationsourceuuid to refer to the parent element. - "identifier": [parent_element["uuid"]] - } - } - ] - return parent_element, children_elements - - def prepare_element(self, element_dict): - # TODO: Decide which title. Do we have to construct the title, by concatenating multiple from the provided ones? - # Einzeltitel, einzeluntertitel, serientitel, serienuntertitel - # Please keep in mind that we override this value for parent elements of collections. - element_dict["title"] = element_dict["einzeltitel"] - - return element_dict \ No newline at end of file + # TODO: This code snippet will be enabled in the next PR for licensed content, after clarifications are made. + # + # def getPermissions(self, response): + # """ + # Licensing information is controlled via the 'oeffentlich' flag. When it is '1' it is available to the public, + # otherwise only to Thuringia. Therefore, when the latter happens we set the public to private, and set the groups + # and mediacenters accordingly. 
+ # """ + # permissions = LomBase.getPermissions(self, response) + # + # element_dict = response.meta["item"] + # + # if element_dict["oeffentlich"] == "0": # private + # permissions.replace_value('public', False) + # permissions.add_value('groups', ['Thuringia']) + # permissions.add_value('mediacenters', 'mediothek') # only 1 mediacenter. + # + # return permissions diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 46afc182..8c3dd16b 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -1,8 +1,8 @@ +from datetime import datetime + import xmltodict as xmltodict from lxml import etree from scrapy.spiders import CrawlSpider - -from converter.constants import Constants from converter.items import * from .base_classes import LomBase import scrapy @@ -19,7 +19,7 @@ class MerlinSpider(CrawlSpider, LomBase): name = "merlin_spider" url = "https://merlin.nibis.de/index.php" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "Merlin" # name as shown in the search ui - version = "0.2" # the version of your crawler, used to identify if a reimport is necessary + version = "0.1" # the version of your crawler, used to identify if a reimport is necessary apiUrl = "https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*" # * regular expression, to represent all possible values. limit = 100 @@ -49,9 +49,6 @@ async def parse(self, response: scrapy.http.Response): root = etree.XML(response.body) tree = etree.ElementTree(root) - # Get the total number of possible elements - elements_total = int(tree.xpath('/root/sum')[0].text) - # If results are returned. elements = tree.xpath("/root/items/*") if len(elements) > 0: @@ -60,28 +57,25 @@ async def parse(self, response: scrapy.http.Response): element_xml_str = etree.tostring( element, pretty_print=True, encoding="unicode" ) - try: - element_dict = xmltodict.parse(element_xml_str) - element_dict = element_dict["data"] + element_dict = xmltodict.parse(element_xml_str) - # Preparing the values here helps for all following logic across the methods. - self.prepare_element(element_dict) + # Temporary solution for public-only content. + # TODO: remove this when licensed content are enabled! + if not self.is_public(element_dict["data"]): + continue - # If there is no available county (Kreis) code, then we do not want to deal with this element. - if not("county_ids" in element_dict - and element_dict["county_ids"] is not None - and len(element_dict["county_ids"]) > 0): - continue + # TODO: It's probably a pointless attribute. + # del element_dict["data"]["score"] - # TODO: It's probably a pointless attribute. - # del element_dict["data"]["score"] + # Passing the dictionary for easier access to attributes. + copyResponse.meta["item"] = element_dict["data"] - # Passing the dictionary for easier access to attributes. 
- copyResponse.meta["item"] = element_dict + # In case JSON string representation is preferred: + # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) + copyResponse._set_body(element_xml_str) - # In case JSON string representation is preferred: - # copyResponse._set_body(json.dumps(copyResponse.meta['item'], indent=1, ensure_ascii=False)) - copyResponse._set_body(element_xml_str) + if self.hasChanged(copyResponse): + yield self.handleEntry(copyResponse) # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. await LomBase.parse(self, copyResponse) @@ -89,8 +83,8 @@ async def parse(self, response: scrapy.http.Response): # TODO: To not stress the Rest APIs. # time.sleep(0.1) - # If we are below the total available numbers continue fetching more pages. - if current_expected_count < elements_total: + # If the number of returned results is equal to the imposed limit, it means that there are more to be returned. + if len(elements) == self.limit: self.page += 1 url = self.apiUrl.replace("%start", str(self.page * self.limit)).replace( "%anzahl", str(self.limit) @@ -114,13 +108,13 @@ def getHash(self, response): return ( hash(self.version) + hash(self.getId(response)) - # + self._date_to_integer(datetime.date(datetime.now())) + + self._date_to_integer(datetime.date(datetime.now())) ) - # def _date_to_integer(self, dt_time): - # """ Converting the date to an integer, so it is useful in the getHash method - # Using prime numbers for less collisions. """ - # return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day + def _date_to_integer(self, dt_time): + """ Converting the date to an integer, so it is useful in the getHash method + Using prime numbers for less collisions. """ + return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day def mapResponse(self, response): r = ResponseItemLoader(response=response) @@ -134,23 +128,7 @@ async def handleEntry(self, response): def getBase(self, response): base = LomBase.getBase(self, response) - - # Element response as a Python dict. - element_dict = dict(response.meta["item"]) - - base.add_value("thumbnail", element_dict.get("thumbnail", "")) # get or default - - # As a backup, if no other thumbnail URL is available. - element_dict["hardcodedDefaultLogoUrl"] = "/logos/bs_logos/merlin.png" - - # By the order of preference. As soon as one of these default thumbnails is available you keep that. - for default_thumbnail in ["srcLogoUrl", "logo", "hardcodedDefaultLogoUrl"]: - if default_thumbnail in element_dict: - base.add_value("defaultThumbnail", "https://merlin.nibis.de" + element_dict[default_thumbnail]) - break - - # Adding a default searchable value to constitute this element (node) as a valid-to-be-returned object. - base.add_value("searchable", "1") + base.add_value("thumbnail", response.xpath("/data/thumbnail/text()").get()) return base @@ -161,29 +139,12 @@ def getLOMGeneral(self, response): "description", response.xpath("/data/beschreibung/text()").get() ) - # Adding a default aggregationLevel, which can be used during filtering queries. - general.add_value("aggregationLevel", "1") - return general def getUri(self, response): location = response.xpath("/data/media_url/text()").get() return "http://merlin.nibis.de" + location - def getLicense(self, response): - license = LomBase.getLicense(self, response) - - # Element response as a Python dict. 
- element_dict = response.meta["item"] - - # If there is only one element and is the County code 3100, then it is public content. - if len(element_dict["county_ids"]) == 1 and str(element_dict["county_ids"][0]) == "county-3100": - license.replace_value('internal', Constants.LICENSE_COPYRIGHT_LAW) # public - else: - license.replace_value('internal', Constants.LICENSE_NONPUBLIC) # private - - return license - def getLOMTechnical(self, response): technical = LomBase.getLOMTechnical(self, response) @@ -233,81 +194,36 @@ def getValuespaces(self, response): valuespaces.add_value("learningResourceType", resource_types) return valuespaces - def getPermissions(self, response): + def is_public(self, element_dict) -> bool: """ - In case license information, in the form of counties (Kreis codes), is available. This changes the permissions from - public to private and sets accordingly the groups and mediacenters. For more information regarding the available - Merlin county (kreis) codes please consult 'http://merlin.nibis.de/index.php?action=kreise' + Temporary solution to check whether the content is public and only save it if this holds. """ + return not ( + element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0 + ) - permissions = LomBase.getPermissions(self, response) - - element_dict = response.meta["item"] - - permissions.replace_value("public", False) - permissions.add_value("autoCreateGroups", True) - - groups = [] - - county_ids = element_dict["county_ids"] - public_county = "county-3100" - - # If there is only one element and is the County code 3100, then it is public content. - if len(county_ids) == 1 and str(county_ids[0]) == public_county: - # Add to state-wide public group. - # groups.append("state-LowerSaxony-public") - groups.append("LowerSaxony-public") - - # Add 1 group per County-code, which in this case is just "100" (3100). - groups.extend(county_ids) - else: - # Add to state-wide private/licensed group. - # groups.append("state-LowerSaxony-licensed") - groups.append("LowerSaxony-private") - - # If County code 100 (country-wide) is included in the list, remove it. - if public_county in county_ids: - county_ids.remove(public_county) - - # Add 1 group per county. - groups.extend(county_ids) - - permissions.add_value("groups", groups) - - return permissions - - def prepare_element(self, element_dict): - # Step 1. Prepare county (Kreis) codes. - if "kreis_id" in element_dict and element_dict["kreis_id"] is not None: - county_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... - if not isinstance(county_ids, list): # one element - county_ids = [county_ids] - county_ids = sorted(county_ids, key=lambda x: int(x)) - - # Add prefix "3" to conform with nationally-assigned IDs: - # https://de.wikipedia.org/wiki/Liste_der_Landkreise_in_Deutschland - county_ids = ["3" + id for id in county_ids] - county_ids = ["county-" + x for x in county_ids] - element_dict["county_ids"] = county_ids - - # Step 2. Fix thumbnail URL. - thumbnail_prepared = element_dict["thumbnail"] - - # Step 2. Case a: Remove the 3 dots "...". - thumbnail_prepared = thumbnail_prepared.replace("...", "") - - # Step 2. Case b: Replace "%2F" with '/' - # TODO: check why not ALL occurrences are replaced. - thumbnail_prepared = thumbnail_prepared.replace("%2F", "/") - - # Step 2. Case c: Replace the dot after the parent identifier with a '/'. 
- if element_dict["parent_identifier"] is not None: - parent_identifier = element_dict["parent_identifier"] - subpath_position = thumbnail_prepared.find(parent_identifier) + len(parent_identifier) - if thumbnail_prepared[subpath_position] == ".": - thumbnail_prepared = thumbnail_prepared[:subpath_position] + "/" + thumbnail_prepared[subpath_position + 1:] - - element_dict["thumbnail"] = thumbnail_prepared - - return element_dict - + # TODO: This code snippet will be enabled in the next PR for licensed content, after clarifications are made. + # + # def getPermissions(self, response): + # """ + # In case license information, in the form of Kreis codes, is available. This changes the permissions from + # public to private and sets accordingly the groups and mediacenters. For more information regarding the available + # Merlin kreis codes please consult 'http://merlin.nibis.de/index.php?action=kreise' + # """ + # + # permissions = LomBase.getPermissions(self, response) + # + # element_dict = response.meta["item"] + # + # if element_dict["kreis_id"] is not None and len(element_dict["kreis_id"]) > 0: # private + # kreis_ids = element_dict["kreis_id"]["data"] # ... redundant extra nested dictionary "data"... + # if not isinstance(kreis_ids, list): # one element + # kreis_ids = [kreis_ids] + # kreis_ids = sorted(kreis_ids, key=lambda x: int(x)) + # kreis_ids = ["merlin_" + id for id in kreis_ids] # add prefix + # + # permissions.replace_value('public', False) + # permissions.add_value('groups', ['Lower Saxony']) + # permissions.add_value('mediacenters', kreis_ids) + # + # return permissions diff --git a/converter/spiders/oeh_spider.py b/converter/spiders/oeh_spider.py index f5c969ed..627bfaa2 100644 --- a/converter/spiders/oeh_spider.py +++ b/converter/spiders/oeh_spider.py @@ -93,12 +93,3 @@ def shouldImport(self, response=None): ) return False return True - - def getPermissions(self, response): - permissions = LomBase.getPermissions(self, response) - - permissions.replace_value("public", False) - permissions.add_value("autoCreateGroups", True) - permissions.add_value("groups", ["public"]) - - return permissions diff --git a/converter/spiders/utils/spider_name_converter.py b/converter/spiders/utils/spider_name_converter.py deleted file mode 100644 index 35ba9d64..00000000 --- a/converter/spiders/utils/spider_name_converter.py +++ /dev/null @@ -1,50 +0,0 @@ -import logging - -spider_to_friendly_name = None - - -def load_friendly_spider_names(): - """ - Returns a dictionary which maps the Spider's name to its "friendly" name. - - e.g., merlin_spider --> Merlin, br_rss_spider --> Bayerischer Rundfunk - - Based on https://stackoverflow.com/questions/46871133/get-all-spiders-class-name-in-scrapy - - Author: Ioannis Koumarelas, ioannis.koumarelas@hpi.de, Schul-Cloud, Content team. - """ - from scrapy.utils import project - from scrapy import spiderloader - - settings = project.get_project_settings() - spider_loader = spiderloader.SpiderLoader.from_settings(settings) - - spider_names = spider_loader.list() - spider_classes = [spider_loader.load(name) for name in spider_names] - - spider_name_to_friendly_name = {} - for spider in spider_classes: - spider_name_to_friendly_name[spider.name] = spider.friendlyName - - return spider_name_to_friendly_name - - -def get_spider_friendly_name(spider_name): - """ - Given the spider's name, returns its friendly name. 
- """ - - global spider_to_friendly_name - if spider_to_friendly_name is None: - spider_to_friendly_name = load_friendly_spider_names() - - if spider_name in spider_to_friendly_name: - return spider_to_friendly_name[spider_name] - else: - if spider_name is not None: - logging.info("Friendly name for spider " + spider_name + " has not been found.") - return spider_name - - -if __name__ == '__main__': - load_friendly_spider_names() \ No newline at end of file From a0dddf2153e8a4c3e836e2edfa1b058d487d7bd4 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Mon, 9 Sep 2024 11:27:23 +0200 Subject: [PATCH 565/590] DMED-119 - update Python version requirement and package dependencies --- Readme.md | 10 ++++++---- crawl.sh | 16 ++++++++++++++++ setup.cfg | 2 +- 3 files changed, 23 insertions(+), 5 deletions(-) create mode 100644 crawl.sh diff --git a/Readme.md b/Readme.md index e641c3c8..b103e4bf 100644 --- a/Readme.md +++ b/Readme.md @@ -10,17 +10,18 @@ The terms "spider" and "crawler" may be used interchangeable. Before doing anything in this repository, make sure you meet the following requirements: - docker and docker-compose -- Python 3.11 +- Python 3.12 or newer is required - a python virtual environment - an .env file containing all the necessary credentials and settings - splash service for crawlers Debian-based systems: ```bash -sudo apt install python3.11 python3-dev python3-pip python3-venv libpq-dev +sudo apt install python3-dev python3-pip python3-venv libpq-dev -y python3 -m venv .venv source .venv/bin/activate -pip3 install -r requirements.txt +pip3 install poetry +poetry install cp .env.example .env # adjust .env according to your use case ``` @@ -29,7 +30,8 @@ For windows, go to python.org to download and install the proper python version. 
```commandline python3 -m venv .venv .venv\Scripts\activate.bat -pip3 install -r requirements.txt +pip3 install poetry +poetry install copy .env.example .env REM adjust .env according to your use case ``` diff --git a/crawl.sh b/crawl.sh new file mode 100644 index 00000000..d412b08c --- /dev/null +++ b/crawl.sh @@ -0,0 +1,16 @@ +#!/bin/sh + +source .venv/bin/activate + +scrapy crawl serlo_spider & +scrapy crawl leifi_spider & +scrapy crawl planet_schule_spider & +scrapy crawl tutory_spider & +scrapy crawl br_rss_spider & +scrapy crawl zdf_rss_spider & +scrapy crawl digitallearninglab_spider & +scrapy crawl geogebra_spider & +scrapy crawl memucho_spider & +scrapy crawl wirlernenonline_spider & +scrapy crawl irights_spider & +scrapy crawl rlp_spider & \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 46b9e72d..5c232dc4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,8 +5,8 @@ version = 0.1.0 [options] packages = converter install_requires = - python_version == "3.9" requests + python_version == "3.9" wheel image html2text From 87d7515d189d2ad160a510f530714cdc002d6577 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Mon, 9 Sep 2024 12:14:45 +0200 Subject: [PATCH 566/590] DMED-119 - add missing dependencies to `pyproject.toml` and update `Dockerfiles` --- Dockerfile.edusharing-crawler | 9 +- Dockerfile.edusharing-setup | 9 +- poetry.lock | 499 ++++++++++++++++++++-------------- pyproject.toml | 4 +- 4 files changed, 303 insertions(+), 218 deletions(-) diff --git a/Dockerfile.edusharing-crawler b/Dockerfile.edusharing-crawler index 9f9b57b2..9fb45f1f 100644 --- a/Dockerfile.edusharing-crawler +++ b/Dockerfile.edusharing-crawler @@ -1,13 +1,14 @@ -FROM python:3.11.6-slim-bookworm +FROM python:3.12.5-slim-bookworm RUN apt-get install ca-certificates WORKDIR /oeh-search-etl -COPY requirements.txt . -RUN python3.11 -m pip install --no-cache-dir -r requirements.txt +COPY edu_sharing_openapi/ edu_sharing_openapi/ +COPY pyproject.toml poetry.lock ./ +RUN pip3 install --no-cache-dir poetry && poetry install COPY . . COPY .env.docker .env -CMD [ "bash", "-c", "python3.11 run.py" ] +CMD [ "bash", "-c", "poetry run python run.py" ] diff --git a/Dockerfile.edusharing-setup b/Dockerfile.edusharing-setup index 31cc77ef..16a2e573 100644 --- a/Dockerfile.edusharing-setup +++ b/Dockerfile.edusharing-setup @@ -1,4 +1,4 @@ -FROM python:3.11.6-slim-bookworm +FROM python:3.12.5-slim-bookworm ENV TZ="Europe/Berlin" @@ -6,10 +6,11 @@ RUN apt-get install ca-certificates WORKDIR /oeh-search-etl -COPY requirements.txt . -RUN python3.11 -m pip install --no-cache-dir -r requirements.txt +COPY edu_sharing_openapi/ edu_sharing_openapi/ +COPY pyproject.toml poetry.lock ./ +RUN pip3 install --no-cache-dir poetry && poetry install COPY . . 
COPY ./schulcloud/edusharing_setup.py ./edusharing_setup.py -CMD [ "bash", "-c", "python3.11 edusharing_setup.py" ] +CMD [ "bash", "-c", "poetry run python edusharing_setup.py" ] diff --git a/poetry.lock b/poetry.lock index 4a68ab20..f73ad92f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -168,6 +168,44 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "boto3" +version = "1.35.14" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.35.14-py3-none-any.whl", hash = "sha256:c3e138e9041d59cd34cdc28a587dfdc899dba02ea26ebc3e10fb4bc88e5cf31b"}, + {file = "boto3-1.35.14.tar.gz", hash = "sha256:7bc78d7140c353b10a637927fe4bc4c4d95a464d1b8f515d5844def2ee52cbd5"}, +] + +[package.dependencies] +botocore = ">=1.35.14,<1.36.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.35.14" +description = "Low-level, data-driven core of boto 3." +optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.35.14-py3-none-any.whl", hash = "sha256:24823135232f88266b66ae8e1d0f3d40872c14cd976781f7fe52b8f0d79035a0"}, + {file = "botocore-1.35.14.tar.gz", hash = "sha256:8515a2fc7ca5bcf0b10016ba05ccf2d642b7cb77d8773026ff2fa5aa3bf38d2e"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.21.2)"] + [[package]] name = "certifi" version = "2024.8.30" @@ -181,78 +219,78 @@ files = [ [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = 
"sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", 
hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -414,38 +452,38 @@ dev = ["black", "mypy", "pytest", "pytest-cov"] [[package]] name = "cryptography" -version = "43.0.0" +version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"}, - {file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"}, - {file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"}, - {file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"}, - {file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"}, - {file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"}, - {file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"}, - {file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"}, - {file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"}, - {file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"}, - {file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"}, - {file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"}, - {file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] @@ -458,7 +496,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -530,7 +568,7 @@ name = "edu-sharing-client" version = "1.0.0" description = "edu-sharing Repository REST API" optional = false -python-versions = "^3.12" +python-versions = "^3.7" files = [] develop = false @@ -544,6 +582,17 @@ urllib3 = ">= 1.25.3" type = "directory" url = "edu_sharing_openapi" +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + [[package]] name = "extruct" version = "0.17.0" @@ -570,19 +619,19 @@ cli = ["requests"] [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "flake8" @@ -1290,6 +1339,20 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "openpyxl" +version = "3.1.5" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, +] + +[package.dependencies] +et-xmlfile = "*" + [[package]] name = "packaging" version = "24.1" @@ -1418,19 +1481,19 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, + {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "playwright" @@ -1527,18 +1590,18 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.3" typing-extensions = [ {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, @@ -1546,103 +1609,104 @@ typing-extensions = [ [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, + {file = 
"pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, + {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, + {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, + {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, + {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = 
"pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, + {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, + {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, + {file = 
"pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, + {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = "sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, + {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, + {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, + {file = 
"pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, ] [package.dependencies] @@ -1962,6 +2026,23 @@ files = [ [package.dependencies] requests = ">=1.0.0" +[[package]] +name = "s3transfer" +version = "0.10.2" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.8" +files = [ + {file = "s3transfer-0.10.2-py3-none-any.whl", hash = "sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, + {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + 
[[package]] name = "scrapy" version = "2.11.2" @@ -2031,13 +2112,13 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "74.1.1" +version = "74.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.1.1-py3-none-any.whl", hash = "sha256:fc91b5f89e392ef5b77fe143b17e32f65d3024744fba66dc3afe07201684d766"}, - {file = "setuptools-74.1.1.tar.gz", hash = "sha256:2353af060c06388be1cecbf5953dcdb1f38362f87a2356c480b6b4d5fcfc8847"}, + {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, + {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, ] [package.extras] @@ -2359,4 +2440,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "69894e003525a1415ae92c18d70267f6a87142e03036b6c20b1f5eb6d270b741" +content-hash = "c2fb652ed5769e982dd8ea9bd0984f6ca20b9af181ec900cfcf79299c77a7293" diff --git a/pyproject.toml b/pyproject.toml index 2fe4a15e..e8c3368c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -88,7 +88,9 @@ babel = "2.15.0" langcodes = {extras = ["data"], version = "^3.3.0"} httpx = "0.27.2" async-lru = "2.0.4" -urllib3 = "^2.2.2" +urllib3 = "2.2.2" +boto3 = "1.35.14" +openpyxl = "3.1.5" [tool.poetry.group.edu_sharing_client.dependencies] # these dependencies are used (and automatically generated) by the "openapi-generator-cli"-generated client From ca1fc3837210cb85f07cd361c2f28e6b63e91226 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Mon, 9 Sep 2024 13:14:58 +0200 Subject: [PATCH 567/590] DMED-119 - remove `docker-compose-dev.yml` --- docker-compose-dev.yml | 98 ------------------------------------------ 1 file changed, 98 deletions(-) delete mode 100644 docker-compose-dev.yml diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml deleted file mode 100644 index f4cf6db4..00000000 --- a/docker-compose-dev.yml +++ /dev/null @@ -1,98 +0,0 @@ -version: "3.4" - -services: - elasticsearch: - image: elasticsearch:7.4.2 - environment: - - "discovery.type=single-node" - #- ELASTIC_PASSWORD=changethisinproduction - #- xpack.security.enabled=true - - http.port=9200 - - http.cors.enabled=true - - http.cors.allow-origin=* - - http.cors.allow-headers=X-Requested-With,X-Auth-Token,Content-Type,Content-Length,Authorization,Access-Control-Allow-Headers,Accept - - http.cors.allow-credentials=true - - bootstrap.memory_lock=true - - 'ES_JAVA_OPTS=-Xms2g -Xmx4g' - networks: - - elasticnet - ports: - - "127.0.0.1:9200:9200" - restart: on-failure - volumes: - - es-data:/usr/share/elasticsearch/data - kibana: - image: docker.elastic.co/kibana/kibana:7.4.2 - networks: - - elasticnet - depends_on: - - elasticsearch - ports: - - "5601:5601" # exposte to host - postgres: - build: - context: ./postgres - dockerfile: postgres.Dockerfile - environment: - - "POSTGRES_USER=search" - - "POSTGRES_PASSWORD=admin" - - "POSTGRES_DB=search" - networks: - - elasticnet - ports: - - "127.0.0.1:5432:5432" - restart: always - volumes: - - pg-data:/var/lib/postgresql/data - valuespace_converter: - build: - context: ./etl/valuespace_converter - dockerfile: valuespace_converter.Dockerfile - networks: - - elasticnet - ports: - - "5010:5010" # exposte to host - restart: on-failure - logstash: - build: - context: ./logstash - dockerfile: 
logstash_psql.Dockerfile - environment: - - LS_JAVA_OPTS=-Xmx4g - networks: - - elasticnet - depends_on: - - elasticsearch - - postgres - restart: on-failure - valuespaces: - image: laocoon667/oer-flask-api:dev - networks: - - elasticnet - ports: - - "127.0.0.1:5000:5000" - restart: on-failure - splash: - image: scrapinghub/splash - networks: - - elasticnet - command: --maxrss 4000 - restart: always - ports: - - "127.0.0.1:8050:8050" - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8050/_ping"] - interval: 30s - timeout: 5s - retries: 3 - -networks: - elasticnet: - -volumes: - pg-data: - driver: local - es-data: - driver: local - - From 37e00a3ee1db1c9dd8eee60bf8aa40b55f9fe08b Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 25 Sep 2024 11:43:41 +0200 Subject: [PATCH 568/590] DMED-119 - use synchronous requests for setting node --- converter/es_connector.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index b2b251da..3a6c86ee 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -224,20 +224,16 @@ async def set_node_binary_data(self, uuid, item) -> bool: else: return False - async def set_node_preview(self, uuid, item) -> bool: + def set_node_preview(self, uuid, item) -> bool: if "thumbnail" in item: key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None if key: files = {"image": base64.b64decode(item["thumbnail"][key])} - response = await self._client_async.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/preview?mimetype=" - + item["thumbnail"]["mimetype"], - headers=self.get_headers(None), - files=files, - timeout=None, + response = requests.post( + url=f"{get_project_settings().get('EDU_SHARING_BASE_URL')}" + f"rest/node/v1/nodes/-home-/{uuid}" + f"/content?mimetype={item['thumbnail']['mimetype']}", + data=files ) return response.status_code == 200 else: @@ -759,7 +755,7 @@ async def insert_item(self, spider, uuid, item): # temporary burst of items that need to be inserted node = self.sync_node(spider, "ccm:io", self.transform_item(uuid, spider, item)) self.set_node_permissions(node["ref"]["id"], item) - await self.set_node_preview(node["ref"]["id"], item) + self.set_node_preview(node["ref"]["id"], item) if not await self.set_node_binary_data(node["ref"]["id"], item): await self.set_node_text(node["ref"]["id"], item) From 09bd03482c3d31bf301861d0a92808cc26dc9f7d Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 25 Sep 2024 13:17:44 +0200 Subject: [PATCH 569/590] DMED-119 - refactor SodixApi credentials to use spider-specific environment variables --- schulcloud/sodix/sodix.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schulcloud/sodix/sodix.py b/schulcloud/sodix/sodix.py index 71b49d07..bd4c79f7 100644 --- a/schulcloud/sodix/sodix.py +++ b/schulcloud/sodix/sodix.py @@ -11,8 +11,8 @@ class SodixApi: def __init__(self): env = util.Environment(env_vars=needed_envs) - self.user = env['SODIX_USER'] - self.password = env['SODIX_PASSWORD'] + self.user = env['SODIX_SPIDER_USERNAME'] + self.password = env['SODIX_SPIDER_PASSWORD'] self.access_token = '' self.login() From 106f281bbafd46bee1a3d265566f0bdc6f1def5a Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 25 Sep 2024 13:18:43 +0200 Subject: [PATCH 570/590] DMED-119 - fix needed envs for SodixApi --- schulcloud/sodix/sodix.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/schulcloud/sodix/sodix.py b/schulcloud/sodix/sodix.py index bd4c79f7..1472f32e 100644 --- a/schulcloud/sodix/sodix.py +++ b/schulcloud/sodix/sodix.py @@ -2,7 +2,7 @@ import requests from schulcloud import util -needed_envs = ['SODIX_USER', 'SODIX_PASSWORD'] +needed_envs = ['SODIX_SPIDER_USERNAME', 'SODIX_SPIDER_PASSWORD'] class SodixApi: From aad16152c6121d2c25c18f8dad29211f9afc41be Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 25 Sep 2024 13:22:12 +0200 Subject: [PATCH 571/590] DMED-119 - refactor Uploader class to handle cases where multiple nodes with the same replication source ID are found --- schulcloud/h5p/upload.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/schulcloud/h5p/upload.py b/schulcloud/h5p/upload.py index 91c1552e..17715080 100644 --- a/schulcloud/h5p/upload.py +++ b/schulcloud/h5p/upload.py @@ -190,7 +190,10 @@ def collection_status(self, collection: Collection, collection_node: Node): filename = os.path.basename(child.filepath) name = os.path.splitext(filename)[0] rep_source_id = create_replicationsourceid(name) - node_exists = self.api.find_node_by_replication_source_id(rep_source_id, skip_exception=True) + try: + node_exists = self.api.find_node_by_replication_source_id(rep_source_id, skip_exception=True) + except FoundTooManyException: + return "too_many" if not node_exists: if uploaded_nodes == 0: return "missing" From cc5038d55aa488ec03f1e9115bc25dde6605f434 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 25 Sep 2024 13:54:31 +0200 Subject: [PATCH 572/590] DMED-119 - fix issue with wrong initialization of FoundTooManyException in h5p uploader --- schulcloud/h5p/upload.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schulcloud/h5p/upload.py b/schulcloud/h5p/upload.py index 17715080..5bc948e7 100644 --- a/schulcloud/h5p/upload.py +++ b/schulcloud/h5p/upload.py @@ -229,7 +229,7 @@ def delete_too_many_children(self, collection_node: Node, collection: Collection for es_child in es_children: es_child_node = self.api.search_custom('ccm:replicationsourceuuid', es_child) if len(es_child_node) > 1: - raise FoundTooManyException + raise FoundTooManyException(es_child) es_child_node = es_child_node[0] delete_child = True for child in collection.children: From 2aa1a2a2f6aaeb4f8ff04aad86171a94b0ef2527 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 25 Sep 2024 13:56:20 +0200 Subject: [PATCH 573/590] DMED-119 - add missing await to LomBase.getUrlData function call in merlin_spider --- converter/spiders/merlin_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 8c3dd16b..7d6db37e 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -41,7 +41,7 @@ async def parse(self, response: scrapy.http.Response): print("Parsing URL: " + response.url) # Call Splash only once per page (that contains multiple XML elements). 
- data = self.getUrlData(response.url) + data = await LomBase.getUrlData(response.url) response.meta["rendered_data"] = data # We would use .fromstring(response.text) if the response did not include the XML declaration: From 34d4748613027dd6495f9eeda94981f35c4d6e6b Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Wed, 25 Sep 2024 14:22:06 +0200 Subject: [PATCH 574/590] DMED-119 - fix getUrlData self issue --- converter/spiders/merlin_spider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 7d6db37e..10f9a6a5 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -41,7 +41,7 @@ async def parse(self, response: scrapy.http.Response): print("Parsing URL: " + response.url) # Call Splash only once per page (that contains multiple XML elements). - data = await LomBase.getUrlData(response.url) + data = await self.getUrlData(response.url) response.meta["rendered_data"] = data # We would use .fromstring(response.text) if the response did not include the XML declaration: From dcde34609ebb8a16f780817b4ae027625a6e423f Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 25 Sep 2024 12:01:03 +0200 Subject: [PATCH 575/590] change: replace 'httpx' async client with 'requests'-session - to mitigate "httpx.ReadError"s upon dropped or reset HTTP connections (to / from the edu-sharing repository), the edu-sharing connector will use a shared "requests.Session"-object from now on Background: - "httpx.ReadError"s were observed for HTTP Requests that contained (potentially huge) payloads, especially during Thumbnail uploads (via set_node_preview()) and fulltext uploads (via set_node_text()) - since we cannot reasonably limit the size of the uploaded data, switching back to "requests" to handle these requests (hopefully more graceful than httpx) should fix these HTTP Connection Pool issues - the httpx discussion at https://github.com/encode/httpx/discussions/3067 pointed towards similar errors which users observed for payloads above 1 MiB PS: Thank you, Constantin (@bergatco) and Paul, for the collective debugging session! 
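For illustration only: a minimal, self-contained sketch of the shared-Session pattern this change applies. The class and method names are simplified stand-ins (not the actual connector code); the endpoint paths mirror the edu-sharing REST routes touched in the diff below:

    import requests

    class EduSharingConnection:
        # one Session shared across all uploads, so TCP connections and the
        # authentication cookie are reused instead of opening a new pool per call
        # (see https://requests.readthedocs.io/en/latest/user/advanced/#session-objects)
        session: requests.Session = requests.Session()

        def login(self, base_url: str, username: str, password: str) -> bool:
            # authenticate once and copy the returned cookies into the shared session
            auth = requests.get(
                f"{base_url}rest/authentication/v1/validateSession",
                auth=(username, password),
                headers={"Accept": "application/json"},
            )
            self.session.cookies.update(requests.utils.dict_from_cookiejar(auth.cookies))
            return auth.status_code == 200

        def upload_fulltext(self, base_url: str, node_id: str, fulltext: str) -> bool:
            # plain synchronous POST over the shared session instead of httpx.AsyncClient
            response = self.session.post(
                f"{base_url}rest/node/v1/nodes/-home-/{node_id}/textContent?mimetype=text/plain",
                data=fulltext.encode("utf-8"),
            )
            return response.status_code == 200
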
--- converter/es_connector.py | 101 +++++++++++++++++--------------------- converter/pipelines.py | 2 +- 2 files changed, 45 insertions(+), 58 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index b2b251da..b851032c 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -1,15 +1,12 @@ -import asyncio import base64 import json import logging import pprint import time import uuid -from asyncio import Semaphore from enum import Enum from typing import List, Optional -import httpx import requests import vobject from requests.auth import HTTPBasicAuth @@ -113,8 +110,8 @@ class CreateGroupType(Enum): nodeApi: NODEV1Api groupCache: List[str] enabled: bool - _client_async = httpx.AsyncClient() - _sem: Semaphore = asyncio.Semaphore(25) + r_session: requests.Session = requests.Session() + # see: https://requests.readthedocs.io/en/latest/user/advanced/#session-objects def __init__(self): cookie_threshold = env.get("EDU_SHARING_COOKIE_REBUILD_THRESHOLD", True) @@ -167,16 +164,14 @@ def sync_node(self, spider, type, properties): raise e return response["node"] - async def set_node_text(self, uuid, item) -> bool: + def set_node_text(self, uuid, item) -> bool: if "fulltext" in item: - response = await self._client_async.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/textContent?mimetype=text/plain", - headers=self.get_headers("multipart/form-data"), + response = self.r_session.post( + url=f"{get_project_settings().get("EDU_SHARING_BASE_URL")}" + f"rest/node/v1/nodes/-home-/{uuid}" + "/textContent?mimetype=text/plain", data=item["fulltext"].encode("utf-8"), - timeout=None, + headers=self.get_headers("multipart/form-data"), ) return response.status_code == 200 # does currently not store data @@ -200,44 +195,36 @@ def set_permissions(self, uuid, permissions) -> bool: except ApiException as e: return False - async def set_node_binary_data(self, uuid, item) -> bool: + def set_node_binary_data(self, uuid, item) -> bool: if "binary" in item: log.info( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/content?mimetype=" - + item["lom"]["technical"]["format"] + f"{get_project_settings().get("EDU_SHARING_BASE_URL")}" + f"rest/node/v1/nodes/-home-/{uuid}" + f"/content?mimetype={item["lom"]["technical"]["format"]}" ) files = {"file": item["binary"]} - response = await self._client_async.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/content?mimetype=" - + item["lom"]["technical"]["format"], + response = self.r_session.post( + url=f"{get_project_settings().get("EDU_SHARING_BASE_URL")}" + f"rest/node/v1/nodes/-home-/{uuid}" + f"/content?mimetype={item['lom']['technical']['format']}", headers=self.get_headers(None), files=files, - timeout=None, ) return response.status_code == 200 else: return False - async def set_node_preview(self, uuid, item) -> bool: + def set_node_preview(self, uuid, item) -> bool: if "thumbnail" in item: key = "large" if "large" in item["thumbnail"] else "small" if "small" in item["thumbnail"] else None if key: files = {"image": base64.b64decode(item["thumbnail"][key])} - response = await self._client_async.post( - get_project_settings().get("EDU_SHARING_BASE_URL") - + "rest/node/v1/nodes/-home-/" - + uuid - + "/preview?mimetype=" - + item["thumbnail"]["mimetype"], + response = self.r_session.post( + url=f"{get_project_settings().get("EDU_SHARING_BASE_URL")}" + 
f"rest/node/v1/nodes/-home-/{uuid}" + f"/preview?mimetype={item["thumbnail"]["mimetype"]}", headers=self.get_headers(None), files=files, - timeout=None, ) return response.status_code == 200 else: @@ -753,46 +740,47 @@ def set_node_permissions(self, uuid, item): ) log.error(item["permissions"]) - async def insert_item(self, spider, uuid, item): - async with self._sem: - # inserting items is controlled with a Semaphore, otherwise we'd get PoolTimeout Exceptions when there's a - # temporary burst of items that need to be inserted - node = self.sync_node(spider, "ccm:io", self.transform_item(uuid, spider, item)) - self.set_node_permissions(node["ref"]["id"], item) - await self.set_node_preview(node["ref"]["id"], item) - if not await self.set_node_binary_data(node["ref"]["id"], item): - await self.set_node_text(node["ref"]["id"], item) + def insert_item(self, spider, uuid, item): + node = self.sync_node(spider, "ccm:io", self.transform_item(uuid, spider, item)) + self.set_node_permissions(node["ref"]["id"], item) + self.set_node_preview(node["ref"]["id"], item) + if not self.set_node_binary_data(node["ref"]["id"], item): + self.set_node_text(node["ref"]["id"], item) - async def update_item(self, spider, uuid, item): - await self.insert_item(spider, uuid, item) + def update_item(self, spider, uuid, item): + self.insert_item(spider, uuid, item) @staticmethod def init_cookie(): - log.debug("Init edu sharing cookie...") + log.debug("Init edu-sharing cookie...") settings = get_project_settings() auth = requests.get( - settings.get("EDU_SHARING_BASE_URL") + "rest/authentication/v1/validateSession", + url=f"{settings.get("EDU_SHARING_BASE_URL")}rest/authentication/v1/validateSession", auth=HTTPBasicAuth( - settings.get("EDU_SHARING_USERNAME"), - settings.get("EDU_SHARING_PASSWORD"), + username=f"{settings.get("EDU_SHARING_USERNAME")}", + password=f"{settings.get("EDU_SHARING_PASSWORD")}" ), headers={"Accept": "application/json"}, ) - isAdmin = json.loads(auth.text)["isAdmin"] - log.info("Got edu sharing cookie, admin status: " + str(isAdmin)) - if isAdmin: + is_admin = json.loads(auth.text)["isAdmin"] + log.info(f"Got edu-sharing cookie, admin status: {is_admin}") + if is_admin: + # --- setting cookies for the (openAPI generated) API client: cookies = [] for cookie in auth.headers["SET-COOKIE"].split(","): cookies.append(cookie.split(";")[0]) EduSharing.cookie = ";".join(cookies) + # --- setting cookies for the requests.Session object: + cookie_dict: dict = requests.utils.dict_from_cookiejar(auth.cookies) + EduSharing.r_session.cookies.update(cookie_dict) return auth def init_api_client(self): if EduSharing.cookie is None: settings = get_project_settings() auth = self.init_cookie() - isAdmin = json.loads(auth.text)["isAdmin"] - if isAdmin: + is_admin = json.loads(auth.text)["isAdmin"] + if is_admin: configuration = Configuration() configuration.host = settings.get("EDU_SHARING_BASE_URL") + "rest" EduSharing.apiClient = ESApiClient( @@ -849,9 +837,8 @@ def init_api_client(self): return log.warning(auth.text) raise Exception( - "Could not authentify as admin at edu-sharing. Please check your settings for repository " - + settings.get("EDU_SHARING_BASE_URL") - ) + f"Could not authenticate as admin at edu-sharing. 
Please check your settings for repository " + f"{settings.get("EDU_SHARING_BASE_URL")}") @staticmethod def build_uuid(url): diff --git a/converter/pipelines.py b/converter/pipelines.py index 2cf2cc0f..24e9f953 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -1103,7 +1103,7 @@ async def process_item(self, raw_item, spider): if "title" in item["lom"]["general"]: title = str(item["lom"]["general"]["title"]) entryUUID = EduSharing.build_uuid(item["response"]["url"] if "url" in item["response"] else item["hash"]) - await self.insert_item(spider, entryUUID, item) + self.insert_item(spider, entryUUID, item) log.info("item " + entryUUID + " inserted/updated") # @TODO: We may need to handle Collections From fbade3b14150722fd8dd5fd7fcb2948af4dfee6a Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Wed, 25 Sep 2024 15:59:24 +0200 Subject: [PATCH 576/590] logging: reflect 'resetVersion'/'forceUpdate'-setting in log messages - when using the 'resetVersion=true' Spider Argument, logging messages did not correctly reflect what was happening during the hash check in the EduSharingCheckPipeline - LomBase now stores a custom_setting key ("EDU_SHARING_FORCE_UPDATE"), which can be accessed via "spider.custom_settings" for later readouts - if an active 'resetVersion' or 'forceUpdate' setting was detected, the pipeline's debug message should be easier to understand that even though an item's hash hasn't changed, the item will get updated nonetheless --- converter/pipelines.py | 17 +++++++++++------ converter/spiders/base_classes/lom_base.py | 3 +++ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 24e9f953..a7e0f7fa 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -939,13 +939,18 @@ def process_item(self, raw_item, spider): db_item = self.find_item(item["sourceId"], spider) if db_item: if item["hash"] != db_item[1]: - log.debug(f"hash has changed, continuing pipelines for item {item['sourceId']}") + log.debug(f"EduSharingCheckPipeline: hash has changed. Continuing pipelines for item {item['sourceId']}") else: - log.debug(f"hash unchanged, skipping item {item['sourceId']}") - # self.update(item['sourceId'], spider) - # for tests, we update everything for now - # activate this later - # raise DropItem() + if "EDU_SHARING_FORCE_UPDATE" in spider.custom_settings and spider.custom_settings["EDU_SHARING_FORCE_UPDATE"]: + log.debug(f"EduSharingCheckPipeline: hash unchanged for item {item['sourceId']}, " + f"but detected active 'force item update'-setting (resetVersion / forceUpdate). 
" + f"Continuing pipelines ...") + else: + log.debug(f"EduSharingCheckPipeline: hash unchanged, skipping item {item['sourceId']}") + # self.update(item['sourceId'], spider) + # for tests, we update everything for now + # activate this later + # raise DropItem() return raw_item class EduSharingTypeValidationPipeline(BasicPipeline): diff --git a/converter/spiders/base_classes/lom_base.py b/converter/spiders/base_classes/lom_base.py index e9e036bb..fd8306dd 100644 --- a/converter/spiders/base_classes/lom_base.py +++ b/converter/spiders/base_classes/lom_base.py @@ -43,6 +43,9 @@ def __init__(self, **kwargs): logging.info( f"resetVersion requested, will force update + reset versions for crawler {self.name}" ) + # populate the custom_settings so we can read the value more comfortably + # when an item passes through the pipeline + self.custom_settings.update({"EDU_SHARING_FORCE_UPDATE": True}) # EduSharing().deleteAll(self) EduSharing.resetVersion = True self.forceUpdate = True From ab2c80a6e1ef919fcc98711567a831366877873b Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 12:55:58 +0200 Subject: [PATCH 577/590] fix 10 weak warnings w.r.t. variable names and too broad exception clauses --- converter/es_connector.py | 42 +++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index b851032c..4939b922 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -132,16 +132,16 @@ def get_headers(self, content_type: str | None = "application/json"): return header_dict def sync_node(self, spider, type, properties): - groupBy = [] + group_by = [] if "ccm:replicationsourceorigin" in properties: - groupBy = ["ccm:replicationsourceorigin"] + group_by = ["ccm:replicationsourceorigin"] try: response = EduSharing.bulkApi.sync( request_body=properties, match=["ccm:replicationsource", "ccm:replicationsourceid"], type=type, group=spider.name, - group_by=groupBy, + group_by=group_by, reset_version=EduSharing.resetVersion, ) except ApiException as e: @@ -192,7 +192,7 @@ def set_permissions(self, uuid, permissions) -> bool: send_copy=False, ) return True - except ApiException as e: + except ApiException: return False def set_node_binary_data(self, uuid, item) -> bool: @@ -416,7 +416,7 @@ def transform_item(self, uuid, spider, item): try: # edusharing requires milliseconds duration = int(float(duration) * 1000) - except: + except ValueError: log.debug( f"The supplied 'technical.duration'-value {duration} could not be converted from " f"seconds to milliseconds. 
('cclom:duration' expects ms)" @@ -444,8 +444,8 @@ def transform_item(self, uuid, spider, item): continue mapping = EduSharingConstants.LIFECYCLE_ROLES_MAPPING[person["role"].lower()] # convert to a vcard string - firstName = person["firstName"] if "firstName" in person else "" - lastName = person["lastName"] if "lastName" in person else "" + first_name = person["firstName"] if "firstName" in person else "" + last_name = person["lastName"] if "lastName" in person else "" title: str = person["title"] if "title" in person else "" organization = person["organization"] if "organization" in person else "" url = person["url"] if "url" in person else "" @@ -463,8 +463,8 @@ def transform_item(self, uuid, spider, item): address_type: str = person["address_type"] if "address_type" in person else "" # create the vCard object first, then add attributes on-demand / if available vcard = vobject.vCard() - vcard.add("n").value = vobject.vcard.Name(family=lastName, given=firstName) - vcard.add("fn").value = organization if organization else (firstName + " " + lastName).strip() + vcard.add("n").value = vobject.vcard.Name(family=last_name, given=first_name) + vcard.add("fn").value = organization if organization else (first_name + " " + last_name).strip() # only the "fn"-attribute is required to serialize the vCard. (all other properties are optional) if address_city or address_country or address_postal_code or address_region or address_street: # The vCard v3 "ADR" property is used for physical addresses @@ -530,7 +530,7 @@ def transform_item(self, uuid, spider, item): else: spaces[mapping] = [vcard.serialize(lineLength=10000)] - valuespaceMapping = { + valuespace_mapping = { "accessibilitySummary": "ccm:accessibilitySummary", "conditionsOfAccess": "ccm:conditionsOfAccess", "containsAdvertisement": "ccm:containsAdvertisement", @@ -549,11 +549,11 @@ def transform_item(self, uuid, spider, item): "toolCategory": "ccm:toolCategory", } for key in item["valuespaces"]: - spaces[valuespaceMapping[key]] = item["valuespaces"][key] + spaces[valuespace_mapping[key]] = item["valuespaces"][key] # add raw values if the api supports it if EduSharing.version["major"] >= 1 and EduSharing.version["minor"] >= 1: for key in item["valuespaces_raw"]: - splitted = valuespaceMapping[key].split(":") + splitted = valuespace_mapping[key].split(":") splitted[0] = "virtual" spaces[":".join(splitted)] = item["valuespaces_raw"][key] @@ -611,11 +611,11 @@ def transform_item(self, uuid, spider, item): pass pass - mdsId = env.get("EDU_SHARING_METADATASET", allow_null=True, default="mds_oeh") - if mdsId != "default": - spaces["cm:edu_metadataset"] = mdsId + mds_id = env.get("EDU_SHARING_METADATASET", allow_null=True, default="mds_oeh") + if mds_id != "default": + spaces["cm:edu_metadataset"] = mds_id spaces["cm:edu_forcemetadataset"] = "true" - log.debug("Using metadataset " + mdsId) + log.debug("Using metadataset " + mds_id) else: log.debug("Using default metadataset") @@ -643,7 +643,7 @@ def create_groups_if_not_exists(self, groups, type: CreateGroupType): log.info("Group " + uuid + " was found in edu-sharing (cache inconsistency), no need to create") EduSharing.groupCache.append(uuid) continue - except ApiException as e: + except ApiException: log.info("Group " + uuid + " was not found in edu-sharing, creating it") pass @@ -691,14 +691,14 @@ def set_node_permissions(self, uuid, item): # if not 'groups' in item['permissions'] and not 'mediacenters' in item['permissions']: # log.error('Invalid state detected: Permissions public is set to false 
but neither groups or mediacenters are set. Please use either public = true without groups/mediacenters or public = false and set group/mediacenters. No permissions will be set!') # return - mergedGroups = [] + merged_groups = [] if "groups" in item["permissions"]: if "autoCreateGroups" in item["permissions"] and item["permissions"]["autoCreateGroups"] is True: self.create_groups_if_not_exists( item["permissions"]["groups"], EduSharing.CreateGroupType.Regular, ) - mergedGroups += list( + merged_groups += list( map( lambda x: EduSharingConstants.GROUP_PREFIX + x, item["permissions"]["groups"], @@ -713,7 +713,7 @@ def set_node_permissions(self, uuid, item): item["permissions"]["mediacenters"], EduSharing.CreateGroupType.MediaCenter, ) - mergedGroups += list( + merged_groups += list( map( lambda x: EduSharingConstants.GROUP_PREFIX + EduSharingConstants.MEDIACENTER_PROXY_PREFIX @@ -721,7 +721,7 @@ def set_node_permissions(self, uuid, item): item["permissions"]["mediacenters"], ) ) - for group in mergedGroups: + for group in merged_groups: permissions["permissions"].append( { "authority": { From 5a9dc0483f6d0a8e26d52eeacf8b77df77dff70f Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 26 Sep 2024 13:43:57 +0200 Subject: [PATCH 578/590] DMED-119 - fix merlin_spider issues --- converter/spiders/merlin_spider.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 10f9a6a5..53fa71a9 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -4,6 +4,7 @@ from lxml import etree from scrapy.spiders import CrawlSpider from converter.items import * +from converter.web_tools import WebEngine from .base_classes import LomBase import scrapy @@ -41,7 +42,7 @@ async def parse(self, response: scrapy.http.Response): print("Parsing URL: " + response.url) # Call Splash only once per page (that contains multiple XML elements). - data = await self.getUrlData(response.url) + data = await LomBase.getUrlData(self, response.url) response.meta["rendered_data"] = data # We would use .fromstring(response.text) if the response did not include the XML declaration: @@ -75,7 +76,7 @@ async def parse(self, response: scrapy.http.Response): copyResponse._set_body(element_xml_str) if self.hasChanged(copyResponse): - yield self.handleEntry(copyResponse) + yield await self.handleEntry(copyResponse) # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. await LomBase.parse(self, copyResponse) @@ -105,7 +106,7 @@ def getHash(self, response): """ Since we have no 'last_modified' date from the elements we cannot do something better. Therefore, the current implementation takes into account (1) the code version, (2) the item's ID, and (3) the date (day, month, year). """ - return ( + return str( hash(self.version) + hash(self.getId(response)) + self._date_to_integer(datetime.date(datetime.now())) @@ -116,7 +117,7 @@ def _date_to_integer(self, dt_time): Using prime numbers for less collisions. 
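         (Worked example for an assumed date 2024-09-26: 9973 * 2024 + 97 * 9 + 26 = 20186251.)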
""" return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day - def mapResponse(self, response): + async def mapResponse(self, response): r = ResponseItemLoader(response=response) r.add_value("status", response.status) r.add_value("headers", response.headers) From b683869487569efbccac31ee416ecf44bed3a31c Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 13:49:33 +0200 Subject: [PATCH 579/590] fix missing awaits and getHash() - after refactoring LomBase, some method calls in merlin_spider were missing awaits and async declarations - fix ValidationError for getHash() method: - getHash used to submit an 'int'-value to the edu-sharing API, but the API actually expects a string value - optimized imports PS: thanks Constantin (@bergatco) for the debug logs! --- converter/spiders/merlin_spider.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/converter/spiders/merlin_spider.py b/converter/spiders/merlin_spider.py index 8c3dd16b..868e43ec 100644 --- a/converter/spiders/merlin_spider.py +++ b/converter/spiders/merlin_spider.py @@ -1,11 +1,13 @@ from datetime import datetime +import scrapy import xmltodict as xmltodict from lxml import etree from scrapy.spiders import CrawlSpider + from converter.items import * +from converter.web_tools import WebEngine from .base_classes import LomBase -import scrapy class MerlinSpider(CrawlSpider, LomBase): @@ -20,6 +22,9 @@ class MerlinSpider(CrawlSpider, LomBase): url = "https://merlin.nibis.de/index.php" # the url which will be linked as the primary link to your source (should be the main url of your site) friendlyName = "Merlin" # name as shown in the search ui version = "0.1" # the version of your crawler, used to identify if a reimport is necessary + custom_settings = { + "WEB_TOOLS": WebEngine.Playwright, + } apiUrl = "https://merlin.nibis.de/index.php?action=resultXml&start=%start&anzahl=%anzahl&query[stichwort]=*" # * regular expression, to represent all possible values. limit = 100 @@ -75,7 +80,7 @@ async def parse(self, response: scrapy.http.Response): copyResponse._set_body(element_xml_str) if self.hasChanged(copyResponse): - yield self.handleEntry(copyResponse) + yield await self.handleEntry(copyResponse) # LomBase.parse() has to be called for every individual instance that needs to be saved to the database. await LomBase.parse(self, copyResponse) @@ -105,7 +110,7 @@ def getHash(self, response): """ Since we have no 'last_modified' date from the elements we cannot do something better. Therefore, the current implementation takes into account (1) the code version, (2) the item's ID, and (3) the date (day, month, year). """ - return ( + return str( hash(self.version) + hash(self.getId(response)) + self._date_to_integer(datetime.date(datetime.now())) @@ -116,7 +121,7 @@ def _date_to_integer(self, dt_time): Using prime numbers for less collisions. 
""" return 9973 * dt_time.year + 97 * dt_time.month + dt_time.day - def mapResponse(self, response): + async def mapResponse(self, response): r = ResponseItemLoader(response=response) r.add_value("status", response.status) r.add_value("headers", response.headers) From dd6f0820a6ce911dd1dd3c0c57b443391ff01328 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 14:23:57 +0200 Subject: [PATCH 580/590] add merlin_spider pyCharm runConfiguration --- .run/merlin_spider.run.xml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .run/merlin_spider.run.xml diff --git a/.run/merlin_spider.run.xml b/.run/merlin_spider.run.xml new file mode 100644 index 00000000..2dcacb1f --- /dev/null +++ b/.run/merlin_spider.run.xml @@ -0,0 +1,26 @@ + + + + + + \ No newline at end of file From e5e2446084466552199a7c2b86c1018773567b81 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 26 Sep 2024 14:39:28 +0200 Subject: [PATCH 581/590] DMED-119 - fixed `delete_too_many_children` function in `h5p_upload` --- .env.local | 49 ++++++++++++++++++++++++++++++++++++++++ .python-version | 1 + docker-compose.yml | 27 ---------------------- schulcloud/h5p/upload.py | 22 ++++++++---------- 4 files changed, 60 insertions(+), 39 deletions(-) create mode 100644 .env.local create mode 100644 .python-version diff --git a/.env.local b/.env.local new file mode 100644 index 00000000..a627d2d2 --- /dev/null +++ b/.env.local @@ -0,0 +1,49 @@ +# Add a url for your log file. If not set, stdoutput will be used +#LOG_FILE="/var/log/scrapy.log" + +# Level for logs, supported DEBUG, INFO, WARNING, ERROR +LOG_LEVEL="DEBUG" + +# MODE (edu-sharing, csv, json, or None) +MODE="edu-sharing" + +# csv rows to export from dataset (comma seperated, only used if mode == "csv") +CSV_ROWS="lom.general.title,lom.general.description,lom.general.keyword,lom.technical.location,valuespaces.discipline,valuespaces.learningResourceType" + +# Splash Integration settings for the local container, +# for more information, see https://splash.readthedocs.io/en/stable/ +DISABLE_SPLASH=False +SPLASH_URL="http://localhost:8050" + +# PYPPETEER Integration settings, as needed for the local container (as used in kmap_spider.py) +# for more information, see: https://github.com/pyppeteer/pyppeteer +PYPPETEER_WS_ENDPOINT="ws://localhost:3000" + +# Edu-Sharing instance that the crawlers should upload to +EDU_SHARING_BASE_URL="http://localhost:8100/edu-sharing/" +EDU_SHARING_USERNAME="admin" +EDU_SHARING_PASSWORD="admin" + +# Edu-Sharing instance that the permission script uses (different users needed due to different file locations in Edu-Sharing) +EDU_SHARING_BASE_URL="http://localhost:8100/edu-sharing/" +EDU_SHARING_USERNAME_ADMIN="admin" +EDU_SHARING_PASSWORD_ADMIN="admin" +EDU_SHARING_USERNAME_CRAWLER="admin" +EDU_SHARING_PASSWORD_CRAWLER="admin" + +# If set to true, don't upload to (above mentioned) Edu-Sharing instance +DRY_RUN=false + +# your youtube api key (required for youtube crawler) +YOUTUBE_API_KEY="" + +# only for oeh spider: select the sources you want to fetch from oeh (comma seperated) +# OEH_IMPORT_SOURCES='oeh,wirlernenonline_spider,serlo_spider,youtube_spider' + +# for sodix spider +# SODIX_USER="" +# SODIX_PASSWORD="" +SODIX_DOWNLOAD_DELAY=0.35 + +# local path for html_parser.py +LOCAL_PATH="local/path/index.html" \ No newline at end of file diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..d9506ceb --- /dev/null +++ 
b/.python-version @@ -0,0 +1 @@ +3.12.5 diff --git a/docker-compose.yml b/docker-compose.yml index 778bf5cf..45605d6d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,8 +10,6 @@ services: restart: always ports: - "127.0.0.1:8050:8050" - networks: - - scrapy healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8050/_ping"] interval: 30s @@ -25,28 +23,3 @@ services: - TIMEOUT=120000 ports: - "127.0.0.1:3000:3000" - networks: - - scrapy - scrapy: - # extra_hosts is only required if your need to access an edu-sharing instance on the host that runs docker - # host.docker.internal points to the ip address of the host docker network interface - extra_hosts: - host.docker.internal: host-gateway - image: openeduhub/oeh-search-etl:develop - build: - context: . - network: host - networks: - - scrapy - environment: - - "PYPPETEER_WS_ENDPOINT=ws://headless_chrome:3000" - - "PLAYWRIGHT_WS_ENDPOINT=ws://headless_chrome:3000" - - "SPLASH_URL=http://splash:8050" - - "CRAWLER=${CRAWLER}" - # optional keyword args, e.g. cleanrun=true - - "ARGS=${ARGS}" - - "DRY_RUN=False" - - "LOG_LEVEL=${LOG_LEVEL:-INFO}" - - "EDU_SHARING_BASE_URL=${EDU_SHARING_BASE_URL}" - - "EDU_SHARING_USERNAME=${EDU_SHARING_USERNAME}" - - "EDU_SHARING_PASSWORD=${EDU_SHARING_PASSWORD}" \ No newline at end of file diff --git a/schulcloud/h5p/upload.py b/schulcloud/h5p/upload.py index 5bc948e7..a1aedec5 100644 --- a/schulcloud/h5p/upload.py +++ b/schulcloud/h5p/upload.py @@ -227,18 +227,16 @@ def get_es_collection_children(self, collection: Node): def delete_too_many_children(self, collection_node: Node, collection: Collection): es_children = self.get_es_collection_children(collection_node) for es_child in es_children: - es_child_node = self.api.search_custom('ccm:replicationsourceuuid', es_child) - if len(es_child_node) > 1: - raise FoundTooManyException(es_child) - es_child_node = es_child_node[0] - delete_child = True - for child in collection.children: - if es_child_node.name == child.filepath: - delete_child = False - break - if delete_child: - print(f'Update Collection {collection.name}. Delete children: {es_child_node.name}') - self.api.delete_node(es_child_node.id) + es_child_nodes = self.api.search_custom('ccm:replicationsourceuuid', es_child) + for es_child_node in es_child_nodes: + delete_child = True + for child in collection.children: + if es_child_node.name == child.filepath: + delete_child = False + break + if delete_child: + print(f'Update Collection {collection.name}. 
Delete children: {es_child_node.name}') + self.api.delete_node(es_child_node.id) def setup_destination_folder(self, folder_name: str): """ From 993aa2f2d89f74e7d18e4594b07f767a3fc6c642 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 26 Sep 2024 14:54:15 +0200 Subject: [PATCH 582/590] DMED-119 - (hopefully) fix await issue in `mediothek_pixiothek_spider` --- .python-version | 1 - converter/spiders/mediothek_pixiothek_spider.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 100644 .python-version diff --git a/.python-version b/.python-version deleted file mode 100644 index d9506ceb..00000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.12.5 diff --git a/converter/spiders/mediothek_pixiothek_spider.py b/converter/spiders/mediothek_pixiothek_spider.py index 79316b46..c139b302 100644 --- a/converter/spiders/mediothek_pixiothek_spider.py +++ b/converter/spiders/mediothek_pixiothek_spider.py @@ -63,7 +63,7 @@ def getHash(self, response): # date_object = datetime.strptime(hash, "%Y-%m-%d %H:%M:%S.%f").date() return element_id + element_timestamp - def mapResponse(self, response): + async def mapResponse(self, response): r = ResponseItemLoader(response=response) r.add_value("status", response.status) r.add_value("headers", response.headers) From 62c7e504cc4e03b50a71c54253d5d7a52515b746 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 26 Sep 2024 14:54:56 +0200 Subject: [PATCH 583/590] DMED-119 - remove `env.local` --- .env.local | 49 ------------------------------------------------- 1 file changed, 49 deletions(-) delete mode 100644 .env.local diff --git a/.env.local b/.env.local deleted file mode 100644 index a627d2d2..00000000 --- a/.env.local +++ /dev/null @@ -1,49 +0,0 @@ -# Add a url for your log file. 
If not set, stdoutput will be used -#LOG_FILE="/var/log/scrapy.log" - -# Level for logs, supported DEBUG, INFO, WARNING, ERROR -LOG_LEVEL="DEBUG" - -# MODE (edu-sharing, csv, json, or None) -MODE="edu-sharing" - -# csv rows to export from dataset (comma seperated, only used if mode == "csv") -CSV_ROWS="lom.general.title,lom.general.description,lom.general.keyword,lom.technical.location,valuespaces.discipline,valuespaces.learningResourceType" - -# Splash Integration settings for the local container, -# for more information, see https://splash.readthedocs.io/en/stable/ -DISABLE_SPLASH=False -SPLASH_URL="http://localhost:8050" - -# PYPPETEER Integration settings, as needed for the local container (as used in kmap_spider.py) -# for more information, see: https://github.com/pyppeteer/pyppeteer -PYPPETEER_WS_ENDPOINT="ws://localhost:3000" - -# Edu-Sharing instance that the crawlers should upload to -EDU_SHARING_BASE_URL="http://localhost:8100/edu-sharing/" -EDU_SHARING_USERNAME="admin" -EDU_SHARING_PASSWORD="admin" - -# Edu-Sharing instance that the permission script uses (different users needed due to different file locations in Edu-Sharing) -EDU_SHARING_BASE_URL="http://localhost:8100/edu-sharing/" -EDU_SHARING_USERNAME_ADMIN="admin" -EDU_SHARING_PASSWORD_ADMIN="admin" -EDU_SHARING_USERNAME_CRAWLER="admin" -EDU_SHARING_PASSWORD_CRAWLER="admin" - -# If set to true, don't upload to (above mentioned) Edu-Sharing instance -DRY_RUN=false - -# your youtube api key (required for youtube crawler) -YOUTUBE_API_KEY="" - -# only for oeh spider: select the sources you want to fetch from oeh (comma seperated) -# OEH_IMPORT_SOURCES='oeh,wirlernenonline_spider,serlo_spider,youtube_spider' - -# for sodix spider -# SODIX_USER="" -# SODIX_PASSWORD="" -SODIX_DOWNLOAD_DELAY=0.35 - -# local path for html_parser.py -LOCAL_PATH="local/path/index.html" \ No newline at end of file From 090da20f0ebf327e26e2eab1657e29ee8f9f2564 Mon Sep 17 00:00:00 2001 From: Constantin Bergatt Date: Thu, 26 Sep 2024 15:32:19 +0200 Subject: [PATCH 584/590] DMED-119 - fix validation error for "cclom:duration" --- converter/es_connector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 3a6c86ee..b26974db 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -431,7 +431,7 @@ def transform_item(self, uuid, spider, item): f"seconds to milliseconds. 
('cclom:duration' expects ms)" ) pass - spaces["cclom:duration"] = duration + spaces["cclom:duration"] = str(duration) if "format" in item["lom"]["technical"]: spaces["cclom:format"] = item["lom"]["technical"]["format"] if "location" in item["lom"]["technical"]: From af3abe4a50737c23e76dbba64b6190e315be6db7 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 18:11:06 +0200 Subject: [PATCH 585/590] fix: ValidationError during handling of "cclom:duration"-values in es_connector (expected type: str) - feat: additional check in the pipelines for "lom.technical.duration"-values and conversion to string --- converter/es_connector.py | 9 ++++++--- converter/pipelines.py | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index 4939b922..e3a02fef 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -412,16 +412,19 @@ def transform_item(self, uuid, spider, item): if "technical" in item["lom"]: if "duration" in item["lom"]["technical"]: - duration = item["lom"]["technical"]["duration"] + duration: str | int | None = item["lom"]["technical"]["duration"] + # after passing through the pipelines, the duration value should be in seconds try: - # edusharing requires milliseconds + # edu-sharing requires values to be in milliseconds: duration = int(float(duration) * 1000) + # the edu-sharing API expects values to be wrapped in a string, + # otherwise pydantic throws ValidationErrors during POST requests: + duration = str(duration) except ValueError: log.debug( f"The supplied 'technical.duration'-value {duration} could not be converted from " f"seconds to milliseconds. ('cclom:duration' expects ms)" ) - pass spaces["cclom:duration"] = duration if "format" in item["lom"]["technical"]: spaces["cclom:format"] = item["lom"]["technical"]["format"] diff --git a/converter/pipelines.py b/converter/pipelines.py index a7e0f7fa..3201c683 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -988,6 +988,15 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr keywords: list[str] | set[str] | None = lom_general["keyword"] if keywords and isinstance(keywords, set): lom_general["keyword"] = list(keywords) + if "technical" in item_adapter["lom"]: + lom_technical: dict = item_adapter["lom"]["technical"] + if "duration" in lom_technical: + duration: int | str | None = lom_technical["duration"] + # after already passing through the ConvertTimePipeline, + # the duration value should be an Integer (seconds) + if duration and isinstance(duration, int): + # the edu-sharing API expects values to be wrapped in a string + lom_technical["duration"] = str(duration) return item From b75e140120d133a6e84c72f8cbad38d08c399ccc Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 18:13:30 +0200 Subject: [PATCH 586/590] fix: add missing 'license.internal' mapping for "NONPUBLIC" licenses --- converter/es_connector.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/converter/es_connector.py b/converter/es_connector.py index e3a02fef..ed901354 100644 --- a/converter/es_connector.py +++ b/converter/es_connector.py @@ -334,7 +334,12 @@ def map_license(self, spaces, license): ) if "internal" in license: match license["internal"]: - case "CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM" | Constants.LICENSE_COPYRIGHT_FREE | 
Constants.LICENSE_COPYRIGHT_LAW | Constants.LICENSE_SCHULFUNK | Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN: + case ("CC_0" | "CC_BY" | "CC_BY_NC" | "CC_BY_NC_ND" | "CC_BY_NC_SA" | "CC_BY_ND" | "CC_BY_SA" | "PDM" | + Constants.LICENSE_COPYRIGHT_FREE | + Constants.LICENSE_COPYRIGHT_LAW | + Constants.LICENSE_SCHULFUNK | + Constants.LICENSE_UNTERRICHTS_UND_SCHULMEDIEN | + Constants.LICENSE_NONPUBLIC): spaces["ccm:commonlicense_key"] = license["internal"] case Constants.LICENSE_CUSTOM: spaces["ccm:commonlicense_key"] = "CUSTOM" From ffc9f75b3a9cee234497bd1971654ead2cabed51 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 18:36:49 +0200 Subject: [PATCH 587/590] feat: convert "BaseItem.hash"-values to a string background: - some old crawlers (e.g. merlin_spider) returned an Integer value in their "getHash()"-method, which now causes pydantic ValidationErrors with the new API client (since the API expects a string value for this property) --- converter/pipelines.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 3201c683..6633f4db 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -960,6 +960,11 @@ class EduSharingTypeValidationPipeline(BasicPipeline): # ToDo: if you notice pydantic "ValidationError"s during crawls, implement handling of those edge-cases here! def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: item_adapter = ItemAdapter(item) + if "hash" in item_adapter: + hash_value: int | str | None = item_adapter["hash"] + if hash_value and isinstance(hash_value, int): + # old crawlers might have returned hash values as integers, but the API expects a string + item_adapter["hash"] = str(hash_value) if "course" in item_adapter: course_item: dict = item_adapter["course"] if "course_duration" in course_item: From 084804fb470d66167f21e671abc8945c9f6d7186 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 19:47:54 +0200 Subject: [PATCH 588/590] fix: convert LOM General aggregationLevel int values to str - this fixes pydantic ValidationErrors in the es_connector during POST requests --- converter/pipelines.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/converter/pipelines.py b/converter/pipelines.py index 6633f4db..bfd2ab9e 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -989,6 +989,10 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr lom_educational["typicalAgeRange"]["toRange"] = str(to_range) if "general" in item_adapter["lom"]: lom_general: dict = item_adapter["lom"]["general"] + if "aggregationLevel" in lom_general: + aggregation_level: int | str | None = lom_general["aggregationLevel"] + if aggregation_level and isinstance(aggregation_level, int): + lom_general["aggregationLevel"] = str(aggregation_level) if "keyword" in lom_general: keywords: list[str] | set[str] | None = lom_general["keyword"] if keywords and isinstance(keywords, set): From 879875ad995594228555d0965f2252a6cec54ac6 Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 20:25:19 +0200 Subject: [PATCH 589/590] improve robustness of website-screenshot fallback in the thumbnail-pipeline - fix AttributeErrors when a website-screenshot fails: - if the first website-screenshot fails (e.g. 
when the first URL in LOM technical location points towards a .mp3 or .mp4), screenshot_bytes won't be available to work with - this caused an AttributeError when the pipeline tried to convert a "None"-type to a thumbnail - refactor: extracted the functionality of taking a website screenshot with playwright in the pipeline into its own method - feat: second website screenshot fallback - if a second URL is available in LOM technical location, the pipeline will try to take a screenshot of that URL before finally giving up - this could happen in edge-cases where the first URL in LOM technical location is unavailable for website-screenshots, but the array contains a second URL that might be more fruitful for a screenshot (e.g. a landing page) - fix: 2 warnings w.r.t. too broad exception clauses --- converter/pipelines.py | 68 ++++++++++++++++++++++++++++++------------ 1 file changed, 49 insertions(+), 19 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index bfd2ab9e..5c6c697b 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -265,11 +265,11 @@ def process_item(self, raw_item, spider): if "lastModified" in item: try: item["lastModified"] = float(item["lastModified"]) - except: + except ValueError: try: date = dateutil.parser.parse(item["lastModified"]) item["lastModified"] = int(date.timestamp()) - except: + except ValueError: log.warning( "Unable to parse given lastModified date " + item["lastModified"] @@ -744,25 +744,39 @@ async def process_item(self, raw_item, spider): # this edge-case is necessary for spiders that only need playwright to gather a screenshot, # but don't use playwright within the spider itself + lom_technical_location: list[str] | None = item["lom"]["technical"]["location"] target_url: str = item["lom"]["technical"]["location"][0] - playwright_cookies = None - playwright_adblock_enabled = False - if spider.custom_settings: - # some spiders might require setting specific cookies to take "clean" website screenshots - # (= without cookie banners or ads). - if "PLAYWRIGHT_COOKIES" in spider.custom_settings: - playwright_cookies = spider.custom_settings.get("PLAYWRIGHT_COOKIES") - if "PLAYWRIGHT_ADBLOCKER" in spider.custom_settings: - playwright_adblock_enabled: bool = spider.custom_settings["PLAYWRIGHT_ADBLOCKER"] - - playwright_dict = await WebTools.getUrlData(url=target_url, - engine=WebEngine.Playwright, - cookies=playwright_cookies, - adblock=playwright_adblock_enabled) - screenshot_bytes = playwright_dict.get("screenshot_bytes") - img = Image.open(BytesIO(screenshot_bytes)) - self.create_thumbnails_from_image_bytes(img, item, settings_crawler) + playwright_dict = await self.take_website_screenshot_with_playwright( + spider=spider, + target_url=target_url) + try: + screenshot_bytes: bytes | None = playwright_dict.get("screenshot_bytes") + except AttributeError: + screenshot_bytes = None + log.debug(f"Failed fallback #1: taking a website-screenshot of URL " + f"{target_url} wasn't possible!") + if lom_technical_location and isinstance(lom_technical_location, list) and len( + lom_technical_location) >= 2: + # this edge-case might happen during crawls of items with multiple URLs: + # the first URL might be a direct-link to an audio/video file (example: podcast episode as .mp3) + # while the second URL might point towards the webpage of said podcast episode + target_url_2nd: str = lom_technical_location[1] + if target_url_2nd and isinstance(target_url_2nd, str): + log.debug(f"Second URL in LOM Technical Location detected. 
" + f"Trying to take a website-screenshot of {lom_technical_location[1]} (fallback #2)...") + playwright_dict = await self.take_website_screenshot_with_playwright( + spider=spider, + target_url=target_url_2nd) + try: + screenshot_bytes: bytes | None = playwright_dict.get("screenshot_bytes") + except AttributeError: + screenshot_bytes = None + log.warning(f"Failed fallback #2: taking a website-screenshot of URL " + f"{target_url_2nd} wasn't possible!") + if screenshot_bytes: + img = Image.open(BytesIO(screenshot_bytes)) + self.create_thumbnails_from_image_bytes(img, item, settings_crawler) else: if settings_crawler.get("DISABLE_SPLASH") is False: log.warning( @@ -832,6 +846,22 @@ async def process_item(self, raw_item, spider): ) return raw_item + async def take_website_screenshot_with_playwright(self, spider: scrapy.Spider, target_url: str): + playwright_cookies = None + playwright_adblock_enabled = False + if spider.custom_settings: + # some spiders might require setting specific cookies to take "clean" website screenshots + # (= without cookie banners or ads). + if "PLAYWRIGHT_COOKIES" in spider.custom_settings: + playwright_cookies = spider.custom_settings.get("PLAYWRIGHT_COOKIES") + if "PLAYWRIGHT_ADBLOCKER" in spider.custom_settings: + playwright_adblock_enabled: bool = spider.custom_settings["PLAYWRIGHT_ADBLOCKER"] + playwright_dict = await WebTools.getUrlData(url=target_url, + engine=WebEngine.Playwright, + cookies=playwright_cookies, + adblock=playwright_adblock_enabled) + return playwright_dict + @alru_cache(maxsize=128) async def download_thumbnail_url(self, url: str, spider: scrapy.Spider): """ From d453684088b36bd2722123947761c7d1932feb8d Mon Sep 17 00:00:00 2001 From: criamos <981166+Criamos@users.noreply.github.com> Date: Thu, 26 Sep 2024 20:40:22 +0200 Subject: [PATCH 590/590] style: code formatting via black --- converter/pipelines.py | 509 +++++++++++++++++++++++------------------ 1 file changed, 285 insertions(+), 224 deletions(-) diff --git a/converter/pipelines.py b/converter/pipelines.py index 5c6c697b..8b567be9 100644 --- a/converter/pipelines.py +++ b/converter/pipelines.py @@ -3,6 +3,7 @@ from __future__ import annotations import base64 + # Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting @@ -90,7 +91,7 @@ def close_spider(self, spider: scrapy.Spider) -> None: class PipelineWithFactoryMethod(metaclass=ABCMeta): @classmethod - def from_crawler(cls, crawler: scrapy.crawler.Crawler) -> 'PipelineWithFactoryMethod': + def from_crawler(cls, crawler: scrapy.crawler.Crawler) -> "PipelineWithFactoryMethod": """ If present, this classmethod is called to create a pipeline instance from a :class:`~scrapy.crawler.Crawler`. 
It must return a new instance @@ -121,18 +122,16 @@ def process_item(self, raw_item, spider): item = ItemAdapter(raw_item) try: if "title" not in item["lom"]["general"]: - raise DropItem( - "Entry {} has no title location".format(item["sourceId"]) - ) + raise DropItem("Entry {} has no title location".format(item["sourceId"])) except KeyError: - raise DropItem(f'Item {item} has no lom.technical.location') + raise DropItem(f"Item {item} has no lom.technical.location") try: if "location" not in item["lom"]["technical"] and "binary" not in item: raise DropItem( "Entry {} has no technical location or binary data".format(item["lom"]["general"]["title"]) ) except KeyError: - raise DropItem(f'Item {item} has no lom.technical.location') + raise DropItem(f"Item {item} has no lom.technical.location") # pass through explicit uuid elements if "uuid" in item: return raw_item @@ -157,13 +156,9 @@ def process_item(self, raw_item, spider): # if none of the above matches drop the item try: - raise DropItem( - "Entry " - + item["lom"]["general"]["title"] - + " has neither keywords nor description" - ) + raise DropItem("Entry " + item["lom"]["general"]["title"] + " has neither keywords nor description") except KeyError: - raise DropItem(f'Item {item} was dropped for not providing enough metadata') + raise DropItem(f"Item {item} was dropped for not providing enough metadata") class NormLanguagePipeline(BasicPipeline): @@ -203,8 +198,7 @@ def process_item(self, raw_item, spider): item["license"]["url"] = Constants.LICENSE_MAPPINGS[key] break if "internal" in item["license"] and ( - "url" not in item["license"] - or item["license"]["url"] not in Constants.VALID_LICENSE_URLS + "url" not in item["license"] or item["license"]["url"] not in Constants.VALID_LICENSE_URLS ): for key in Constants.LICENSE_MAPPINGS_INTERNAL: if item["license"]["internal"].casefold() == key.casefold(): @@ -214,17 +208,19 @@ def process_item(self, raw_item, spider): if "url" in item["license"] and "oer" not in item["license"]: match item["license"]["url"]: - case Constants.LICENSE_CC_BY_10 | \ - Constants.LICENSE_CC_BY_20 | \ - Constants.LICENSE_CC_BY_25 | \ - Constants.LICENSE_CC_BY_30 | \ - Constants.LICENSE_CC_BY_40 | \ - Constants.LICENSE_CC_BY_SA_10 | \ - Constants.LICENSE_CC_BY_SA_20 | \ - Constants.LICENSE_CC_BY_SA_25 | \ - Constants.LICENSE_CC_BY_SA_30 | \ - Constants.LICENSE_CC_BY_SA_40 | \ - Constants.LICENSE_CC_ZERO_10: + case ( + Constants.LICENSE_CC_BY_10 + | Constants.LICENSE_CC_BY_20 + | Constants.LICENSE_CC_BY_25 + | Constants.LICENSE_CC_BY_30 + | Constants.LICENSE_CC_BY_40 + | Constants.LICENSE_CC_BY_SA_10 + | Constants.LICENSE_CC_BY_SA_20 + | Constants.LICENSE_CC_BY_SA_25 + | Constants.LICENSE_CC_BY_SA_30 + | Constants.LICENSE_CC_BY_SA_40 + | Constants.LICENSE_CC_ZERO_10 + ): item["license"]["oer"] = OerType.ALL case _: # ToDo: log default case if not too spammy @@ -247,8 +243,10 @@ def process_item(self, raw_item, spider): # happy-case: the 'date' property is of type datetime pass elif lifecycle_date: - log.warning(f"Lifecycle Pipeline received invalid 'date'-value: {lifecycle_date} !" - f"Expected type 'str' or 'datetime', but received: {type(lifecycle_date)} instead.") + log.warning( + f"Lifecycle Pipeline received invalid 'date'-value: {lifecycle_date} !" + f"Expected type 'str' or 'datetime', but received: {type(lifecycle_date)} instead." 
+ ) return raw_item @@ -270,17 +268,13 @@ def process_item(self, raw_item, spider): date = dateutil.parser.parse(item["lastModified"]) item["lastModified"] = int(date.timestamp()) except ValueError: - log.warning( - "Unable to parse given lastModified date " - + item["lastModified"] - ) + log.warning("Unable to parse given lastModified date " + item["lastModified"]) del item["lastModified"] if "typicalLearningTime" in item["lom"]["educational"]: tll_raw = item["lom"]["educational"]["typicalLearningTime"] - tll_duration_in_seconds = ( - determine_duration_and_convert_to_seconds(time_raw=tll_raw, - item_field_name="LomEducationalItem.typicalLearningTime") + tll_duration_in_seconds = determine_duration_and_convert_to_seconds( + time_raw=tll_raw, item_field_name="LomEducationalItem.typicalLearningTime" ) # ToDo: update es_connector and connect this property with the backend item["lom"]["educational"]["typicalLearningTime"] = tll_duration_in_seconds @@ -289,14 +283,13 @@ def process_item(self, raw_item, spider): if "duration" in item["lom"]["technical"]: raw_duration = item["lom"]["technical"]["duration"] duration_in_seconds = determine_duration_and_convert_to_seconds( - time_raw=raw_duration, - item_field_name="LomTechnicalItem.duration") + time_raw=raw_duration, item_field_name="LomTechnicalItem.duration" + ) item["lom"]["technical"]["duration"] = duration_in_seconds return raw_item -def determine_duration_and_convert_to_seconds(time_raw: str | int | float, - item_field_name: str) -> int | None: +def determine_duration_and_convert_to_seconds(time_raw: str | int | float, item_field_name: str) -> int | None: """ Tries to convert "duration"-objects (of unknown type) to seconds. Returns the converted duration as(as total seconds) int value if successful @@ -316,14 +309,12 @@ def determine_duration_and_convert_to_seconds(time_raw: str | int | float, # handling of "hh:mm:ss"-durations: t_split: list[str] = time_raw.split(":") if len(t_split) == 3: - time_in_seconds = ( - int(t_split[0]) * 60 * 60 - + int(t_split[1]) * 60 - + int(t_split[2]) - ) + time_in_seconds = int(t_split[0]) * 60 * 60 + int(t_split[1]) * 60 + int(t_split[2]) else: - log.warning(f"Encountered unhandled edge-case in '{item_field_name}': " - f"Expected format 'hh:mm:ss', but received {time_raw} instead.") + log.warning( + f"Encountered unhandled edge-case in '{item_field_name}': " + f"Expected format 'hh:mm:ss', but received {time_raw} instead." + ) if time_raw.startswith("P"): # handling of iso-formatted duration strings, e.g. "P14DT22H" or "P7W" # (see: https://en.wikipedia.org/wiki/ISO_8601#Durations) @@ -336,16 +327,20 @@ def determine_duration_and_convert_to_seconds(time_raw: str | int | float, # timedelta object can't handle conversion from months to .total_seconds() # see: https://github.com/gweis/isodate/issues/44 # and https://docs.python.org/3/library/datetime.html#datetime.timedelta - log.warning(f"Unhandled value detected: Cannot transform {time_raw} to total seconds!" - f"(months (M) or years (Y) aren't standardized duration units)") + log.warning( + f"Unhandled value detected: Cannot transform {time_raw} to total seconds!" 
+ f"(months (M) or years (Y) aren't standardized duration units)" + ) time_in_seconds = None # ToDo: choose an acceptable solution # 1) either approximate the total seconds (inaccurate: "P6M" becomes 6 x 4W = 24W) # -> this would require RegEx parsing and string replacement of the month/year parts # 2) or keep the string representation AND find a better suited edu-sharing property for durations else: - log.warning(f"Encountered unhandled edge-case in '{item_field_name}': " - f"Expected ISO-8601 duration string, but received {time_raw} instead.") + log.warning( + f"Encountered unhandled edge-case in '{item_field_name}': " + f"Expected ISO-8601 duration string, but received {time_raw} instead." + ) if "." in time_raw and time_raw.count(".") == 1: # duration strings might come with float precision (e.g. "600.0" for 10 Minutes) try: @@ -353,30 +348,37 @@ def determine_duration_and_convert_to_seconds(time_raw: str | int | float, if seconds_float: time_in_seconds = int(seconds_float) except ValueError: - log.warning( - f"Unable to convert string {time_raw} (type: {type(time_raw)}) to 'int'-value (seconds).") + log.warning(f"Unable to convert string {time_raw} (type: {type(time_raw)}) to 'int'-value (seconds).") if time_raw.isnumeric(): try: time_in_seconds = int(time_raw) except ValueError: - log.warning(f"Unable to convert 'duration'-value {time_raw} (type ({type(time_raw)}) " - f"to 'int'-value (seconds).") + log.warning( + f"Unable to convert 'duration'-value {time_raw} (type ({type(time_raw)}) " + f"to 'int'-value (seconds)." + ) # ToDo (optional): implement processing of natural language strings? (e.g. "12 Stunden") # - this feature would need a rigorous testing suite for common expressions (English and German strings) else: try: time_in_seconds = int(time_raw) except ValueError: - log.warning(f"'duration' value {time_raw} could not be normalized to seconds. " - f"(Unhandled edge-case: Expected int or float value, " - f"but received {type(time_raw)} instead.") + log.warning( + f"'duration' value {time_raw} could not be normalized to seconds. " + f"(Unhandled edge-case: Expected int or float value, " + f"but received {type(time_raw)} instead." + ) if not time_in_seconds: if isinstance(time_in_seconds, int) and time_in_seconds == 0: - log.debug(f"Detected zero duration for '{item_field_name}'. " - f"Received raw value: {time_raw} of type {type(time_raw)} .") + log.debug( + f"Detected zero duration for '{item_field_name}'. " + f"Received raw value: {time_raw} of type {type(time_raw)} ." + ) else: - log.warning(f"Unable to convert '{item_field_name}'-value (type: {type(time_raw)}) from {time_raw} " - f"to numeric value (seconds).") + log.warning( + f"Unable to convert '{item_field_name}'-value (type: {type(time_raw)}) from {time_raw} " + f"to numeric value (seconds)." + ) return time_in_seconds @@ -400,14 +402,18 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr caf_iso: str = caf_parsed.isoformat() course_adapter["course_availability_from"] = caf_iso else: - log.warning(f"Failed to parse \"course_availability_from\"-property " - f"\"{course_availability_from}\" to a valid \"datetime\"-object. \n" - f"(Please check the object {item_adapter['sourceId']} " - f"or extend the CourseItemPipeline!)") + log.warning( + f'Failed to parse "course_availability_from"-property ' + f'"{course_availability_from}" to a valid "datetime"-object. 
\n' + f"(Please check the object {item_adapter['sourceId']} " + f"or extend the CourseItemPipeline!)" + ) del course_adapter["course_availability_from"] else: - log.warning(f"Cannot process BIRD 'course_availability_from'-property {course_availability_from} " - f"(Expected a string, but received {type(course_availability_from)} instead.") + log.warning( + f"Cannot process BIRD 'course_availability_from'-property {course_availability_from} " + f"(Expected a string, but received {type(course_availability_from)} instead." + ) del course_adapter["course_availability_from"] # Prepare BIRD "course_availability_until" for "ccm:oeh_event_end" (-> ISO-formatted "datetime"-string) @@ -420,15 +426,18 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr cau_iso: str = cau_parsed.isoformat() course_adapter["course_availability_until"] = cau_iso else: - log.warning(f"Failed to parse \"{course_availability_until}\" to a valid 'datetime'-object. " - f"(Please check the object {item_adapter['sourceId']} for unhandled edge-cases or " - f"extend the CourseItemPipeline!)") + log.warning( + f"Failed to parse \"{course_availability_until}\" to a valid 'datetime'-object. " + f"(Please check the object {item_adapter['sourceId']} for unhandled edge-cases or " + f"extend the CourseItemPipeline!)" + ) del course_adapter["course_availability_until"] else: log.warning( - f"Cannot process BIRD \"course_availability_until\"-property {course_availability_until} " + f'Cannot process BIRD "course_availability_until"-property {course_availability_until} ' f"(Expected a string, but received {type(course_availability_until)} instead.) " - f"Deleting property...") + f"Deleting property..." + ) del course_adapter["course_availability_until"] if "course_description_short" in course_adapter: @@ -438,17 +447,18 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr # happy-case: the description is a string pass else: - log.warning(f"Cannot process BIRD 'course_description_short'-property for item " - f"{item_adapter['sourceId']} . Expected a string, but received " - f"{type(course_description_short)} instead. Deleting property...") + log.warning( + f"Cannot process BIRD 'course_description_short'-property for item " + f"{item_adapter['sourceId']} . Expected a string, but received " + f"{type(course_description_short)} instead. Deleting property..." + ) del course_adapter["course_description_short"] if "course_duration" in course_adapter: # course_duration -> 'cclom:typicallearningtime' (ms) course_duration: int = course_adapter["course_duration"] course_duration = determine_duration_and_convert_to_seconds( - time_raw=course_duration, - item_field_name="CourseItem.course_duration" + time_raw=course_duration, item_field_name="CourseItem.course_duration" ) if isinstance(course_duration, int): if course_duration: @@ -457,13 +467,17 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr elif course_duration == 0: # a duration of zero seconds is not a valid time duration, but most likely just a limitation # of different backend systems how they store "empty" values for this metadata property. - log.debug(f"Received zero duration value within 'course_duration'-property of item " - f"{item_adapter['sourceId']}. Deleting property ...") + log.debug( + f"Received zero duration value within 'course_duration'-property of item " + f"{item_adapter['sourceId']}. Deleting property ..." 
+ ) del course_adapter["course_duration"] else: - log.warning(f"Cannot process BIRD 'course_duration'-property for item {item_adapter['sourceId']} . " - f"Expected a single (positive) integer value (in seconds), " - f"but received {type(course_duration)} instead. Deleting property...") + log.warning( + f"Cannot process BIRD 'course_duration'-property for item {item_adapter['sourceId']} . " + f"Expected a single (positive) integer value (in seconds), " + f"but received {type(course_duration)} instead. Deleting property..." + ) del course_adapter["course_duration"] if "course_learningoutcome" in course_adapter: @@ -481,15 +495,18 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr course_learning_outcome_clean.append(clo_candidate) else: # if the list item isn't a string, we won't save it to the cleaned up list - log.warning(f"Received unexpected type as part of 'course_learningoutcome': " - f"Expected list[str], but received a {type(clo_candidate)} " - f"instead. Raw value: {clo_candidate}") + log.warning( + f"Received unexpected type as part of 'course_learningoutcome': " + f"Expected list[str], but received a {type(clo_candidate)} " + f"instead. Raw value: {clo_candidate}" + ) course_adapter["course_learningoutcome"] = course_learning_outcome_clean else: log.warning( f"Cannot process BIRD 'course_learningoutcome'-property for item {item_adapter['sourceId']} " f". Expected a string, but received {type(course_learning_outcome)} instead. " - f"Deleting property...") + f"Deleting property..." + ) del course_adapter["course_learningoutcome"] if "course_schedule" in course_adapter: @@ -499,9 +516,11 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr # happy-case pass else: - log.warning(f"Cannot process BIRD 'course_schedule'-property for item {item_adapter['sourceId']} . " - f"Expected a string, but received {type(course_schedule)} instead. " - f"Deleting property...") + log.warning( + f"Cannot process BIRD 'course_schedule'-property for item {item_adapter['sourceId']} . " + f"Expected a string, but received {type(course_schedule)} instead. " + f"Deleting property..." + ) del course_adapter["course_schedule"] if "course_url_video" in course_adapter: @@ -514,7 +533,8 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr log.warning( f"Cannot process BIRD 'course_url_video'-property for item {item_adapter['sourceId']} . " f"Expected a string, but received {type(course_url_video)} instead. " - f"Deleting property...") + f"Deleting property..." + ) del course_adapter["course_url_video"] if "course_workload" in course_adapter: @@ -526,8 +546,10 @@ def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scr # (and which type is expected) -> implement a type-check! course_workload: str = course_adapter["course_workload"] if course_workload: - log.error(f"Cannot process BIRD 'course_workload'-property: this field is not implemented yet! " - f"(Please update the 'CourseItemPipeline' (pipelines.py) and es_connector.py!)") + log.error( + f"Cannot process BIRD 'course_workload'-property: this field is not implemented yet! 
" + f"(Please update the 'CourseItemPipeline' (pipelines.py) and es_connector.py!)" + ) pass pass @@ -589,6 +611,7 @@ class ProcessThumbnailPipeline(BasicPipeline): """ generate thumbnails """ + pixel_limit: int = 178956970 # ~179 Megapixel pixel_limit_in_mp: float = pixel_limit / 1000000 Image.MAX_IMAGE_PIXELS = pixel_limit # doubles the Pillow default (89,478,485) → from 89,5 MegaPixels to 179 MP @@ -644,22 +667,28 @@ async def process_item(self, raw_item, spider): # we expect that some thumbnail URLs will be wrong, outdated or already offline, which is why we catch # the most common Exceptions while trying to dwonload the image. except twisted.internet.error.TCPTimedOutError: - log.warning(f"Thumbnail download of URL {url} failed due to TCPTimedOutError. " - f"(You might see this error if the image is unavailable under that specific URL.) " - f"Falling back to website screenshot.") + log.warning( + f"Thumbnail download of URL {url} failed due to TCPTimedOutError. " + f"(You might see this error if the image is unavailable under that specific URL.) " + f"Falling back to website screenshot." + ) del item["thumbnail"] return await self.process_item(raw_item, spider) except twisted.internet.error.DNSLookupError: - log.warning(f"Thumbnail download of URL {url} failed due to DNSLookupError. " - f"(The webserver might be offline.) Falling back to website screenshot.") + log.warning( + f"Thumbnail download of URL {url} failed due to DNSLookupError. " + f"(The webserver might be offline.) Falling back to website screenshot." + ) del item["thumbnail"] return await self.process_item(raw_item, spider) time_end: datetime = datetime.datetime.now() log.debug(f"Loading thumbnail from {url} took {time_end - time_start} (incl. awaiting).") log.debug(f"Thumbnail-URL-Cache: {self.download_thumbnail_url.cache_info()} after trying to query {url} ") if thumbnail_response.status != 200: - log.debug(f"Thumbnail-Pipeline received an unexpected response (status: {thumbnail_response.status}) " - f"from {url} (-> resolved URL: {thumbnail_response.url}") + log.debug( + f"Thumbnail-Pipeline received an unexpected response (status: {thumbnail_response.status}) " + f"from {url} (-> resolved URL: {thumbnail_response.url}" + ) # falling back to website screenshot: del item["thumbnail"] return await self.process_item(raw_item, spider) @@ -678,28 +707,31 @@ async def process_item(self, raw_item, spider): # only set the response if thumbnail retrieval was successful! elif _mimetype == "application/octet-stream": # ToDo: special handling for 'application/octet-stream' necessary? - log.debug(f"Thumbnail URL of MIME-Type 'image/...' expected, " - f"but received '{_mimetype}' instead. " - f"(If thumbnail conversion throws unexpected errors further down the line, " - f"the Thumbnail-Pipeline needs to be re-visited! URL: {url} )") + log.debug( + f"Thumbnail URL of MIME-Type 'image/...' expected, " + f"but received '{_mimetype}' instead. " + f"(If thumbnail conversion throws unexpected errors further down the line, " + f"the Thumbnail-Pipeline needs to be re-visited! URL: {url} )" + ) response = thumbnail_response else: - log.warning(f"Thumbnail URL {url} does not seem to be an image! " - f"Header contained Content-Type '{_mimetype}' instead. " - f"(Falling back to screenshot)") + log.warning( + f"Thumbnail URL {url} does not seem to be an image! " + f"Header contained Content-Type '{_mimetype}' instead. 
" + f"(Falling back to screenshot)" + ) del item["thumbnail"] return await self.process_item(raw_item, spider) except KeyError: - log.warning(f"Thumbnail URL response did not contain a Content-Type / MIME-Type! " - f"Thumbnail URL queried: {url} " - f"-> resolved URL: {thumbnail_response.url} " - f"(HTTP Status: {thumbnail_response.status}") + log.warning( + f"Thumbnail URL response did not contain a Content-Type / MIME-Type! " + f"Thumbnail URL queried: {url} " + f"-> resolved URL: {thumbnail_response.url} " + f"(HTTP Status: {thumbnail_response.status}" + ) del item["thumbnail"] return await self.process_item(raw_item, spider) - elif ( - "location" in item["lom"]["technical"] - and len(item["lom"]["technical"]["location"]) > 0 - ): + elif "location" in item["lom"]["technical"] and len(item["lom"]["technical"]["location"]) > 0: if settings_crawler.get("SPLASH_URL") and web_tools == WebEngine.Splash: target_url: str = item["lom"]["technical"]["location"][0] _splash_url: str = f"{settings_crawler.get('SPLASH_URL')}/render.png" @@ -710,33 +742,38 @@ async def process_item(self, raw_item, spider): "url": target_url, "wait": _splash_parameter_wait, "html5_media": _splash_parameter_wait, - "headers": _splash_headers + "headers": _splash_headers, } request_splash = scrapy.FormRequest( - url=_splash_url, - formdata=_splash_dict, - callback=NO_CALLBACK, - priority=1 + url=_splash_url, formdata=_splash_dict, callback=NO_CALLBACK, priority=1 ) splash_response: scrapy.http.Response = await maybe_deferred_to_future( spider.crawler.engine.download(request_splash) ) if splash_response and splash_response.status != 200: - log.debug(f"SPLASH could not handle the requested website. " - f"(Splash returned HTTP Status {splash_response.status} for {target_url} !)") + log.debug( + f"SPLASH could not handle the requested website. " + f"(Splash returned HTTP Status {splash_response.status} for {target_url} !)" + ) _splash_success = False # ToDo (optional): more granular Error-Handling for unsupported URLs? 
if splash_response.status == 415: - log.debug(f"SPLASH (HTTP Status {splash_response.status} -> Unsupported Media Type): " - f"Could not render target url {target_url}") + log.debug( + f"SPLASH (HTTP Status {splash_response.status} -> Unsupported Media Type): " + f"Could not render target url {target_url}" + ) elif splash_response: response: scrapy.http.Response = splash_response else: log.debug(f"SPLASH returned HTTP Status {splash_response.status} for {target_url} ") playwright_websocket_endpoint: str | None = env.get("PLAYWRIGHT_WS_ENDPOINT") - if (not bool(_splash_success) and playwright_websocket_endpoint - or playwright_websocket_endpoint and web_tools == WebEngine.Playwright): + if ( + not bool(_splash_success) + and playwright_websocket_endpoint + or playwright_websocket_endpoint + and web_tools == WebEngine.Playwright + ): # we're using Playwright to take a website screenshot if: # - the spider explicitly defined Playwright in its 'custom_settings'-dict # - or: Splash failed to render a website (= fallback) @@ -748,32 +785,40 @@ async def process_item(self, raw_item, spider): target_url: str = item["lom"]["technical"]["location"][0] playwright_dict = await self.take_website_screenshot_with_playwright( - spider=spider, - target_url=target_url) + spider=spider, target_url=target_url + ) try: screenshot_bytes: bytes | None = playwright_dict.get("screenshot_bytes") except AttributeError: screenshot_bytes = None - log.debug(f"Failed fallback #1: taking a website-screenshot of URL " - f"{target_url} wasn't possible!") - if lom_technical_location and isinstance(lom_technical_location, list) and len( - lom_technical_location) >= 2: + log.debug( + f"Failed fallback #1: taking a website-screenshot of URL " f"{target_url} wasn't possible!" + ) + if ( + lom_technical_location + and isinstance(lom_technical_location, list) + and len(lom_technical_location) >= 2 + ): # this edge-case might happen during crawls of items with multiple URLs: # the first URL might be a direct-link to an audio/video file (example: podcast episode as .mp3) # while the second URL might point towards the webpage of said podcast episode target_url_2nd: str = lom_technical_location[1] if target_url_2nd and isinstance(target_url_2nd, str): - log.debug(f"Second URL in LOM Technical Location detected. " - f"Trying to take a website-screenshot of {lom_technical_location[1]} (fallback #2)...") + log.debug( + f"Second URL in LOM Technical Location detected. " + f"Trying to take a website-screenshot of {lom_technical_location[1]} (fallback #2)..." + ) playwright_dict = await self.take_website_screenshot_with_playwright( - spider=spider, - target_url=target_url_2nd) + spider=spider, target_url=target_url_2nd + ) try: screenshot_bytes: bytes | None = playwright_dict.get("screenshot_bytes") except AttributeError: screenshot_bytes = None - log.warning(f"Failed fallback #2: taking a website-screenshot of URL " - f"{target_url_2nd} wasn't possible!") + log.warning( + f"Failed fallback #2: taking a website-screenshot of URL " + f"{target_url_2nd} wasn't possible!" 
+ ) if screenshot_bytes: img = Image.open(BytesIO(screenshot_bytes)) self.create_thumbnails_from_image_bytes(img, item, settings_crawler) @@ -805,9 +850,7 @@ async def process_item(self, raw_item, spider): item["thumbnail"]["mimetype"] = _mimetype.decode() elif _mimetype and isinstance(_mimetype, str): item["thumbnail"]["mimetype"] = _mimetype - item["thumbnail"]["small"] = base64.b64encode( - response.body - ).decode() + item["thumbnail"]["small"] = base64.b64encode(response.body).decode() else: try: img = Image.open(BytesIO(response.body)) @@ -817,7 +860,8 @@ async def process_item(self, raw_item, spider): if url: log.warning( f"Thumbnail download of image file {url} failed: image file could not be identified " - f"(Image might be broken or corrupt). Falling back to website-screenshot.") + f"(Image might be broken or corrupt). Falling back to website-screenshot." + ) del item["thumbnail"] return await self.process_item(raw_item, spider) except Image.DecompressionBombError: @@ -826,10 +870,12 @@ async def process_item(self, raw_item, spider): # If such an error is thrown, the image object won't be available. # Therefore, we need to fall back to a website screenshot. absolute_pixel_limit_in_mp = (self.pixel_limit * 2) / 1000000 - log.warning(f"Thumbnail download of {url} triggered a 'PIL.Image.DecompressionBombError'! " - f"The image either exceeds the max size of {absolute_pixel_limit_in_mp} " - f"megapixels or might have been a DoS attempt. " - f"Falling back to website screenshot...") + log.warning( + f"Thumbnail download of {url} triggered a 'PIL.Image.DecompressionBombError'! " + f"The image either exceeds the max size of {absolute_pixel_limit_in_mp} " + f"megapixels or might have been a DoS attempt. " + f"Falling back to website screenshot..." 
+ ) del item["thumbnail"] return await self.process_item(raw_item, spider) except Exception as e: @@ -841,9 +887,7 @@ async def process_item(self, raw_item, spider): return await self.process_item(raw_item, spider) else: # item['thumbnail']={} - raise DropItem( - "No thumbnail provided or resource was unavailable for fetching" - ) + raise DropItem("No thumbnail provided or resource was unavailable for fetching") return raw_item async def take_website_screenshot_with_playwright(self, spider: scrapy.Spider, target_url: str): @@ -856,10 +900,9 @@ async def take_website_screenshot_with_playwright(self, spider: scrapy.Spider, t playwright_cookies = spider.custom_settings.get("PLAYWRIGHT_COOKIES") if "PLAYWRIGHT_ADBLOCKER" in spider.custom_settings: playwright_adblock_enabled: bool = spider.custom_settings["PLAYWRIGHT_ADBLOCKER"] - playwright_dict = await WebTools.getUrlData(url=target_url, - engine=WebEngine.Playwright, - cookies=playwright_cookies, - adblock=playwright_adblock_enabled) + playwright_dict = await WebTools.getUrlData( + url=target_url, engine=WebEngine.Playwright, cookies=playwright_cookies, adblock=playwright_adblock_enabled + ) return playwright_dict @alru_cache(maxsize=128) @@ -883,9 +926,7 @@ async def download_thumbnail_url(self, url: str, spider: scrapy.Spider): request = scrapy.Request(url=url, callback=NO_CALLBACK, priority=1) # Thumbnail downloads will be executed with a slightly higher priority (default: 0), so there's less delay # between metadata processing and thumbnail retrieval steps in the pipelines - response: Deferred | Future = await maybe_deferred_to_future( - spider.crawler.engine.download(request) - ) + response: Deferred | Future = await maybe_deferred_to_future(spider.crawler.engine.download(request)) return response except ValueError: log.debug(f"Thumbnail-Pipeline received an invalid URL: {url}") @@ -927,23 +968,20 @@ def create_thumbnails_from_image_bytes(self, image: Image.Image, item, settings) ) item["thumbnail"] = {} item["thumbnail"]["mimetype"] = "image/jpeg" - item["thumbnail"]["small"] = base64.b64encode( - small_buffer.getvalue() - ).decode() - item["thumbnail"]["large"] = base64.b64encode( - large_buffer.getvalue() - ).decode() + item["thumbnail"]["small"] = base64.b64encode(small_buffer.getvalue()).decode() + item["thumbnail"]["large"] = base64.b64encode(large_buffer.getvalue()).decode() def get_settings_for_crawler(spider) -> scrapy.settings.Settings: all_settings = get_project_settings() - crawler_settings = settings.BaseSettings(getattr(spider, "custom_settings") or {}, 'spider') + crawler_settings = settings.BaseSettings(getattr(spider, "custom_settings") or {}, "spider") if isinstance(crawler_settings, dict): - crawler_settings = settings.BaseSettings(crawler_settings, 'spider') + crawler_settings = settings.BaseSettings(crawler_settings, "spider") for key in crawler_settings.keys(): if ( - all_settings.get(key) and crawler_settings.getpriority(key) > all_settings.getpriority(key) - or not all_settings.get(key) + all_settings.get(key) + and crawler_settings.getpriority(key) > all_settings.getpriority(key) + or not all_settings.get(key) ): all_settings.set(key, crawler_settings.get(key), crawler_settings.getpriority(key)) return all_settings @@ -969,12 +1007,19 @@ def process_item(self, raw_item, spider): db_item = self.find_item(item["sourceId"], spider) if db_item: if item["hash"] != db_item[1]: - log.debug(f"EduSharingCheckPipeline: hash has changed. 
Continuing pipelines for item {item['sourceId']}") + log.debug( + f"EduSharingCheckPipeline: hash has changed. Continuing pipelines for item {item['sourceId']}" + ) else: - if "EDU_SHARING_FORCE_UPDATE" in spider.custom_settings and spider.custom_settings["EDU_SHARING_FORCE_UPDATE"]: - log.debug(f"EduSharingCheckPipeline: hash unchanged for item {item['sourceId']}, " - f"but detected active 'force item update'-setting (resetVersion / forceUpdate). " - f"Continuing pipelines ...") + if ( + "EDU_SHARING_FORCE_UPDATE" in spider.custom_settings + and spider.custom_settings["EDU_SHARING_FORCE_UPDATE"] + ): + log.debug( + f"EduSharingCheckPipeline: hash unchanged for item {item['sourceId']}, " + f"but detected active 'force item update'-setting (resetVersion / forceUpdate). " + f"Continuing pipelines ..." + ) else: log.debug(f"EduSharingCheckPipeline: hash unchanged, skipping item {item['sourceId']}") # self.update(item['sourceId'], spider) @@ -983,10 +1028,12 @@ def process_item(self, raw_item, spider): # raise DropItem() return raw_item + class EduSharingTypeValidationPipeline(BasicPipeline): """ Rudimentary type-conversion before handling metadata properties off to the API client. """ + # ToDo: if you notice pydantic "ValidationError"s during crawls, implement handling of those edge-cases here! def process_item(self, item: scrapy.Item, spider: scrapy.Spider) -> Optional[scrapy.Item]: item_adapter = ItemAdapter(item) @@ -1045,7 +1092,7 @@ def __init__(self): self.exporters: dict[str, JsonItemExporter] = {} def open_spider(self, spider): - file = open(f'output_{spider.name}.json', 'wb') + file = open(f"output_{spider.name}.json", "wb") self.files[spider.name] = file exporter = JsonItemExporter( file, @@ -1062,9 +1109,10 @@ def open_spider(self, spider): # "ranking", # "thumbnail", ], - encoding='utf-8', + encoding="utf-8", indent=2, - ensure_ascii=False) + ensure_ascii=False, + ) self.exporters[spider.name] = exporter exporter.start_exporting() @@ -1086,12 +1134,8 @@ def __init__(self): CSVStorePipeline.rows = env.get("CSV_ROWS", allow_null=False).split(",") def open_spider(self, spider): - csv_file = open('output_' + spider.name + '.csv', 'w', newline='') - spamwriter = csv.writer( - csv_file, - delimiter=',', - quotechar='"', - quoting=csv.QUOTE_MINIMAL) + csv_file = open("output_" + spider.name + ".csv", "w", newline="") + spamwriter = csv.writer(csv_file, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL) spamwriter.writerow(self.rows) self.files[spider.name] = csv_file @@ -1100,14 +1144,14 @@ def open_spider(self, spider): @staticmethod def get_value(item, value): container = item - tokens = value.split('.') + tokens = value.split(".") for v in tokens: if v in container: container = container[v] else: return None - if tokens[0] == 'valuespaces': - return list(map(lambda x: Valuespaces.findKey(tokens[1], x)['prefLabel']['de'], container)) + if tokens[0] == "valuespaces": + return list(map(lambda x: Valuespaces.findKey(tokens[1], x)["prefLabel"]["de"], container)) return container def close_spider(self, spider): @@ -1126,9 +1170,11 @@ def __init__(self): self.counter = 0 def open_spider(self, spider): - logging.debug("Entering EduSharingStorePipeline...\n" - "Checking if 'crawler source template' ('Quellendatensatz-Template') should be used " - "(see: 'EDU_SHARING_SOURCE_TEMPLATE_ENABLED' .env setting)...") + logging.debug( + "Entering EduSharingStorePipeline...\n" + "Checking if 'crawler source template' ('Quellendatensatz-Template') should be used " + "(see: 
'EDU_SHARING_SOURCE_TEMPLATE_ENABLED' .env setting)..." + ) est_enabled: bool = env.get_bool("EDU_SHARING_SOURCE_TEMPLATE_ENABLED", allow_null=True, default=False) # defaults to False for backwards-compatibility. # (The EduSharingSourceTemplateHelper class is explicitly set to throw errors and abort a crawl if this setting @@ -1141,12 +1187,16 @@ def open_spider(self, spider): whitelisted_properties: dict | None = est_helper.get_whitelisted_metadata_properties() if whitelisted_properties: setattr(spider, "edu_sharing_source_template_whitelist", whitelisted_properties) - logging.debug(f"Edu-sharing source template retrieval was successful. " - f"The following metadata properties will be whitelisted for all items:\n" - f"{whitelisted_properties}") + logging.debug( + f"Edu-sharing source template retrieval was successful. " + f"The following metadata properties will be whitelisted for all items:\n" + f"{whitelisted_properties}" + ) else: - logging.error(f"Edu-Sharing Source Template retrieval failed. " - f"(Does a 'Quellendatensatz' exist in the edu-sharing repository for this spider?)") + logging.error( + f"Edu-Sharing Source Template retrieval failed. " + f"(Does a 'Quellendatensatz' exist in the edu-sharing repository for this spider?)" + ) else: log.debug(f"Edu-Sharing Source Template feature is NOT ENABLED. Continuing EduSharingStorePipeline...") @@ -1291,16 +1341,16 @@ class LisumPipeline(BasicPipeline): # eafCodes in this list are used as keys in # https://github.com/openeduhub/oeh-metadata-vocabs/blob/master/discipline.ttl # but are not part of the (standard) http://agmud.de/wp-content/uploads/2021/09/eafsys.txt - '04010', # OEH: "Körperpflege" <-> eafCode 04010: "Mechatronik" - '20090', # OEH: "Esperanto" <-> eafCode: 20080 - '44099', # "Open Educational Resources" - '64018', # "Nachhaltigkeit" - '72001', # "Zeitgemäße Bildung" - '900', # Medienbildung - '999', # Sonstiges - 'niederdeutsch', - 'oeh01', # "Arbeit, Ernährung, Soziales" - 'oeh04010' # OEH: "Mechatronik" <-> eafCode: 04010 (Mechatronik) + "04010", # OEH: "Körperpflege" <-> eafCode 04010: "Mechatronik" + "20090", # OEH: "Esperanto" <-> eafCode: 20080 + "44099", # "Open Educational Resources" + "64018", # "Nachhaltigkeit" + "72001", # "Zeitgemäße Bildung" + "900", # Medienbildung + "999", # Sonstiges + "niederdeutsch", + "oeh01", # "Arbeit, Ernährung, Soziales" + "oeh04010", # OEH: "Mechatronik" <-> eafCode: 04010 (Mechatronik) ] EDUCATIONALCONTEXT_TO_LISUM = { @@ -1356,8 +1406,8 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # this eafCode (key) gets mapped to Lisum specific B-B shorthands like "C-MA" if discipline_list: for discipline_w3id in discipline_list: - discipline_eaf_code: str = discipline_w3id.split(sep='/')[-1] - eaf_code_digits_only_regex: re.Pattern = re.compile(r'\d{3,}') + discipline_eaf_code: str = discipline_w3id.split(sep="/")[-1] + eaf_code_digits_only_regex: re.Pattern = re.compile(r"\d{3,}") match discipline_eaf_code in self.DISCIPLINE_TO_LISUM_SHORTHAND: case True: discipline_lisum_keys.add(self.DISCIPLINE_TO_LISUM_SHORTHAND.get(discipline_eaf_code)) @@ -1372,8 +1422,10 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy case _: # due to having the 'custom'-field as a (raw) list of all eafCodes, this mainly serves # the purpose of reminding us if a 'discipline'-value couldn't be mapped to Lisum - log.debug(f"LisumPipeline failed to map from eafCode {discipline_eaf_code} " - f"to its corresponding 'ccm:taxonid' short-handle. 
Trying Fallback...") + log.debug( + f"LisumPipeline failed to map from eafCode {discipline_eaf_code} " + f"to its corresponding 'ccm:taxonid' short-handle. Trying Fallback..." + ) match discipline_eaf_code: # catching edge-cases where OEH 'discipline'-vocab-keys don't line up with eafsys.txt values case "320": @@ -1386,25 +1438,33 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy discipline_eafcodes.add("2600103") # Körperpflege if eaf_code_digits_only_regex.search(discipline_eaf_code): # each numerical eafCode must have a length of (minimum) 3 digits to be considered valid - log.debug(f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. (Wil be " - f"used later for 'ccm:taxonentry').") + log.debug( + f"LisumPipeline: Writing eafCode {discipline_eaf_code} to buffer. (Wil be " + f"used later for 'ccm:taxonentry')." + ) if discipline_eaf_code not in self.EAFCODE_EXCLUSIONS: # making sure to only save eafCodes that are part of the standard eafsys.txt discipline_eafcodes.add(discipline_eaf_code) else: - log.debug(f"LisumPipeline: eafCode {discipline_eaf_code} is not part of 'EAF " - f"Sachgebietssystematik' (see: eafsys.txt), therefore skipping this " - f"value.") + log.debug( + f"LisumPipeline: eafCode {discipline_eaf_code} is not part of 'EAF " + f"Sachgebietssystematik' (see: eafsys.txt), therefore skipping this " + f"value." + ) else: # our 'discipline.ttl'-vocab holds custom keys (e.g. 'niederdeutsch', 'oeh04010') which # shouldn't be saved into 'ccm:taxonentry' (since they are not part of the regular # "EAF Sachgebietssystematik" - log.debug(f"LisumPipeline eafCode fallback for {discipline_eaf_code} to " - f"'ccm:taxonentry' was not possible. Only eafCodes with a minimum length " - f"of 3+ digits are valid. (Please confirm if the provided value is part of " - f"the 'EAF Sachgebietssystematik' (see: eafsys.txt))") - log.debug(f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " - f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}") + log.debug( + f"LisumPipeline eafCode fallback for {discipline_eaf_code} to " + f"'ccm:taxonentry' was not possible. Only eafCodes with a minimum length " + f"of 3+ digits are valid. (Please confirm if the provided value is part of " + f"the 'EAF Sachgebietssystematik' (see: eafsys.txt))" + ) + log.debug( + f"LisumPipeline: Mapping discipline values from \n {discipline_list} \n to " + f"LisumPipeline: discipline_lisum_keys \n {discipline_lisum_keys}" + ) valuespaces["discipline"] = list() # clearing 'discipline'-field, so we don't accidentally write the # remaining OEH w3id-URLs to Lisum's 'ccm:taxonid'-field @@ -1417,15 +1477,18 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # up until this point, every educationalContext entry will be a w3id link, e.g. 
# 'http://w3id.org/openeduhub/vocabs/educationalContext/grundschule' for educational_context_w3id in educational_context_list: - educational_context_w3id_key = educational_context_w3id.split(sep='/')[-1] + educational_context_w3id_key = educational_context_w3id.split(sep="/")[-1] match educational_context_w3id_key in self.EDUCATIONALCONTEXT_TO_LISUM: case True: educational_context_w3id_key = self.EDUCATIONALCONTEXT_TO_LISUM.get( - educational_context_w3id_key) + educational_context_w3id_key + ) educational_context_lisum_keys.add(educational_context_w3id_key) case _: - log.debug(f"LisumPipeline: educationalContext {educational_context_w3id_key} " - f"not found in mapping table.") + log.debug( + f"LisumPipeline: educationalContext {educational_context_w3id_key} " + f"not found in mapping table." + ) educational_context_list = list(educational_context_lisum_keys) educational_context_list.sort() valuespaces["educationalContext"] = educational_context_list @@ -1435,7 +1498,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy intended_end_user_roles = set() if intended_end_user_role_list: for item_w3id in intended_end_user_role_list: - item_w3id: str = item_w3id.split(sep='/')[-1] + item_w3id: str = item_w3id.split(sep="/")[-1] if item_w3id: intended_end_user_roles.add(item_w3id) intended_end_user_role_list = list(intended_end_user_roles) @@ -1452,7 +1515,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # during transformation from Sodix to OEH lrt_multivalue = list() for lrt_string in lrt_item: - lrt_string = lrt_string.split(sep='/')[-1] + lrt_string = lrt_string.split(sep="/")[-1] if lrt_string in self.LRT_OEH_TO_LISUM: lrt_string = self.LRT_OEH_TO_LISUM.get(lrt_string) if lrt_string: @@ -1460,7 +1523,7 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy lrt_multivalue.append(lrt_string) lrt_temporary_list.append(lrt_multivalue) if type(lrt_item) is str: - lrt_w3id: str = lrt_item.split(sep='/')[-1] + lrt_w3id: str = lrt_item.split(sep="/")[-1] if lrt_w3id in self.LRT_OEH_TO_LISUM: lrt_w3id = self.LRT_OEH_TO_LISUM.get(lrt_w3id) if lrt_w3id and type(lrt_w3id) is str: @@ -1519,8 +1582,6 @@ def process_item(self, item: BaseItem, spider: scrapy.Spider) -> Optional[scrapy # 'discipline'-vocabulary-keys. discipline_eafcodes_list = list(discipline_eafcodes) log.debug(f"LisumPipeline: Saving eafCodes {discipline_eafcodes_list} to 'ccm:taxonentry'.") - base_item_adapter.update( - {'custom': { - 'ccm:taxonentry': discipline_eafcodes_list}}) + base_item_adapter.update({"custom": {"ccm:taxonentry": discipline_eafcodes_list}}) base_item_adapter["custom"]["ccm:taxonentry"] = discipline_eafcodes_list return item
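# A condensed sketch of the LisumPipeline discipline handling above: take the trailing key of the
# w3id vocab URL, map known eafCodes to Lisum 'ccm:taxonid' shorthands (e.g. "C-MA"), and keep only
# codes with 3+ digits for 'ccm:taxonentry'. The one-entry shorthand table and the example URLs are
# illustrative assumptions (the real DISCIPLINE_TO_LISUM_SHORTHAND mapping is defined elsewhere in
# this file), and the EAFCODE_EXCLUSIONS filter is omitted for brevity:
import re

SHORTHAND_SAMPLE: dict[str, str] = {"380": "C-MA"}  # hypothetical subset, not the full mapping
EAF_CODE_DIGITS: re.Pattern = re.compile(r"\d{3,}")  # same pattern as used by the pipeline


def map_discipline(discipline_w3id: str) -> tuple[str | None, str | None]:
    """Return (lisum_shorthand, taxonentry_code) for a single discipline vocab URL."""
    eaf_code = discipline_w3id.split("/")[-1]
    shorthand = SHORTHAND_SAMPLE.get(eaf_code)  # candidate for 'ccm:taxonid'
    taxonentry = eaf_code if EAF_CODE_DIGITS.search(eaf_code) else None  # candidate for 'ccm:taxonentry'
    return shorthand, taxonentry


# Example: "http://w3id.org/openeduhub/vocabs/discipline/380" would yield ("C-MA", "380"),
# while a custom vocab key such as ".../niederdeutsch" yields (None, None).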